Compare commits

...

24 Commits

Author SHA1 Message Date
9dad59e25d tests: close generator instead of session in test_get_db_yields_async_session 2026-03-26 16:09:44 -04:00
d3vyce
29326ab532 perf: batch insert fixtures (#188) 2026-03-26 20:29:25 +01:00
d3vyce
04afef7e33 feat(fixtures): fixtures multi-variant contexts, custom Enum support, and context-filtered loading (#187) 2026-03-26 20:19:41 +01:00
d3vyce
666c621fda fix: create_db_session commits via real transaction, not savepoint (#184) 2026-03-26 19:57:40 +01:00
460b760fa4 Version 2.4.3 2026-03-26 07:58:32 -04:00
dependabot[bot]
65d0b0e0b1 ⬆ Bump ty from 0.0.23 to 0.0.25 (#178)
* ⬆ Bump ty from 0.0.23 to 0.0.25

Bumps [ty](https://github.com/astral-sh/ty) from 0.0.23 to 0.0.25.
- [Release notes](https://github.com/astral-sh/ty/releases)
- [Changelog](https://github.com/astral-sh/ty/blob/main/CHANGELOG.md)
- [Commits](https://github.com/astral-sh/ty/compare/0.0.23...0.0.25)

---
updated-dependencies:
- dependency-name: ty
  dependency-version: 0.0.25
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

* fix: ty warnings

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: d3vyce <nicolas.sudres@proton.me>
2026-03-26 12:57:29 +01:00
dependabot[bot]
2d49cd32db ⬆ Update uv-build requirement from <0.11.0,>=0.10 to >=0.10,<0.12.0 (#181) 2026-03-26 09:43:52 +01:00
dependabot[bot]
a5dd756d87 ⬆ Bump actions/deploy-pages from 4 to 5 (#177) 2026-03-26 09:43:40 +01:00
dependabot[bot]
781cfb66c9 ⬆ Bump zensical from 0.0.27 to 0.0.29 (#179) 2026-03-26 09:42:53 +01:00
dependabot[bot]
91b84f8146 ⬆ Bump ruff from 0.15.6 to 0.15.7 (#180) 2026-03-26 09:42:38 +01:00
dependabot[bot]
396e381ac3 ⬆ Bump pytest-cov from 7.0.0 to 7.1.0 (#182) 2026-03-26 09:41:59 +01:00
d3vyce
b4eb4c1ca9 fix: force auto-begin in create_db_dependency so lock_tables always uses savepoints (#176) 2026-03-25 19:26:28 +01:00
c90717754f chore: add prek (pre-commit alternative) 2026-03-25 14:24:30 -04:00
337985ef38 Version 2.4.2 2026-03-24 15:39:14 -04:00
d3vyce
b5e6dfe6fe refactor: test suite cleanup and simplification (#174) 2026-03-24 20:38:35 +01:00
d3vyce
6681b7ade7 fix: defer on_create/on_update/on_delete dispatch until outermost transaction commits (#172) 2026-03-24 19:56:03 +01:00
d3vyce
6981c33dc8 fix: inherit @watch field filter from parent classes via MRO traversal (#170) 2026-03-23 19:08:17 +01:00
d3vyce
0c7a99039c fix: await any awaitable callback return value, not only coroutines (#168) 2026-03-23 18:58:48 +01:00
d3vyce
bcb5b0bfda fix: suppress on_create/on_delete for objects created and deleted within the same transaction (#166) 2026-03-23 18:51:28 +01:00
100e1c1aa9 Version 2.4.1 2026-03-21 11:48:17 -04:00
d3vyce
db6c7a565f feat: add offset_params, cursor_params and paginate_params FastAPI dependency factories (#162) 2026-03-21 16:44:11 +01:00
d3vyce
768e405554 fix: use URL-safe base64 encoding for cursor tokens (#160) 2026-03-21 15:33:17 +01:00
d3vyce
f0223ebde4 feat: add pages computed field to OffsetPagination schema (#159) 2026-03-21 15:24:11 +01:00
d3vyce
f8c9bf69fe feat: add include_total flag to offset pagination to skip COUNT query (#158) 2026-03-21 15:16:22 +01:00
31 changed files with 1898 additions and 370 deletions

View File

@@ -34,5 +34,5 @@ jobs:
with: with:
path: site path: site
- uses: actions/deploy-pages@v4 - uses: actions/deploy-pages@v5
id: deployment id: deployment

.pre-commit-config.yaml — new file, 34 lines
View File

@@ -0,0 +1,34 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  # Generic hygiene hooks from the upstream pre-commit-hooks collection.
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v6.0.0
    hooks:
      - id: check-added-large-files
        # Reject files larger than 750 kB; the uv lockfile is exempt.
        args: ["--maxkb=750"]
        exclude: ^uv.lock$
      - id: end-of-file-fixer
      - id: trailing-whitespace
  # Project-local hooks that shell out to tools managed via `uv run`,
  # so the hook versions track pyproject.toml rather than a hook repo rev.
  - repo: local
    hooks:
      - id: local-ruff-check
        name: ruff check
        # --exit-non-zero-on-fix makes the hook fail (and re-stage) when
        # ruff auto-fixed something, matching usual pre-commit semantics.
        entry: uv run ruff check --force-exclude --fix --exit-non-zero-on-fix .
        require_serial: true
        # NOTE(review): "unsupported" is not a documented pre-commit language
        # value — presumably specific to prek (the runner this repo adopted);
        # confirm it is accepted by the runner in use.
        language: unsupported
        types: [python]
      - id: local-ruff-format
        name: ruff format
        entry: uv run ruff format --force-exclude --exit-non-zero-on-format .
        require_serial: true
        language: unsupported
        types: [python]
      - id: local-ty
        name: ty check
        entry: uv run ty check
        require_serial: true
        language: unsupported
        # ty checks the whole project; individual filenames are irrelevant.
        pass_filenames: false

View File

@@ -72,6 +72,7 @@ GET /articles/offset?page=2&items_per_page=10&search=fastapi&status=published&or
], ],
"pagination": { "pagination": {
"total_count": 42, "total_count": 42,
"pages": 5,
"page": 2, "page": 2,
"items_per_page": 10, "items_per_page": 10,
"has_more": true "has_more": true
@@ -85,6 +86,8 @@ GET /articles/offset?page=2&items_per_page=10&search=fastapi&status=published&or
`filter_attributes` always reflects the values visible **after** applying the active filters. Use it to populate filter dropdowns on the client. `filter_attributes` always reflects the values visible **after** applying the active filters. Use it to populate filter dropdowns on the client.
To skip the `COUNT(*)` query for better performance on large tables, pass `include_total=False`. `pagination.total_count` will be `null` in the response, while `has_more` remains accurate.
### Cursor pagination ### Cursor pagination
Best for feeds, infinite scroll, or any high-throughput API where offset performance degrades. Best for feeds, infinite scroll, or any high-throughput API where offset performance degrades.
@@ -144,7 +147,7 @@ GET /articles/?pagination_type=offset&page=1&items_per_page=10
"status": "SUCCESS", "status": "SUCCESS",
"pagination_type": "offset", "pagination_type": "offset",
"data": ["..."], "data": ["..."],
"pagination": { "total_count": 42, "page": 1, "items_per_page": 10, "has_more": true } "pagination": { "total_count": 42, "pages": 5, "page": 1, "items_per_page": 10, "has_more": true }
} }
``` ```

View File

@@ -182,6 +182,7 @@ The [`offset_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.Async
"data": ["..."], "data": ["..."],
"pagination": { "pagination": {
"total_count": 100, "total_count": 100,
"pages": 5,
"page": 1, "page": 1,
"items_per_page": 20, "items_per_page": 20,
"has_more": true "has_more": true
@@ -189,6 +190,40 @@ The [`offset_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.Async
} }
``` ```
#### Skipping the COUNT query
!!! info "Added in `v2.4.1`"
By default `offset_paginate` runs two queries: one for the page items and one `COUNT(*)` for `total_count`. On large tables the `COUNT` can be expensive. Pass `include_total=False` to skip it:
```python
result = await UserCrud.offset_paginate(
session=session,
page=page,
items_per_page=items_per_page,
include_total=False,
schema=UserRead,
)
```
#### Pagination params dependency
!!! info "Added in `v2.4.1`"
Use [`offset_params()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.offset_params) to generate a FastAPI dependency that injects `page` and `items_per_page` from query parameters with configurable defaults and a `max_page_size` cap:
```python
from typing import Annotated
from fastapi import Depends
@router.get("")
async def list_users(
session: SessionDep,
params: Annotated[dict, Depends(UserCrud.offset_params(default_page_size=20, max_page_size=100))],
) -> OffsetPaginatedResponse[UserRead]:
return await UserCrud.offset_paginate(session=session, **params, schema=UserRead)
```
### Cursor pagination ### Cursor pagination
```python ```python
@@ -238,7 +273,7 @@ The cursor column is set once on [`CrudFactory`](../reference/crud.md#fastapi_to
!!! note !!! note
`cursor_column` is required. Calling [`cursor_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.cursor_paginate) on a CRUD class that has no `cursor_column` configured raises a `ValueError`. `cursor_column` is required. Calling [`cursor_paginate`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.cursor_paginate) on a CRUD class that has no `cursor_column` configured raises a `ValueError`.
The cursor value is base64-encoded when returned to the client and decoded back to the correct Python type on the next request. The following SQLAlchemy column types are supported: The cursor value is URL-safe base64-encoded (no padding) when returned to the client and decoded back to the correct Python type on the next request. The following SQLAlchemy column types are supported:
| SQLAlchemy type | Python type | | SQLAlchemy type | Python type |
|---|---| |---|---|
@@ -256,6 +291,24 @@ PostCrud = CrudFactory(model=Post, cursor_column=Post.id)
PostCrud = CrudFactory(model=Post, cursor_column=Post.created_at) PostCrud = CrudFactory(model=Post, cursor_column=Post.created_at)
``` ```
#### Pagination params dependency
!!! info "Added in `v2.4.1`"
Use [`cursor_params()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.cursor_params) to inject `cursor` and `items_per_page` from query parameters with a `max_page_size` cap:
```python
from typing import Annotated
from fastapi import Depends
@router.get("")
async def list_users(
session: SessionDep,
params: Annotated[dict, Depends(UserCrud.cursor_params(default_page_size=20, max_page_size=100))],
) -> CursorPaginatedResponse[UserRead]:
return await UserCrud.cursor_paginate(session=session, **params, schema=UserRead)
```
### Unified endpoint (both strategies) ### Unified endpoint (both strategies)
!!! info "Added in `v2.3.0`" !!! info "Added in `v2.3.0`"
@@ -289,7 +342,24 @@ GET /users?pagination_type=offset&page=2&items_per_page=10
GET /users?pagination_type=cursor&cursor=eyJ2YWx1ZSI6...&items_per_page=10 GET /users?pagination_type=cursor&cursor=eyJ2YWx1ZSI6...&items_per_page=10
``` ```
Both `page` and `cursor` are always accepted by the endpoint — unused parameters are silently ignored by `paginate()`. #### Pagination params dependency
!!! info "Added in `v2.4.1`"
Use [`paginate_params()`](../reference/crud.md#fastapi_toolsets.crud.factory.AsyncCrud.paginate_params) to inject all parameters at once with configurable defaults and a `max_page_size` cap:
```python
from typing import Annotated
from fastapi import Depends
from fastapi_toolsets.schemas import PaginatedResponse
@router.get("")
async def list_users(
session: SessionDep,
params: Annotated[dict, Depends(UserCrud.paginate_params(default_page_size=20, max_page_size=100))],
) -> PaginatedResponse[UserRead]:
return await UserCrud.paginate(session, **params, schema=UserRead)
```
## Search ## Search

View File

@@ -38,18 +38,20 @@ By context with [`load_fixtures_by_context`](../reference/fixtures.md#fastapi_to
from fastapi_toolsets.fixtures import load_fixtures_by_context from fastapi_toolsets.fixtures import load_fixtures_by_context
async with db_context() as session: async with db_context() as session:
await load_fixtures_by_context(session=session, registry=fixtures, context=Context.TESTING) await load_fixtures_by_context(session, fixtures, Context.TESTING)
``` ```
Directly with [`load_fixtures`](../reference/fixtures.md#fastapi_toolsets.fixtures.utils.load_fixtures): Directly by name with [`load_fixtures`](../reference/fixtures.md#fastapi_toolsets.fixtures.utils.load_fixtures):
```python ```python
from fastapi_toolsets.fixtures import load_fixtures from fastapi_toolsets.fixtures import load_fixtures
async with db_context() as session: async with db_context() as session:
await load_fixtures(session=session, registry=fixtures) await load_fixtures(session, fixtures, "roles", "test_users")
``` ```
Both functions return a `dict[str, list[...]]` mapping each fixture name to the list of loaded instances.
## Contexts ## Contexts
[`Context`](../reference/fixtures.md#fastapi_toolsets.fixtures.enum.Context) is an enum with predefined values: [`Context`](../reference/fixtures.md#fastapi_toolsets.fixtures.enum.Context) is an enum with predefined values:
@@ -58,10 +60,60 @@ async with db_context() as session:
|---------|-------------| |---------|-------------|
| `Context.BASE` | Core data required in all environments | | `Context.BASE` | Core data required in all environments |
| `Context.TESTING` | Data only loaded during tests | | `Context.TESTING` | Data only loaded during tests |
| `Context.DEVELOPMENT` | Data only loaded in development |
| `Context.PRODUCTION` | Data only loaded in production | | `Context.PRODUCTION` | Data only loaded in production |
A fixture with no `contexts` defined takes `Context.BASE` by default. A fixture with no `contexts` defined takes `Context.BASE` by default.
### Custom contexts
Plain strings and any `Enum` subclass are accepted wherever a `Context` enum is expected.
```python
from enum import Enum
class AppContext(str, Enum):
STAGING = "staging"
DEMO = "demo"
@fixtures.register(contexts=[AppContext.STAGING])
def staging_data():
return [Config(key="feature_x", enabled=True)]
await load_fixtures_by_context(session, fixtures, AppContext.STAGING)
```
### Default context for a registry
Pass `contexts` to `FixtureRegistry` to set a default for all fixtures registered in it:
```python
testing_registry = FixtureRegistry(contexts=[Context.TESTING])
@testing_registry.register # implicitly contexts=[Context.TESTING]
def test_orders():
return [Order(id=1, total=99)]
```
### Same fixture name, multiple context variants
The same fixture name may be registered under different (non-overlapping) context sets. When multiple contexts are loaded together, all matching variants are merged:
```python
@fixtures.register(contexts=[Context.BASE])
def users():
return [User(id=1, username="admin")]
@fixtures.register(contexts=[Context.TESTING])
def users():
return [User(id=2, username="tester")]
# loads both admin and tester
await load_fixtures_by_context(session, fixtures, Context.BASE, Context.TESTING)
```
Registering two variants with overlapping context sets raises `ValueError`.
## Load strategies ## Load strategies
[`LoadStrategy`](../reference/fixtures.md#fastapi_toolsets.fixtures.enum.LoadStrategy) controls how the fixture loader handles rows that already exist: [`LoadStrategy`](../reference/fixtures.md#fastapi_toolsets.fixtures.enum.LoadStrategy) controls how the fixture loader handles rows that already exist:
@@ -69,20 +121,44 @@ A fixture with no `contexts` defined takes `Context.BASE` by default.
| Strategy | Description | | Strategy | Description |
|----------|-------------| |----------|-------------|
| `LoadStrategy.INSERT` | Insert only, fail on duplicates | | `LoadStrategy.INSERT` | Insert only, fail on duplicates |
| `LoadStrategy.UPSERT` | Insert or update on conflict | | `LoadStrategy.MERGE` | Insert or update on conflict (default) |
| `LoadStrategy.SKIP` | Skip rows that already exist | | `LoadStrategy.SKIP_EXISTING` | Skip rows that already exist |
```python
await load_fixtures_by_context(
session, fixtures, Context.BASE, strategy=LoadStrategy.SKIP_EXISTING
)
```
## Merging registries ## Merging registries
Split fixtures definitions across modules and merge them: Split fixture definitions across modules and merge them:
```python ```python
from myapp.fixtures.dev import dev_fixtures from myapp.fixtures.dev import dev_fixtures
from myapp.fixtures.prod import prod_fixtures from myapp.fixtures.prod import prod_fixtures
fixtures = fixturesRegistry() fixtures = FixtureRegistry()
fixtures.include_registry(registry=dev_fixtures) fixtures.include_registry(registry=dev_fixtures)
fixtures.include_registry(registry=prod_fixtures) fixtures.include_registry(registry=prod_fixtures)
```
Fixtures with the same name are allowed as long as their context sets do not overlap. Conflicting contexts raise `ValueError`.
## Looking up fixture instances
[`get_obj_by_attr`](../reference/fixtures.md#fastapi_toolsets.fixtures.utils.get_obj_by_attr) retrieves a specific instance from a fixture function by attribute value — useful when building cross-fixture `depends_on` relationships:
```python
from fastapi_toolsets.fixtures import get_obj_by_attr
@fixtures.register(depends_on=["roles"])
def users():
admin_role = get_obj_by_attr(roles, "name", "admin")
return [User(id=1, username="alice", role_id=admin_role.id)]
```
Raises `StopIteration` if no matching instance is found.
## Pytest integration ## Pytest integration
@@ -111,7 +187,6 @@ async def test_user_can_login(fixture_users: list[User], fixture_roles: list[Rol
... ...
``` ```
The load order is resolved automatically from the `depends_on` declarations in your registry. Each generated fixture receives `db_session` as a dependency and returns the list of loaded model instances. The load order is resolved automatically from the `depends_on` declarations in your registry. Each generated fixture receives `db_session` as a dependency and returns the list of loaded model instances.
## CLI integration ## CLI integration

View File

@@ -138,6 +138,23 @@ Server-side defaults (e.g. `id`, `created_at`) are fully populated in all callba
| `@watch("status", "role")` | Only fires when `status` or `role` changes | | `@watch("status", "role")` | Only fires when `status` or `role` changes |
| *(no decorator)* | Fires when **any** mapped field changes | | *(no decorator)* | Fires when **any** mapped field changes |
`@watch` is inherited through the class hierarchy. If a subclass does not declare its own `@watch`, it uses the filter from the nearest decorated parent. Applying `@watch` on the subclass overrides the parent's filter:
```python
@watch("status")
class Order(Base, UUIDMixin, WatchedFieldsMixin):
...
class UrgentOrder(Order):
# inherits @watch("status") — on_update fires only for status changes
...
@watch("priority")
class PriorityOrder(Order):
# overrides parent — on_update fires only for priority changes
...
```
#### Option 1 — catch-all with `on_event` #### Option 1 — catch-all with `on_event`
Override `on_event` to handle all event types in one place. The specific methods delegate here by default: Override `on_event` to handle all event types in one place. The specific methods delegate here by default:
@@ -197,6 +214,25 @@ The `changes` dict maps each watched field that changed to `{"old": ..., "new":
!!! warning "Callbacks fire only for ORM-level changes. Rows updated via raw SQL (`UPDATE ... SET ...`) are not detected." !!! warning "Callbacks fire only for ORM-level changes. Rows updated via raw SQL (`UPDATE ... SET ...`) are not detected."
!!! warning "Callbacks fire when the **outermost active context** (savepoint or transaction) commits."
If you create several related objects using `CrudFactory.create` and need
callbacks to see all of them (including associations), wrap the whole
operation in a single [`get_transaction`](db.md) or [`lock_tables`](db.md)
block. Without it, each `create` call commits its own savepoint and
`on_create` fires before the remaining objects exist.
```python
from fastapi_toolsets.db import get_transaction
async with get_transaction(session):
order = await OrderCrud.create(session, order_data)
item = await ItemCrud.create(session, item_data)
await session.refresh(order, attribute_names=["items"])
order.items.append(item)
# on_create fires here for both order and item,
# with the full association already committed.
```
## Composing mixins ## Composing mixins
All mixins can be combined in any order. The only constraint is that exactly one primary key must be defined — either via `UUIDMixin` or directly on the model. All mixins can be combined in any order. The only constraint is that exactly one primary key must be defined — either via `UUIDMixin` or directly on the model.

View File

@@ -1,8 +1,8 @@
from typing import Annotated from typing import Annotated
from fastapi import APIRouter, Depends, Query from fastapi import APIRouter, Depends
from fastapi_toolsets.crud import OrderByClause, PaginationType from fastapi_toolsets.crud import OrderByClause
from fastapi_toolsets.schemas import ( from fastapi_toolsets.schemas import (
CursorPaginatedResponse, CursorPaginatedResponse,
OffsetPaginatedResponse, OffsetPaginatedResponse,
@@ -20,19 +20,20 @@ router = APIRouter(prefix="/articles")
@router.get("/offset") @router.get("/offset")
async def list_articles_offset( async def list_articles_offset(
session: SessionDep, session: SessionDep,
params: Annotated[
dict,
Depends(ArticleCrud.offset_params(default_page_size=20, max_page_size=100)),
],
filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())], filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())],
order_by: Annotated[ order_by: Annotated[
OrderByClause | None, OrderByClause | None,
Depends(ArticleCrud.order_params(default_field=Article.created_at)), Depends(ArticleCrud.order_params(default_field=Article.created_at)),
], ],
page: int = Query(1, ge=1),
items_per_page: int = Query(20, ge=1, le=100),
search: str | None = None, search: str | None = None,
) -> OffsetPaginatedResponse[ArticleRead]: ) -> OffsetPaginatedResponse[ArticleRead]:
return await ArticleCrud.offset_paginate( return await ArticleCrud.offset_paginate(
session=session, session=session,
page=page, **params,
items_per_page=items_per_page,
search=search, search=search,
filter_by=filter_by or None, filter_by=filter_by or None,
order_by=order_by, order_by=order_by,
@@ -43,19 +44,20 @@ async def list_articles_offset(
@router.get("/cursor") @router.get("/cursor")
async def list_articles_cursor( async def list_articles_cursor(
session: SessionDep, session: SessionDep,
params: Annotated[
dict,
Depends(ArticleCrud.cursor_params(default_page_size=20, max_page_size=100)),
],
filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())], filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())],
order_by: Annotated[ order_by: Annotated[
OrderByClause | None, OrderByClause | None,
Depends(ArticleCrud.order_params(default_field=Article.created_at)), Depends(ArticleCrud.order_params(default_field=Article.created_at)),
], ],
cursor: str | None = None,
items_per_page: int = Query(20, ge=1, le=100),
search: str | None = None, search: str | None = None,
) -> CursorPaginatedResponse[ArticleRead]: ) -> CursorPaginatedResponse[ArticleRead]:
return await ArticleCrud.cursor_paginate( return await ArticleCrud.cursor_paginate(
session=session, session=session,
cursor=cursor, **params,
items_per_page=items_per_page,
search=search, search=search,
filter_by=filter_by or None, filter_by=filter_by or None,
order_by=order_by, order_by=order_by,
@@ -66,23 +68,20 @@ async def list_articles_cursor(
@router.get("/") @router.get("/")
async def list_articles( async def list_articles(
session: SessionDep, session: SessionDep,
params: Annotated[
dict,
Depends(ArticleCrud.paginate_params(default_page_size=20, max_page_size=100)),
],
filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())], filter_by: Annotated[dict[str, list[str]], Depends(ArticleCrud.filter_params())],
order_by: Annotated[ order_by: Annotated[
OrderByClause | None, OrderByClause | None,
Depends(ArticleCrud.order_params(default_field=Article.created_at)), Depends(ArticleCrud.order_params(default_field=Article.created_at)),
], ],
pagination_type: PaginationType = PaginationType.OFFSET,
page: int = Query(1, ge=1),
cursor: str | None = None,
items_per_page: int = Query(20, ge=1, le=100),
search: str | None = None, search: str | None = None,
) -> PaginatedResponse[ArticleRead]: ) -> PaginatedResponse[ArticleRead]:
return await ArticleCrud.paginate( return await ArticleCrud.paginate(
session, session,
pagination_type=pagination_type, **params,
page=page,
cursor=cursor,
items_per_page=items_per_page,
search=search, search=search,
filter_by=filter_by or None, filter_by=filter_by or None,
order_by=order_by, order_by=order_by,

View File

@@ -1,6 +1,6 @@
[project] [project]
name = "fastapi-toolsets" name = "fastapi-toolsets"
version = "2.4.0" version = "2.4.3"
description = "Production-ready utilities for FastAPI applications" description = "Production-ready utilities for FastAPI applications"
readme = "README.md" readme = "README.md"
license = "MIT" license = "MIT"
@@ -67,6 +67,7 @@ dev = [
{include-group = "tests"}, {include-group = "tests"},
{include-group = "docs"}, {include-group = "docs"},
"fastapi-toolsets[all]", "fastapi-toolsets[all]",
"prek>=0.3.8",
"ruff>=0.1.0", "ruff>=0.1.0",
"ty>=0.0.1a0", "ty>=0.0.1a0",
] ]
@@ -84,7 +85,7 @@ docs = [
] ]
[build-system] [build-system]
requires = ["uv_build>=0.10,<0.11.0"] requires = ["uv_build>=0.10,<0.12.0"]
build-backend = "uv_build" build-backend = "uv_build"
[tool.pytest.ini_options] [tool.pytest.ini_options]

View File

@@ -21,4 +21,4 @@ Example usage:
return Response(data={"user": user.username}, message="Success") return Response(data={"user": user.username}, message="Success")
""" """
__version__ = "2.4.0" __version__ = "2.4.3"

View File

@@ -58,18 +58,33 @@ class _CursorDirection(str, Enum):
def _encode_cursor( def _encode_cursor(
value: Any, *, direction: _CursorDirection = _CursorDirection.NEXT value: Any, *, direction: _CursorDirection = _CursorDirection.NEXT
) -> str: ) -> str:
"""Encode a cursor column value and navigation direction as a base64 string.""" """Encode a cursor column value and navigation direction as a URL-safe base64 string."""
return base64.b64encode( return (
base64.urlsafe_b64encode(
json.dumps({"val": str(value), "dir": direction}).encode() json.dumps({"val": str(value), "dir": direction}).encode()
).decode() )
.decode()
.rstrip("=")
)
def _decode_cursor(cursor: str) -> tuple[str, _CursorDirection]: def _decode_cursor(cursor: str) -> tuple[str, _CursorDirection]:
"""Decode a cursor base64 string into ``(raw_value, direction)``.""" """Decode a URL-safe base64 cursor string into ``(raw_value, direction)``."""
payload = json.loads(base64.b64decode(cursor.encode()).decode()) padded = cursor + "=" * (-len(cursor) % 4)
payload = json.loads(base64.urlsafe_b64decode(padded).decode())
return payload["val"], _CursorDirection(payload["dir"]) return payload["val"], _CursorDirection(payload["dir"])
def _page_size_query(default: int, max_size: int) -> int:
"""Return a FastAPI ``Query`` for the ``items_per_page`` parameter."""
return Query(
default,
ge=1,
le=max_size,
description=f"Number of items per page (max {max_size})",
)
def _parse_cursor_value(raw_val: str, col_type: Any) -> Any: def _parse_cursor_value(raw_val: str, col_type: Any) -> Any:
"""Parse a raw cursor string value back into the appropriate Python type.""" """Parse a raw cursor string value back into the appropriate Python type."""
if isinstance(col_type, Integer): if isinstance(col_type, Integer):
@@ -254,6 +269,7 @@ class AsyncCrud(Generic[ModelType]):
facet_fields: Sequence[FacetFieldType] | None = None, facet_fields: Sequence[FacetFieldType] | None = None,
) -> Callable[..., Awaitable[dict[str, list[str]]]]: ) -> Callable[..., Awaitable[dict[str, list[str]]]]:
"""Return a FastAPI dependency that collects facet filter values from query parameters. """Return a FastAPI dependency that collects facet filter values from query parameters.
Args: Args:
facet_fields: Override the facet fields for this dependency. Falls back to the facet_fields: Override the facet fields for this dependency. Falls back to the
class-level ``facet_fields`` if not provided. class-level ``facet_fields`` if not provided.
@@ -279,7 +295,7 @@ class AsyncCrud(Generic[ModelType]):
return {k: v for k, v in kwargs.items() if v is not None} return {k: v for k, v in kwargs.items() if v is not None}
dependency.__name__ = f"{cls.model.__name__}FilterParams" dependency.__name__ = f"{cls.model.__name__}FilterParams"
dependency.__signature__ = inspect.Signature( # type: ignore[attr-defined] dependency.__signature__ = inspect.Signature( # type: ignore[attr-defined] # ty:ignore[unresolved-attribute]
parameters=[ parameters=[
inspect.Parameter( inspect.Parameter(
k, k,
@@ -293,6 +309,121 @@ class AsyncCrud(Generic[ModelType]):
return dependency return dependency
@classmethod
def offset_params(
    cls: type[Self],
    *,
    default_page_size: int = 20,
    max_page_size: int = 100,
    include_total: bool = True,
) -> Callable[..., Awaitable[dict[str, Any]]]:
    """Return a FastAPI dependency that collects offset pagination params from query params.

    Args:
        default_page_size: Default value for the ``items_per_page`` query parameter.
        max_page_size: Maximum allowed value for ``items_per_page`` (enforced via
            ``le`` on the ``Query``).
        include_total: Server-side flag forwarded as-is to ``include_total`` in
            :meth:`offset_paginate`. Not exposed as a query parameter.

    Returns:
        An async dependency that resolves to a dict with ``page``,
        ``items_per_page``, and ``include_total`` keys, ready to be
        unpacked into :meth:`offset_paginate`.
    """

    async def dependency(
        page: int = Query(1, ge=1, description="Page number (1-indexed)"),
        items_per_page: int = _page_size_query(default_page_size, max_page_size),
    ) -> dict[str, Any]:
        # include_total is closed over from the factory call, not read from
        # the request — callers opt out of the COUNT query server-side only.
        return {
            "page": page,
            "items_per_page": items_per_page,
            "include_total": include_total,
        }

    # Give each model's dependency a distinct function name — presumably so
    # generated OpenAPI operation/dependency names don't collide across CRUD
    # classes (same pattern as filter_params above); confirm if relied upon.
    dependency.__name__ = f"{cls.model.__name__}OffsetParams"
    return dependency
@classmethod
def cursor_params(
    cls: type[Self],
    *,
    default_page_size: int = 20,
    max_page_size: int = 100,
) -> Callable[..., Awaitable[dict[str, Any]]]:
    """Return a FastAPI dependency that collects cursor pagination params from query params.

    Args:
        default_page_size: Default value for the ``items_per_page`` query parameter.
        max_page_size: Maximum allowed value for ``items_per_page`` (enforced via
            ``le`` on the ``Query``).

    Returns:
        An async dependency that resolves to a dict with ``cursor`` and
        ``items_per_page`` keys, ready to be unpacked into
        :meth:`cursor_paginate`.
    """

    async def dependency(
        # cursor defaults to None: the first page of a cursor-paginated
        # listing is requested without a token.
        cursor: str | None = Query(
            None, description="Cursor token from a previous response"
        ),
        items_per_page: int = _page_size_query(default_page_size, max_page_size),
    ) -> dict[str, Any]:
        return {"cursor": cursor, "items_per_page": items_per_page}

    # Distinct per-model name, mirroring offset_params/filter_params.
    dependency.__name__ = f"{cls.model.__name__}CursorParams"
    return dependency
@classmethod
def paginate_params(
    cls: type[Self],
    *,
    default_page_size: int = 20,
    max_page_size: int = 100,
    default_pagination_type: PaginationType = PaginationType.OFFSET,
    include_total: bool = True,
) -> Callable[..., Awaitable[dict[str, Any]]]:
    """Return a FastAPI dependency that collects all pagination params from query params.

    Args:
        default_page_size: Default value for the ``items_per_page`` query parameter.
        max_page_size: Maximum allowed value for ``items_per_page`` (enforced via
            ``le`` on the ``Query``).
        default_pagination_type: Default pagination strategy.
        include_total: Server-side flag forwarded as-is to ``include_total`` in
            :meth:`paginate`. Not exposed as a query parameter.

    Returns:
        An async dependency that resolves to a dict with ``pagination_type``,
        ``page``, ``cursor``, ``items_per_page``, and ``include_total`` keys,
        ready to be unpacked into :meth:`paginate`.
    """

    async def dependency(
        pagination_type: PaginationType = Query(
            default_pagination_type, description="Pagination strategy"
        ),
        # Both strategy-specific parameters are always accepted; the unused
        # one is ignored downstream by paginate() for the chosen strategy.
        page: int = Query(
            1, ge=1, description="Page number (1-indexed, offset only)"
        ),
        cursor: str | None = Query(
            None, description="Cursor token from a previous response (cursor only)"
        ),
        items_per_page: int = _page_size_query(default_page_size, max_page_size),
    ) -> dict[str, Any]:
        # include_total is a server-side closure value, never request-driven.
        return {
            "pagination_type": pagination_type,
            "page": page,
            "cursor": cursor,
            "items_per_page": items_per_page,
            "include_total": include_total,
        }

    # Distinct per-model name, mirroring the other *_params factories.
    dependency.__name__ = f"{cls.model.__name__}PaginateParams"
    return dependency
@classmethod @classmethod
def order_params( def order_params(
cls: type[Self], cls: type[Self],
@@ -922,6 +1053,7 @@ class AsyncCrud(Generic[ModelType]):
order_by: OrderByClause | None = None, order_by: OrderByClause | None = None,
page: int = 1, page: int = 1,
items_per_page: int = 20, items_per_page: int = 20,
include_total: bool = True,
search: str | SearchConfig | None = None, search: str | SearchConfig | None = None,
search_fields: Sequence[SearchFieldType] | None = None, search_fields: Sequence[SearchFieldType] | None = None,
facet_fields: Sequence[FacetFieldType] | None = None, facet_fields: Sequence[FacetFieldType] | None = None,
@@ -939,6 +1071,8 @@ class AsyncCrud(Generic[ModelType]):
order_by: Column or list of columns to order by order_by: Column or list of columns to order by
page: Page number (1-indexed) page: Page number (1-indexed)
items_per_page: Number of items per page items_per_page: Number of items per page
include_total: When ``False``, skip the ``COUNT`` query;
``pagination.total_count`` will be ``None``.
search: Search query string or SearchConfig object search: Search query string or SearchConfig object
search_fields: Fields to search in (overrides class default) search_fields: Fields to search in (overrides class default)
facet_fields: Columns to compute distinct values for (overrides class default) facet_fields: Columns to compute distinct values for (overrides class default)
@@ -983,10 +1117,10 @@ class AsyncCrud(Generic[ModelType]):
if order_by is not None: if order_by is not None:
q = q.order_by(order_by) q = q.order_by(order_by)
if include_total:
q = q.offset(offset).limit(items_per_page) q = q.offset(offset).limit(items_per_page)
result = await session.execute(q) result = await session.execute(q)
raw_items = cast(list[ModelType], result.unique().scalars().all()) raw_items = cast(list[ModelType], result.unique().scalars().all())
items: list[Any] = [schema.model_validate(item) for item in raw_items]
# Count query (with same joins and filters) # Count query (with same joins and filters)
pk_col = cls.model.__mapper__.primary_key[0] pk_col = cls.model.__mapper__.primary_key[0]
@@ -1003,7 +1137,18 @@ class AsyncCrud(Generic[ModelType]):
count_q = count_q.where(and_(*filters)) count_q = count_q.where(and_(*filters))
count_result = await session.execute(count_q) count_result = await session.execute(count_q)
total_count = count_result.scalar_one() total_count: int | None = count_result.scalar_one()
has_more = page * items_per_page < total_count
else:
# Fetch one extra row to detect if a next page exists without COUNT
q = q.offset(offset).limit(items_per_page + 1)
result = await session.execute(q)
raw_items = cast(list[ModelType], result.unique().scalars().all())
has_more = len(raw_items) > items_per_page
raw_items = raw_items[:items_per_page]
total_count = None
items: list[Any] = [schema.model_validate(item) for item in raw_items]
filter_attributes = await cls._build_filter_attributes( filter_attributes = await cls._build_filter_attributes(
session, facet_fields, filters, search_joins session, facet_fields, filters, search_joins
@@ -1015,7 +1160,7 @@ class AsyncCrud(Generic[ModelType]):
total_count=total_count, total_count=total_count,
items_per_page=items_per_page, items_per_page=items_per_page,
page=page, page=page,
has_more=page * items_per_page < total_count, has_more=has_more,
), ),
filter_attributes=filter_attributes, filter_attributes=filter_attributes,
) )
@@ -1190,6 +1335,7 @@ class AsyncCrud(Generic[ModelType]):
page: int = ..., page: int = ...,
cursor: str | None = ..., cursor: str | None = ...,
items_per_page: int = ..., items_per_page: int = ...,
include_total: bool = ...,
search: str | SearchConfig | None = ..., search: str | SearchConfig | None = ...,
search_fields: Sequence[SearchFieldType] | None = ..., search_fields: Sequence[SearchFieldType] | None = ...,
facet_fields: Sequence[FacetFieldType] | None = ..., facet_fields: Sequence[FacetFieldType] | None = ...,
@@ -1212,6 +1358,7 @@ class AsyncCrud(Generic[ModelType]):
page: int = ..., page: int = ...,
cursor: str | None = ..., cursor: str | None = ...,
items_per_page: int = ..., items_per_page: int = ...,
include_total: bool = ...,
search: str | SearchConfig | None = ..., search: str | SearchConfig | None = ...,
search_fields: Sequence[SearchFieldType] | None = ..., search_fields: Sequence[SearchFieldType] | None = ...,
facet_fields: Sequence[FacetFieldType] | None = ..., facet_fields: Sequence[FacetFieldType] | None = ...,
@@ -1233,6 +1380,7 @@ class AsyncCrud(Generic[ModelType]):
page: int = 1, page: int = 1,
cursor: str | None = None, cursor: str | None = None,
items_per_page: int = 20, items_per_page: int = 20,
include_total: bool = True,
search: str | SearchConfig | None = None, search: str | SearchConfig | None = None,
search_fields: Sequence[SearchFieldType] | None = None, search_fields: Sequence[SearchFieldType] | None = None,
facet_fields: Sequence[FacetFieldType] | None = None, facet_fields: Sequence[FacetFieldType] | None = None,
@@ -1258,6 +1406,8 @@ class AsyncCrud(Generic[ModelType]):
:class:`.CursorPaginatedResponse`. Only used when :class:`.CursorPaginatedResponse`. Only used when
``pagination_type`` is ``CURSOR``. ``pagination_type`` is ``CURSOR``.
items_per_page: Number of items per page (default 20). items_per_page: Number of items per page (default 20).
include_total: When ``False``, skip the ``COUNT`` query;
only applies when ``pagination_type`` is ``OFFSET``.
search: Search query string or :class:`.SearchConfig` object. search: Search query string or :class:`.SearchConfig` object.
search_fields: Fields to search in (overrides class default). search_fields: Fields to search in (overrides class default).
facet_fields: Columns to compute distinct values for (overrides facet_fields: Columns to compute distinct values for (overrides
@@ -1304,6 +1454,7 @@ class AsyncCrud(Generic[ModelType]):
order_by=order_by, order_by=order_by,
page=page, page=page,
items_per_page=items_per_page, items_per_page=items_per_page,
include_total=include_total,
search=search, search=search,
search_fields=search_fields, search_fields=search_fields,
facet_fields=facet_fields, facet_fields=facet_fields,

View File

@@ -56,6 +56,7 @@ def create_db_dependency(
async def get_db() -> AsyncGenerator[AsyncSession, None]: async def get_db() -> AsyncGenerator[AsyncSession, None]:
async with session_maker() as session: async with session_maker() as session:
await session.connection()
yield session yield session
if session.in_transaction(): if session.in_transaction():
await session.commit() await session.commit()

View File

@@ -30,7 +30,7 @@ def init_exceptions_handlers(app: FastAPI) -> FastAPI:
""" """
_register_exception_handlers(app) _register_exception_handlers(app)
_original_openapi = app.openapi _original_openapi = app.openapi
app.openapi = lambda: _patched_openapi(app, _original_openapi) # type: ignore[method-assign] app.openapi = lambda: _patched_openapi(app, _original_openapi) # type: ignore[method-assign] # ty:ignore[invalid-assignment]
return app return app

View File

@@ -2,6 +2,7 @@
from collections.abc import Callable, Sequence from collections.abc import Callable, Sequence
from dataclasses import dataclass, field from dataclasses import dataclass, field
from enum import Enum
from typing import Any, cast from typing import Any, cast
from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import DeclarativeBase
@@ -12,6 +13,13 @@ from .enum import Context
logger = get_logger() logger = get_logger()
def _normalize_contexts(
contexts: list[str | Enum] | tuple[str | Enum, ...],
) -> list[str]:
"""Convert a sequence of any Enum subclass and/or plain strings to a list of strings."""
return [c.value if isinstance(c, Enum) else c for c in contexts]
@dataclass @dataclass
class Fixture: class Fixture:
"""A fixture definition with metadata.""" """A fixture definition with metadata."""
@@ -50,17 +58,42 @@ class FixtureRegistry:
Post(id=1, title="Test", user_id=1), Post(id=1, title="Test", user_id=1),
] ]
``` ```
Fixtures with the same name may be registered for **different** contexts.
When multiple contexts are loaded together, their instances are merged:
```python
@fixtures.register(contexts=[Context.BASE])
def users():
return [User(id=1, username="admin")]
@fixtures.register(contexts=[Context.TESTING])
def users():
return [User(id=2, username="tester")]
# load_fixtures_by_context(..., Context.BASE, Context.TESTING)
# → loads both User(admin) and User(tester) under the "users" name
```
""" """
def __init__( def __init__(
self, self,
contexts: list[str | Context] | None = None, contexts: list[str | Enum] | None = None,
) -> None: ) -> None:
self._fixtures: dict[str, Fixture] = {} self._fixtures: dict[str, list[Fixture]] = {}
self._default_contexts: list[str] | None = ( self._default_contexts: list[str] | None = (
[c.value if isinstance(c, Context) else c for c in contexts] _normalize_contexts(contexts) if contexts else None
if contexts )
else None
def _validate_no_context_overlap(self, name: str, new_contexts: list[str]) -> None:
"""Raise ``ValueError`` if any existing variant for *name* overlaps."""
existing_variants = self._fixtures.get(name, [])
new_set = set(new_contexts)
for variant in existing_variants:
if set(variant.contexts) & new_set:
raise ValueError(
f"Fixture '{name}' already exists in the current registry "
f"with overlapping contexts. Use distinct context sets for "
f"each variant of the same fixture name."
) )
def register( def register(
@@ -69,7 +102,7 @@ class FixtureRegistry:
*, *,
name: str | None = None, name: str | None = None,
depends_on: list[str] | None = None, depends_on: list[str] | None = None,
contexts: list[str | Context] | None = None, contexts: list[str | Enum] | None = None,
) -> Callable[..., Any]: ) -> Callable[..., Any]:
"""Register a fixture function. """Register a fixture function.
@@ -79,7 +112,8 @@ class FixtureRegistry:
func: Fixture function returning list of model instances func: Fixture function returning list of model instances
name: Fixture name (defaults to function name) name: Fixture name (defaults to function name)
depends_on: List of fixture names this depends on depends_on: List of fixture names this depends on
contexts: List of contexts this fixture belongs to contexts: List of contexts this fixture belongs to. Both
:class:`Context` enum values and plain strings are accepted.
Example: Example:
```python ```python
@@ -90,7 +124,6 @@ class FixtureRegistry:
@fixtures.register(depends_on=["roles"], contexts=[Context.TESTING]) @fixtures.register(depends_on=["roles"], contexts=[Context.TESTING])
def test_users(): def test_users():
return [User(id=1, username="test", role_id=1)] return [User(id=1, username="test", role_id=1)]
```
""" """
def decorator( def decorator(
@@ -98,20 +131,21 @@ class FixtureRegistry:
) -> Callable[[], Sequence[DeclarativeBase]]: ) -> Callable[[], Sequence[DeclarativeBase]]:
fixture_name = name or cast(Any, fn).__name__ fixture_name = name or cast(Any, fn).__name__
if contexts is not None: if contexts is not None:
fixture_contexts = [ fixture_contexts = _normalize_contexts(contexts)
c.value if isinstance(c, Context) else c for c in contexts
]
elif self._default_contexts is not None: elif self._default_contexts is not None:
fixture_contexts = self._default_contexts fixture_contexts = self._default_contexts
else: else:
fixture_contexts = [Context.BASE.value] fixture_contexts = [Context.BASE.value]
self._fixtures[fixture_name] = Fixture( self._validate_no_context_overlap(fixture_name, fixture_contexts)
self._fixtures.setdefault(fixture_name, []).append(
Fixture(
name=fixture_name, name=fixture_name,
func=fn, func=fn,
depends_on=depends_on or [], depends_on=depends_on or [],
contexts=fixture_contexts, contexts=fixture_contexts,
) )
)
return fn return fn
if func is not None: if func is not None:
@@ -121,11 +155,14 @@ class FixtureRegistry:
def include_registry(self, registry: "FixtureRegistry") -> None: def include_registry(self, registry: "FixtureRegistry") -> None:
"""Include another `FixtureRegistry` in the same current `FixtureRegistry`. """Include another `FixtureRegistry` in the same current `FixtureRegistry`.
Fixtures with the same name are allowed as long as their context sets
do not overlap. Conflicting contexts raise :class:`ValueError`.
Args: Args:
registry: The `FixtureRegistry` to include registry: The `FixtureRegistry` to include
Raises: Raises:
ValueError: If a fixture name already exists in the current registry ValueError: If a fixture name already exists with overlapping contexts
Example: Example:
```python ```python
@@ -139,31 +176,73 @@ class FixtureRegistry:
registry.include_registry(registry=dev_registry) registry.include_registry(registry=dev_registry)
``` ```
""" """
for name, fixture in registry._fixtures.items(): for name, variants in registry._fixtures.items():
if name in self._fixtures: for fixture in variants:
raise ValueError( self._validate_no_context_overlap(name, fixture.contexts)
f"Fixture '{name}' already exists in the current registry" self._fixtures.setdefault(name, []).append(fixture)
)
self._fixtures[name] = fixture
def get(self, name: str) -> Fixture: def get(self, name: str) -> Fixture:
"""Get a fixture by name.""" """Get a fixture by name.
Raises:
KeyError: If no fixture with *name* is registered.
ValueError: If the fixture has multiple context variants — use
:meth:`get_variants` in that case.
"""
if name not in self._fixtures: if name not in self._fixtures:
raise KeyError(f"Fixture '{name}' not found") raise KeyError(f"Fixture '{name}' not found")
return self._fixtures[name] variants = self._fixtures[name]
if len(variants) > 1:
raise ValueError(
f"Fixture '{name}' has {len(variants)} context variants. "
f"Use get_variants('{name}') to retrieve them."
)
return variants[0]
def get_variants(self, name: str, *contexts: str | Enum) -> list[Fixture]:
"""Return all registered variants for *name*, optionally filtered by context.
Args:
name: Fixture name.
*contexts: If given, only return variants whose context set
intersects with these values. Both :class:`Context` enum
values and plain strings are accepted.
Returns:
List of matching :class:`Fixture` objects (may be empty when a
context filter is applied and nothing matches).
Raises:
KeyError: If no fixture with *name* is registered.
"""
if name not in self._fixtures:
raise KeyError(f"Fixture '{name}' not found")
variants = self._fixtures[name]
if not contexts:
return list(variants)
context_values = set(_normalize_contexts(contexts))
return [v for v in variants if set(v.contexts) & context_values]
def get_all(self) -> list[Fixture]: def get_all(self) -> list[Fixture]:
"""Get all registered fixtures.""" """Get all registered fixtures (all variants of all names)."""
return list(self._fixtures.values()) return [f for variants in self._fixtures.values() for f in variants]
def get_by_context(self, *contexts: str | Context) -> list[Fixture]: def get_by_context(self, *contexts: str | Enum) -> list[Fixture]:
"""Get fixtures for specific contexts.""" """Get fixtures for specific contexts."""
context_values = {c.value if isinstance(c, Context) else c for c in contexts} context_values = set(_normalize_contexts(contexts))
return [f for f in self._fixtures.values() if set(f.contexts) & context_values] return [
f
for variants in self._fixtures.values()
for f in variants
if set(f.contexts) & context_values
]
def resolve_dependencies(self, *names: str) -> list[str]: def resolve_dependencies(self, *names: str) -> list[str]:
"""Resolve fixture dependencies in topological order. """Resolve fixture dependencies in topological order.
When a fixture name has multiple context variants, the union of all
variants' ``depends_on`` lists is used.
Args: Args:
*names: Fixture names to resolve *names: Fixture names to resolve
@@ -185,9 +264,20 @@ class FixtureRegistry:
raise ValueError(f"Circular dependency detected: {name}") raise ValueError(f"Circular dependency detected: {name}")
visiting.add(name) visiting.add(name)
fixture = self.get(name) variants = self._fixtures.get(name)
if variants is None:
raise KeyError(f"Fixture '{name}' not found")
for dep in fixture.depends_on: # Union of depends_on across all variants, preserving first-seen order.
seen_deps: set[str] = set()
all_deps: list[str] = []
for variant in variants:
for dep in variant.depends_on:
if dep not in seen_deps:
all_deps.append(dep)
seen_deps.add(dep)
for dep in all_deps:
visit(dep) visit(dep)
visiting.remove(name) visiting.remove(name)
@@ -199,7 +289,7 @@ class FixtureRegistry:
return resolved return resolved
def resolve_context_dependencies(self, *contexts: str | Context) -> list[str]: def resolve_context_dependencies(self, *contexts: str | Enum) -> list[str]:
"""Resolve all fixtures for contexts with dependencies. """Resolve all fixtures for contexts with dependencies.
Args: Args:
@@ -209,7 +299,9 @@ class FixtureRegistry:
List of fixture names in load order List of fixture names in load order
""" """
context_fixtures = self.get_by_context(*contexts) context_fixtures = self.get_by_context(*contexts)
names = [f.name for f in context_fixtures] # Deduplicate names while preserving first-seen order (a name can
# appear multiple times if it has variants in different contexts).
names = list(dict.fromkeys(f.name for f in context_fixtures))
all_deps: set[str] = set() all_deps: set[str] = set()
for name in names: for name in names:

View File

@@ -1,8 +1,11 @@
"""Fixture loading utilities for database seeding.""" """Fixture loading utilities for database seeding."""
from collections.abc import Callable, Sequence from collections.abc import Callable, Sequence
from enum import Enum
from typing import Any from typing import Any
from sqlalchemy import inspect as sa_inspect
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import DeclarativeBase
@@ -10,23 +13,152 @@ from ..db import get_transaction
from ..logger import get_logger from ..logger import get_logger
from ..types import ModelType from ..types import ModelType
from .enum import LoadStrategy from .enum import LoadStrategy
from .registry import Context, FixtureRegistry from .registry import FixtureRegistry, _normalize_contexts
logger = get_logger() logger = get_logger()
def _instance_to_dict(instance: DeclarativeBase) -> dict[str, Any]:
"""Extract column values from a model instance, skipping unset server-default columns."""
state = sa_inspect(instance)
state_dict = state.dict
result: dict[str, Any] = {}
for prop in state.mapper.column_attrs:
if prop.key not in state_dict:
continue
val = state_dict[prop.key]
if val is None:
col = prop.columns[0]
if col.server_default is not None or (
col.default is not None and col.default.is_callable
):
continue
result[prop.key] = val
return result
def _group_by_type(
instances: list[DeclarativeBase],
) -> list[tuple[type[DeclarativeBase], list[DeclarativeBase]]]:
"""Group instances by their concrete model class, preserving insertion order."""
groups: dict[type[DeclarativeBase], list[DeclarativeBase]] = {}
for instance in instances:
groups.setdefault(type(instance), []).append(instance)
return list(groups.items())
async def _batch_insert(
session: AsyncSession,
model_cls: type[DeclarativeBase],
instances: list[DeclarativeBase],
) -> None:
"""INSERT all instances — raises on conflict (no duplicate handling)."""
dicts = [_instance_to_dict(i) for i in instances]
await session.execute(pg_insert(model_cls).values(dicts))
async def _batch_merge(
session: AsyncSession,
model_cls: type[DeclarativeBase],
instances: list[DeclarativeBase],
) -> None:
"""UPSERT: insert new rows, update existing ones with the provided values."""
mapper = model_cls.__mapper__
pk_names = [col.name for col in mapper.primary_key]
pk_names_set = set(pk_names)
non_pk_cols = [
prop.key
for prop in mapper.column_attrs
if not any(col.name in pk_names_set for col in prop.columns)
]
dicts = [_instance_to_dict(i) for i in instances]
stmt = pg_insert(model_cls).values(dicts)
if non_pk_cols:
stmt = stmt.on_conflict_do_update(
index_elements=pk_names,
set_={col: stmt.excluded[col] for col in non_pk_cols},
)
else:
stmt = stmt.on_conflict_do_nothing(index_elements=pk_names)
await session.execute(stmt)
async def _batch_skip_existing(
session: AsyncSession,
model_cls: type[DeclarativeBase],
instances: list[DeclarativeBase],
) -> list[DeclarativeBase]:
"""INSERT only rows that do not already exist; return the inserted ones."""
mapper = model_cls.__mapper__
pk_names = [col.name for col in mapper.primary_key]
no_pk: list[DeclarativeBase] = []
with_pk_pairs: list[tuple[DeclarativeBase, Any]] = []
for inst in instances:
pk = _get_primary_key(inst)
if pk is None:
no_pk.append(inst)
else:
with_pk_pairs.append((inst, pk))
loaded: list[DeclarativeBase] = list(no_pk)
if no_pk:
await session.execute(
pg_insert(model_cls).values([_instance_to_dict(i) for i in no_pk])
)
if with_pk_pairs:
with_pk = [i for i, _ in with_pk_pairs]
stmt = (
pg_insert(model_cls)
.values([_instance_to_dict(i) for i in with_pk])
.on_conflict_do_nothing(index_elements=pk_names)
)
result = await session.execute(stmt.returning(*mapper.primary_key))
inserted_pks = {row[0] if len(pk_names) == 1 else tuple(row) for row in result}
loaded.extend(inst for inst, pk in with_pk_pairs if pk in inserted_pks)
return loaded
async def _load_ordered( async def _load_ordered(
session: AsyncSession, session: AsyncSession,
registry: FixtureRegistry, registry: FixtureRegistry,
ordered_names: list[str], ordered_names: list[str],
strategy: LoadStrategy, strategy: LoadStrategy,
contexts: tuple[str, ...] | None = None,
) -> dict[str, list[DeclarativeBase]]: ) -> dict[str, list[DeclarativeBase]]:
"""Load fixtures in order.""" """Load fixtures in order using batch Core INSERT statements.
When *contexts* is provided only variants whose context set intersects with
*contexts* are called for each name; their instances are concatenated.
When *contexts* is ``None`` all variants of each name are loaded.
"""
results: dict[str, list[DeclarativeBase]] = {} results: dict[str, list[DeclarativeBase]] = {}
for name in ordered_names: for name in ordered_names:
fixture = registry.get(name) variants = (
instances = list(fixture.func()) registry.get_variants(name, *contexts)
if contexts is not None
else registry.get_variants(name)
)
# Cross-context dependency fallback: if we're loading by context but
# no variant matches (e.g. a "base"-only fixture required by a
# "testing" fixture), load all available variants so the dependency
# is satisfied.
if contexts is not None and not variants:
variants = registry.get_variants(name)
if not variants:
results[name] = []
continue
instances = [inst for v in variants for inst in v.func()]
if not instances: if not instances:
results[name] = [] results[name] = []
@@ -36,25 +168,17 @@ async def _load_ordered(
loaded: list[DeclarativeBase] = [] loaded: list[DeclarativeBase] = []
async with get_transaction(session): async with get_transaction(session):
for instance in instances: for model_cls, group in _group_by_type(instances):
if strategy == LoadStrategy.INSERT: match strategy:
session.add(instance) case LoadStrategy.INSERT:
loaded.append(instance) await _batch_insert(session, model_cls, group)
loaded.extend(group)
elif strategy == LoadStrategy.MERGE: case LoadStrategy.MERGE:
merged = await session.merge(instance) await _batch_merge(session, model_cls, group)
loaded.append(merged) loaded.extend(group)
case LoadStrategy.SKIP_EXISTING:
else: # LoadStrategy.SKIP_EXISTING inserted = await _batch_skip_existing(session, model_cls, group)
pk = _get_primary_key(instance) loaded.extend(inserted)
if pk is not None:
existing = await session.get(type(instance), pk)
if existing is None:
session.add(instance)
loaded.append(instance)
else:
session.add(instance)
loaded.append(instance)
results[name] = loaded results[name] = loaded
logger.info(f"Loaded fixture '{name}': {len(loaded)} {model_name}(s)") logger.info(f"Loaded fixture '{name}': {len(loaded)} {model_name}(s)")
@@ -109,6 +233,8 @@ async def load_fixtures(
) -> dict[str, list[DeclarativeBase]]: ) -> dict[str, list[DeclarativeBase]]:
"""Load specific fixtures by name with dependencies. """Load specific fixtures by name with dependencies.
All context variants of each requested fixture are loaded and merged.
Args: Args:
session: Database session session: Database session
registry: Fixture registry registry: Fixture registry
@@ -125,19 +251,27 @@ async def load_fixtures(
async def load_fixtures_by_context( async def load_fixtures_by_context(
session: AsyncSession, session: AsyncSession,
registry: FixtureRegistry, registry: FixtureRegistry,
*contexts: str | Context, *contexts: str | Enum,
strategy: LoadStrategy = LoadStrategy.MERGE, strategy: LoadStrategy = LoadStrategy.MERGE,
) -> dict[str, list[DeclarativeBase]]: ) -> dict[str, list[DeclarativeBase]]:
"""Load all fixtures for specific contexts. """Load all fixtures for specific contexts.
For each fixture name, only the variants whose context set intersects with
*contexts* are loaded. When a name has variants in multiple of the
requested contexts, their instances are merged before being inserted.
Args: Args:
session: Database session session: Database session
registry: Fixture registry registry: Fixture registry
*contexts: Contexts to load (e.g., Context.BASE, Context.TESTING) *contexts: Contexts to load (e.g., ``Context.BASE``, ``Context.TESTING``,
or plain strings for custom contexts)
strategy: How to handle existing records strategy: How to handle existing records
Returns: Returns:
Dict mapping fixture names to loaded instances Dict mapping fixture names to loaded instances
""" """
context_strings = tuple(_normalize_contexts(contexts))
ordered = registry.resolve_context_dependencies(*contexts) ordered = registry.resolve_context_dependencies(*contexts)
return await _load_ordered(session, registry, ordered, strategy) return await _load_ordered(
session, registry, ordered, strategy, contexts=context_strings
)

View File

@@ -66,7 +66,7 @@ def configure_logging(
_SENTINEL = object() _SENTINEL = object()
def get_logger(name: str | None = _SENTINEL) -> logging.Logger: # type: ignore[assignment] def get_logger(name: str | None = _SENTINEL) -> logging.Logger: # type: ignore[assignment] # ty:ignore[invalid-parameter-default]
"""Return a logger with the given *name*. """Return a logger with the given *name*.
A thin convenience wrapper around :func:`logging.getLogger` that keeps A thin convenience wrapper around :func:`logging.getLogger` that keeps

View File

@@ -1,6 +1,7 @@
"""Field-change monitoring via SQLAlchemy session events.""" """Field-change monitoring via SQLAlchemy session events."""
import asyncio import asyncio
import inspect
import weakref import weakref
from collections.abc import Awaitable from collections.abc import Awaitable
from enum import Enum from enum import Enum
@@ -25,6 +26,7 @@ _SESSION_PENDING_NEW = "_ft_pending_new"
_SESSION_CREATES = "_ft_creates" _SESSION_CREATES = "_ft_creates"
_SESSION_DELETES = "_ft_deletes" _SESSION_DELETES = "_ft_deletes"
_SESSION_UPDATES = "_ft_updates" _SESSION_UPDATES = "_ft_updates"
_SESSION_SAVEPOINT_DEPTH = "_ft_sp_depth"
class ModelEvent(str, Enum): class ModelEvent(str, Enum):
@@ -65,6 +67,14 @@ def _snapshot_column_attrs(obj: Any) -> dict[str, Any]:
} }
def _get_watched_fields(cls: type) -> list[str] | None:
"""Return the watched fields for *cls*, walking the MRO to inherit from parents."""
for klass in cls.__mro__:
if klass in _WATCHED_FIELDS:
return _WATCHED_FIELDS[klass]
return None
def _upsert_changes( def _upsert_changes(
pending: dict[int, tuple[Any, dict[str, dict[str, Any]]]], pending: dict[int, tuple[Any, dict[str, dict[str, Any]]]],
obj: Any, obj: Any,
@@ -83,6 +93,22 @@ def _upsert_changes(
pending[key] = (obj, changes) pending[key] = (obj, changes)
@event.listens_for(AsyncSession.sync_session_class, "after_transaction_create")
def _after_transaction_create(session: Any, transaction: Any) -> None:
if transaction.nested:
session.info[_SESSION_SAVEPOINT_DEPTH] = (
session.info.get(_SESSION_SAVEPOINT_DEPTH, 0) + 1
)
@event.listens_for(AsyncSession.sync_session_class, "after_transaction_end")
def _after_transaction_end(session: Any, transaction: Any) -> None:
if transaction.nested:
depth = session.info.get(_SESSION_SAVEPOINT_DEPTH, 0)
if depth > 0: # pragma: no branch
session.info[_SESSION_SAVEPOINT_DEPTH] = depth - 1
@event.listens_for(AsyncSession.sync_session_class, "after_flush") @event.listens_for(AsyncSession.sync_session_class, "after_flush")
def _after_flush(session: Any, flush_context: Any) -> None: def _after_flush(session: Any, flush_context: Any) -> None:
# New objects: capture references while session.new is still populated. # New objects: capture references while session.new is still populated.
@@ -102,7 +128,7 @@ def _after_flush(session: Any, flush_context: Any) -> None:
continue continue
# None = not in dict = watch all fields; list = specific fields only # None = not in dict = watch all fields; list = specific fields only
watched = _WATCHED_FIELDS.get(type(obj)) watched = _get_watched_fields(type(obj))
changes: dict[str, dict[str, Any]] = {} changes: dict[str, dict[str, Any]] = {}
attrs = ( attrs = (
@@ -169,7 +195,7 @@ def _schedule_with_snapshot(
_sa_set_committed_value(obj, key, value) _sa_set_committed_value(obj, key, value)
try: try:
result = fn(*args) result = fn(*args)
if asyncio.iscoroutine(result): if inspect.isawaitable(result):
await result await result
except Exception as exc: except Exception as exc:
_logger.error(_CALLBACK_ERROR_MSG, exc_info=exc) _logger.error(_CALLBACK_ERROR_MSG, exc_info=exc)
@@ -180,12 +206,24 @@ def _schedule_with_snapshot(
@event.listens_for(AsyncSession.sync_session_class, "after_commit") @event.listens_for(AsyncSession.sync_session_class, "after_commit")
def _after_commit(session: Any) -> None: def _after_commit(session: Any) -> None:
if session.info.get(_SESSION_SAVEPOINT_DEPTH, 0) > 0:
return
creates: list[Any] = session.info.pop(_SESSION_CREATES, []) creates: list[Any] = session.info.pop(_SESSION_CREATES, [])
deletes: list[Any] = session.info.pop(_SESSION_DELETES, []) deletes: list[Any] = session.info.pop(_SESSION_DELETES, [])
field_changes: dict[int, tuple[Any, dict[str, dict[str, Any]]]] = session.info.pop( field_changes: dict[int, tuple[Any, dict[str, dict[str, Any]]]] = session.info.pop(
_SESSION_UPDATES, {} _SESSION_UPDATES, {}
) )
if creates and deletes:
transient_ids = {id(o) for o in creates} & {id(o) for o in deletes}
if transient_ids:
creates = [o for o in creates if id(o) not in transient_ids]
deletes = [o for o in deletes if id(o) not in transient_ids]
field_changes = {
k: v for k, v in field_changes.items() if k not in transient_ids
}
if not creates and not deletes and not field_changes: if not creates and not deletes and not field_changes:
return return

View File

@@ -7,6 +7,7 @@ from contextlib import asynccontextmanager
from typing import Any from typing import Any
from httpx import ASGITransport, AsyncClient from httpx import ASGITransport, AsyncClient
from sqlalchemy import text
from sqlalchemy.engine import make_url from sqlalchemy.engine import make_url
from sqlalchemy.ext.asyncio import ( from sqlalchemy.ext.asyncio import (
AsyncSession, AsyncSession,
@@ -15,13 +16,8 @@ from sqlalchemy.ext.asyncio import (
) )
from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import DeclarativeBase
from sqlalchemy import text from ..db import cleanup_tables as _cleanup_tables
from ..db import create_database
from ..db import (
cleanup_tables as _cleanup_tables,
create_database,
create_db_context,
)
async def cleanup_tables( async def cleanup_tables(
@@ -129,7 +125,8 @@ async def create_worker_database(
worker_url = worker_database_url( worker_url = worker_database_url(
database_url=database_url, default_test_db=default_test_db database_url=database_url, default_test_db=default_test_db
) )
worker_db_name: str = make_url(worker_url).database # type: ignore[assignment] worker_db_name = make_url(worker_url).database
assert worker_db_name is not None
engine = create_async_engine(database_url, isolation_level="AUTOCOMMIT") engine = create_async_engine(database_url, isolation_level="AUTOCOMMIT")
try: try:
@@ -268,15 +265,12 @@ async def create_db_session(
async with engine.begin() as conn: async with engine.begin() as conn:
await conn.run_sync(base.metadata.create_all) await conn.run_sync(base.metadata.create_all)
# Create session using existing db context utility
session_maker = async_sessionmaker(engine, expire_on_commit=expire_on_commit) session_maker = async_sessionmaker(engine, expire_on_commit=expire_on_commit)
get_session = create_db_context(session_maker) async with session_maker() as session:
async with get_session() as session:
yield session yield session
if cleanup: if cleanup:
await cleanup_tables(session, base) await _cleanup_tables(session=session, base=base)
if drop_tables: if drop_tables:
async with engine.begin() as conn: async with engine.begin() as conn:

View File

@@ -1,9 +1,10 @@
"""Base Pydantic schemas for API responses.""" """Base Pydantic schemas for API responses."""
import math
from enum import Enum from enum import Enum
from typing import Annotated, Any, ClassVar, Generic, Literal, TypeVar, Union from typing import Annotated, Any, ClassVar, Generic, Literal, TypeVar, Union
from pydantic import BaseModel, ConfigDict, Field from pydantic import BaseModel, ConfigDict, Field, computed_field
from .types import DataT from .types import DataT
@@ -98,17 +99,29 @@ class OffsetPagination(PydanticBase):
"""Pagination metadata for offset-based list responses. """Pagination metadata for offset-based list responses.
Attributes: Attributes:
total_count: Total number of items across all pages total_count: Total number of items across all pages.
``None`` when ``include_total=False``.
items_per_page: Number of items per page items_per_page: Number of items per page
page: Current page number (1-indexed) page: Current page number (1-indexed)
has_more: Whether there are more pages has_more: Whether there are more pages
pages: Total number of pages
""" """
total_count: int total_count: int | None
items_per_page: int items_per_page: int
page: int page: int
has_more: bool has_more: bool
@computed_field
@property
def pages(self) -> int | None:
"""Total number of pages, or ``None`` when ``total_count`` is unknown."""
if self.total_count is None:
return None
if self.items_per_page == 0:
return 0
return math.ceil(self.total_count / self.items_per_page)
class CursorPagination(PydanticBase): class CursorPagination(PydanticBase):
"""Pagination metadata for cursor-based list responses. """Pagination metadata for cursor-based list responses.
@@ -152,18 +165,18 @@ class PaginatedResponse(BaseResponse, Generic[DataT]):
_discriminated_union_cache: ClassVar[dict[Any, Any]] = {} _discriminated_union_cache: ClassVar[dict[Any, Any]] = {}
def __class_getitem__( # type: ignore[invalid-method-override] def __class_getitem__( # ty:ignore[invalid-method-override]
cls, item: type[Any] | tuple[type[Any], ...] cls, item: type[Any] | tuple[type[Any], ...]
) -> type[Any]: ) -> type[Any]:
if cls is PaginatedResponse and not isinstance(item, TypeVar): if cls is PaginatedResponse and not isinstance(item, TypeVar):
cached = cls._discriminated_union_cache.get(item) cached = cls._discriminated_union_cache.get(item)
if cached is None: if cached is None:
cached = Annotated[ cached = Annotated[
Union[CursorPaginatedResponse[item], OffsetPaginatedResponse[item]], # type: ignore[invalid-type-form] Union[CursorPaginatedResponse[item], OffsetPaginatedResponse[item]], # ty:ignore[invalid-type-form]
Field(discriminator="pagination_type"), Field(discriminator="pagination_type"),
] ]
cls._discriminated_union_cache[item] = cached cls._discriminated_union_cache[item] = cached
return cached # type: ignore[invalid-return-type] return cached # ty:ignore[invalid-return-type]
return super().__class_getitem__(item) return super().__class_getitem__(item)

View File

@@ -321,30 +321,3 @@ async def db_session(engine):
# Drop tables after test # Drop tables after test
async with engine.begin() as conn: async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all) await conn.run_sync(Base.metadata.drop_all)
@pytest.fixture
def sample_role_data() -> RoleCreate:
"""Sample role creation data."""
return RoleCreate(name="admin")
@pytest.fixture
def sample_user_data() -> UserCreate:
"""Sample user creation data."""
return UserCreate(
username="testuser",
email="test@example.com",
is_active=True,
)
@pytest.fixture
def sample_post_data() -> PostCreate:
"""Sample post creation data."""
return PostCreate(
title="Test Post",
content="Test content",
is_published=True,
author_id=uuid.uuid4(),
)

View File

@@ -1759,6 +1759,52 @@ class TestSchemaResponse:
assert result.data[0].username == "pg_user" assert result.data[0].username == "pg_user"
assert not hasattr(result.data[0], "email") assert not hasattr(result.data[0], "email")
@pytest.mark.anyio
async def test_include_total_false_skips_count(self, db_session: AsyncSession):
"""offset_paginate with include_total=False returns total_count=None."""
from fastapi_toolsets.schemas import OffsetPagination
for i in range(5):
await RoleCrud.create(db_session, RoleCreate(name=f"role{i:02d}"))
result = await RoleCrud.offset_paginate(
db_session, items_per_page=10, include_total=False, schema=RoleRead
)
assert isinstance(result.pagination, OffsetPagination)
assert result.pagination.total_count is None
assert len(result.data) == 5
assert result.pagination.has_more is False
@pytest.mark.anyio
async def test_include_total_false_has_more_true(self, db_session: AsyncSession):
"""offset_paginate with include_total=False sets has_more via extra-row probe."""
for i in range(15):
await RoleCrud.create(db_session, RoleCreate(name=f"role{i:02d}"))
result = await RoleCrud.offset_paginate(
db_session, items_per_page=10, include_total=False, schema=RoleRead
)
assert result.pagination.total_count is None
assert result.pagination.has_more is True
assert len(result.data) == 10
@pytest.mark.anyio
async def test_include_total_false_exact_page_boundary(
self, db_session: AsyncSession
):
"""offset_paginate with include_total=False: has_more=False when items == page size."""
for i in range(10):
await RoleCrud.create(db_session, RoleCreate(name=f"role{i:02d}"))
result = await RoleCrud.offset_paginate(
db_session, items_per_page=10, include_total=False, schema=RoleRead
)
assert result.pagination.has_more is False
assert len(result.data) == 10
class TestCursorPaginate: class TestCursorPaginate:
"""Tests for cursor-based pagination via cursor_paginate().""" """Tests for cursor-based pagination via cursor_paginate()."""
@@ -2520,4 +2566,21 @@ class TestPaginate:
db_session, db_session,
pagination_type="unknown", pagination_type="unknown",
schema=RoleRead, schema=RoleRead,
) # type: ignore[no-matching-overload] ) # type: ignore[no-matching-overload] # ty:ignore[no-matching-overload]
@pytest.mark.anyio
async def test_offset_include_total_false(self, db_session: AsyncSession):
"""paginate() passes include_total=False through to offset_paginate."""
from fastapi_toolsets.schemas import OffsetPagination
await RoleCrud.create(db_session, RoleCreate(name="admin"))
result = await RoleCrud.paginate(
db_session,
pagination_type=PaginationType.OFFSET,
include_total=False,
schema=RoleRead,
)
assert isinstance(result.pagination, OffsetPagination)
assert result.pagination.total_count is None

View File

@@ -14,12 +14,14 @@ from fastapi_toolsets.crud import (
get_searchable_fields, get_searchable_fields,
) )
from fastapi_toolsets.exceptions import InvalidOrderFieldError from fastapi_toolsets.exceptions import InvalidOrderFieldError
from fastapi_toolsets.schemas import OffsetPagination from fastapi_toolsets.schemas import OffsetPagination, PaginationType
from .conftest import ( from .conftest import (
Role, Role,
RoleCreate, RoleCreate,
RoleCrud, RoleCrud,
RoleCursorCrud,
RoleRead,
User, User,
UserCreate, UserCreate,
UserCrud, UserCrud,
@@ -988,7 +990,7 @@ class TestFilterParamsSchema:
UserFacetCrud = CrudFactory(User, facet_fields=[User.username]) UserFacetCrud = CrudFactory(User, facet_fields=[User.username])
dep = UserFacetCrud.filter_params() dep = UserFacetCrud.filter_params()
assert dep.__name__ == "UserFilterParams" # type: ignore[union-attr] assert dep.__name__ == "UserFilterParams" # type: ignore[union-attr] # ty:ignore[unresolved-attribute]
@pytest.mark.anyio @pytest.mark.anyio
async def test_integration_with_offset_paginate(self, db_session: AsyncSession): async def test_integration_with_offset_paginate(self, db_session: AsyncSession):
@@ -1193,3 +1195,245 @@ class TestOrderParamsSchema:
assert results[0].username == "alice" assert results[0].username == "alice"
assert results[1].username == "charlie" assert results[1].username == "charlie"
class TestOffsetParamsSchema:
"""Tests for AsyncCrud.offset_params()."""
def test_returns_page_and_items_per_page_params(self):
"""Returned dependency has page and items_per_page params only."""
dep = RoleCrud.offset_params()
param_names = set(inspect.signature(dep).parameters)
assert param_names == {"page", "items_per_page"}
def test_dependency_name_includes_model_name(self):
"""Dependency function is named after the model."""
dep = RoleCrud.offset_params()
assert getattr(dep, "__name__") == "RoleOffsetParams"
def test_default_page_size_reflected_in_items_per_page_default(self):
"""default_page_size is used as the default for items_per_page."""
dep = RoleCrud.offset_params(default_page_size=42)
sig = inspect.signature(dep)
assert sig.parameters["items_per_page"].default.default == 42
def test_max_page_size_reflected_in_items_per_page_le(self):
"""max_page_size is used as le constraint on items_per_page."""
dep = RoleCrud.offset_params(max_page_size=50)
sig = inspect.signature(dep)
le = next(
m.le
for m in sig.parameters["items_per_page"].default.metadata
if hasattr(m, "le")
)
assert le == 50
def test_include_total_not_a_query_param(self):
"""include_total is not exposed as a query parameter."""
dep = RoleCrud.offset_params()
param_names = set(inspect.signature(dep).parameters)
assert "include_total" not in param_names
@pytest.mark.anyio
async def test_include_total_true_forwarded_in_result(self):
"""include_total=True factory arg appears in the resolved dict."""
result = await RoleCrud.offset_params(include_total=True)(
page=1, items_per_page=10
)
assert result["include_total"] is True
@pytest.mark.anyio
async def test_include_total_false_forwarded_in_result(self):
"""include_total=False factory arg appears in the resolved dict."""
result = await RoleCrud.offset_params(include_total=False)(
page=1, items_per_page=10
)
assert result["include_total"] is False
@pytest.mark.anyio
async def test_awaiting_dep_returns_dict(self):
"""Awaiting the dependency returns a dict with page, items_per_page, include_total."""
dep = RoleCrud.offset_params(include_total=False)
result = await dep(page=2, items_per_page=10)
assert result == {"page": 2, "items_per_page": 10, "include_total": False}
@pytest.mark.anyio
async def test_integrates_with_offset_paginate(self, db_session: AsyncSession):
"""offset_params output can be unpacked directly into offset_paginate."""
await RoleCrud.create(db_session, RoleCreate(name="admin"))
dep = RoleCrud.offset_params()
params = await dep(page=1, items_per_page=10)
result = await RoleCrud.offset_paginate(db_session, **params, schema=RoleRead)
assert result.pagination.page == 1
assert result.pagination.items_per_page == 10
class TestCursorParamsSchema:
"""Tests for AsyncCrud.cursor_params()."""
def test_returns_cursor_and_items_per_page_params(self):
"""Returned dependency has cursor and items_per_page params."""
dep = RoleCursorCrud.cursor_params()
param_names = set(inspect.signature(dep).parameters)
assert param_names == {"cursor", "items_per_page"}
def test_dependency_name_includes_model_name(self):
"""Dependency function is named after the model."""
dep = RoleCursorCrud.cursor_params()
assert getattr(dep, "__name__") == "RoleCursorParams"
def test_default_page_size_reflected_in_items_per_page_default(self):
"""default_page_size is used as the default for items_per_page."""
dep = RoleCursorCrud.cursor_params(default_page_size=15)
sig = inspect.signature(dep)
assert sig.parameters["items_per_page"].default.default == 15
def test_max_page_size_reflected_in_items_per_page_le(self):
"""max_page_size is used as le constraint on items_per_page."""
dep = RoleCursorCrud.cursor_params(max_page_size=75)
sig = inspect.signature(dep)
le = next(
m.le
for m in sig.parameters["items_per_page"].default.metadata
if hasattr(m, "le")
)
assert le == 75
def test_cursor_defaults_to_none(self):
"""cursor defaults to None."""
dep = RoleCursorCrud.cursor_params()
sig = inspect.signature(dep)
assert sig.parameters["cursor"].default.default is None
@pytest.mark.anyio
async def test_awaiting_dep_returns_dict(self):
"""Awaiting the dependency returns a dict with cursor and items_per_page."""
dep = RoleCursorCrud.cursor_params()
result = await dep(cursor=None, items_per_page=5)
assert result == {"cursor": None, "items_per_page": 5}
@pytest.mark.anyio
async def test_integrates_with_cursor_paginate(self, db_session: AsyncSession):
"""cursor_params output can be unpacked directly into cursor_paginate."""
await RoleCrud.create(db_session, RoleCreate(name="admin"))
dep = RoleCursorCrud.cursor_params()
params = await dep(cursor=None, items_per_page=10)
result = await RoleCursorCrud.cursor_paginate(
db_session, **params, schema=RoleRead
)
assert result.pagination.items_per_page == 10
class TestPaginateParamsSchema:
"""Tests for AsyncCrud.paginate_params()."""
def test_returns_all_params(self):
"""Returned dependency has pagination_type, page, cursor, items_per_page (no include_total)."""
dep = RoleCursorCrud.paginate_params()
param_names = set(inspect.signature(dep).parameters)
assert param_names == {"pagination_type", "page", "cursor", "items_per_page"}
def test_dependency_name_includes_model_name(self):
"""Dependency function is named after the model."""
dep = RoleCursorCrud.paginate_params()
assert getattr(dep, "__name__") == "RolePaginateParams"
def test_default_pagination_type(self):
"""default_pagination_type is reflected in pagination_type default."""
from fastapi_toolsets.schemas import PaginationType
dep = RoleCursorCrud.paginate_params(
default_pagination_type=PaginationType.CURSOR
)
sig = inspect.signature(dep)
assert (
sig.parameters["pagination_type"].default.default == PaginationType.CURSOR
)
def test_default_page_size(self):
"""default_page_size is reflected in items_per_page default."""
dep = RoleCursorCrud.paginate_params(default_page_size=15)
sig = inspect.signature(dep)
assert sig.parameters["items_per_page"].default.default == 15
def test_max_page_size_le_constraint(self):
"""max_page_size is used as le constraint on items_per_page."""
dep = RoleCursorCrud.paginate_params(max_page_size=60)
sig = inspect.signature(dep)
le = next(
m.le
for m in sig.parameters["items_per_page"].default.metadata
if hasattr(m, "le")
)
assert le == 60
def test_include_total_not_a_query_param(self):
"""include_total is not exposed as a query parameter."""
dep = RoleCursorCrud.paginate_params()
assert "include_total" not in set(inspect.signature(dep).parameters)
@pytest.mark.anyio
async def test_include_total_forwarded_in_result(self):
"""include_total factory arg appears in the resolved dict."""
result_true = await RoleCursorCrud.paginate_params(include_total=True)(
pagination_type=PaginationType.OFFSET,
page=1,
cursor=None,
items_per_page=10,
)
result_false = await RoleCursorCrud.paginate_params(include_total=False)(
pagination_type=PaginationType.OFFSET,
page=1,
cursor=None,
items_per_page=10,
)
assert result_true["include_total"] is True
assert result_false["include_total"] is False
@pytest.mark.anyio
async def test_awaiting_dep_returns_dict(self):
"""Awaiting the dependency returns a dict with all pagination keys."""
dep = RoleCursorCrud.paginate_params()
result = await dep(
pagination_type=PaginationType.OFFSET,
page=2,
cursor=None,
items_per_page=10,
)
assert result == {
"pagination_type": PaginationType.OFFSET,
"page": 2,
"cursor": None,
"items_per_page": 10,
"include_total": True,
}
@pytest.mark.anyio
async def test_integrates_with_paginate_offset(self, db_session: AsyncSession):
"""paginate_params output unpacks into paginate() for offset strategy."""
from fastapi_toolsets.schemas import OffsetPagination
await RoleCrud.create(db_session, RoleCreate(name="admin"))
params = await RoleCursorCrud.paginate_params()(
pagination_type=PaginationType.OFFSET,
page=1,
cursor=None,
items_per_page=10,
)
result = await RoleCursorCrud.paginate(db_session, **params, schema=RoleRead)
assert isinstance(result.pagination, OffsetPagination)
@pytest.mark.anyio
async def test_integrates_with_paginate_cursor(self, db_session: AsyncSession):
"""paginate_params output unpacks into paginate() for cursor strategy."""
from fastapi_toolsets.schemas import CursorPagination
await RoleCrud.create(db_session, RoleCreate(name="admin"))
params = await RoleCursorCrud.paginate_params()(
pagination_type=PaginationType.CURSOR,
page=1,
cursor=None,
items_per_page=10,
)
result = await RoleCursorCrud.paginate(db_session, **params, schema=RoleRead)
assert isinstance(result.pagination, CursorPagination)

View File

@@ -68,6 +68,55 @@ class TestCreateDbDependency:
await conn.run_sync(Base.metadata.drop_all) await conn.run_sync(Base.metadata.drop_all)
await engine.dispose() await engine.dispose()
@pytest.mark.anyio
async def test_in_transaction_on_yield(self):
"""Session is already in a transaction when the endpoint body starts."""
engine = create_async_engine(DATABASE_URL, echo=False)
session_factory = async_sessionmaker(engine, expire_on_commit=False)
get_db = create_db_dependency(session_factory)
async for session in get_db():
assert session.in_transaction()
break
await engine.dispose()
@pytest.mark.anyio
async def test_update_after_lock_tables_is_persisted(self):
"""Changes made after lock_tables exits (before endpoint returns) are committed.
Regression: without the auto-begin fix, lock_tables would start and commit a
real outer transaction, leaving the session idle. Any modifications after that
point were silently dropped.
"""
engine = create_async_engine(DATABASE_URL, echo=False)
session_factory = async_sessionmaker(engine, expire_on_commit=False)
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
try:
get_db = create_db_dependency(session_factory)
async for session in get_db():
async with lock_tables(session, [Role]):
role = Role(name="lock_then_update")
session.add(role)
await session.flush()
# lock_tables has exited — outer transaction must still be open
assert session.in_transaction()
role.name = "updated_after_lock"
async with session_factory() as verify:
result = await RoleCrud.first(
verify, [Role.name == "updated_after_lock"]
)
assert result is not None
finally:
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await engine.dispose()
class TestCreateDbContext: class TestCreateDbContext:
"""Tests for create_db_context.""" """Tests for create_db_context."""
@@ -363,7 +412,8 @@ class TestCreateDatabase:
.set(database="test_create_db_general") .set(database="test_create_db_general")
.render_as_string(hide_password=False) .render_as_string(hide_password=False)
) )
expected_db: str = make_url(target_url).database # type: ignore[assignment] expected_db = make_url(target_url).database
assert expected_db is not None
engine = create_async_engine(DATABASE_URL, isolation_level="AUTOCOMMIT") engine = create_async_engine(DATABASE_URL, isolation_level="AUTOCOMMIT")
try: try:

View File

@@ -20,7 +20,7 @@ from .conftest import Role, RoleCreate, RoleCrud, User
async def mock_get_db() -> AsyncGenerator[AsyncSession, None]: async def mock_get_db() -> AsyncGenerator[AsyncSession, None]:
"""Mock session dependency for testing.""" """Mock session dependency for testing."""
yield None yield None # type: ignore[misc] # ty:ignore[invalid-yield]
MockSessionDep = Annotated[AsyncSession, Depends(mock_get_db)] MockSessionDep = Annotated[AsyncSession, Depends(mock_get_db)]

View File

@@ -10,12 +10,13 @@ import datetime
import pytest import pytest
from fastapi import FastAPI from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient from httpx import ASGITransport, AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine from sqlalchemy.ext.asyncio import AsyncSession
from docs_src.examples.pagination_search.db import get_db from docs_src.examples.pagination_search.db import get_db
from docs_src.examples.pagination_search.models import Article, Base, Category from docs_src.examples.pagination_search.models import Article, Base, Category
from docs_src.examples.pagination_search.routes import router from docs_src.examples.pagination_search.routes import router
from fastapi_toolsets.exceptions import init_exceptions_handlers from fastapi_toolsets.exceptions import init_exceptions_handlers
from fastapi_toolsets.pytest import create_db_session
from .conftest import DATABASE_URL from .conftest import DATABASE_URL
@@ -35,20 +36,8 @@ def build_app(session: AsyncSession) -> FastAPI:
@pytest.fixture(scope="function") @pytest.fixture(scope="function")
async def ex_db_session(): async def ex_db_session():
"""Isolated session for the example models (separate tables from conftest).""" """Isolated session for the example models (separate tables from conftest)."""
engine = create_async_engine(DATABASE_URL, echo=False) async with create_db_session(DATABASE_URL, Base) as session:
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
session_factory = async_sessionmaker(engine, expire_on_commit=False)
session = session_factory()
try:
yield session yield session
finally:
await session.close()
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await engine.dispose()
@pytest.fixture @pytest.fixture
@@ -108,7 +97,7 @@ class TestAppSessionDep:
gen = get_db() gen = get_db()
session = await gen.__anext__() session = await gen.__anext__()
assert isinstance(session, AsyncSession) assert isinstance(session, AsyncSession)
await session.close() await gen.aclose()
class TestOffsetPagination: class TestOffsetPagination:

View File

@@ -1,6 +1,7 @@
"""Tests for fastapi_toolsets.fixtures module.""" """Tests for fastapi_toolsets.fixtures module."""
import uuid import uuid
from enum import Enum
import pytest import pytest
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
@@ -14,9 +15,22 @@ from fastapi_toolsets.fixtures import (
load_fixtures_by_context, load_fixtures_by_context,
) )
from fastapi_toolsets.fixtures.utils import _get_primary_key from fastapi_toolsets.fixtures.utils import _get_primary_key, _instance_to_dict
from .conftest import IntRole, Permission, Role, User from .conftest import IntRole, Permission, Role, RoleCrud, User, UserCrud
class AppContext(str, Enum):
"""Example user-defined str+Enum context."""
STAGING = "staging"
DEMO = "demo"
class PlainEnumContext(Enum):
"""Example user-defined plain Enum context (no str mixin)."""
STAGING = "staging"
class TestContext: class TestContext:
@@ -39,6 +53,86 @@ class TestContext:
assert Context.TESTING.value == "testing" assert Context.TESTING.value == "testing"
class TestCustomEnumContext:
"""Custom Enum types are accepted wherever Context/str are expected."""
def test_cannot_subclass_context_with_members(self):
"""Python prohibits extending an Enum that already has members."""
with pytest.raises(TypeError):
class MyContext(Context): # noqa: F841 # ty: ignore[subclass-of-final-class]
STAGING = "staging"
def test_custom_enum_values_interchangeable_with_context(self):
"""A custom enum with the same .value as a built-in Context member is
treated as the same context — fixtures registered under one are found
by the other."""
class AppContextFull(str, Enum):
BASE = "base"
STAGING = "staging"
registry = FixtureRegistry()
@registry.register(contexts=[Context.BASE])
def roles():
return []
# AppContextFull.BASE has value "base" — same as Context.BASE
fixtures = registry.get_by_context(AppContextFull.BASE)
assert len(fixtures) == 1
def test_custom_enum_registry_default_contexts(self):
"""FixtureRegistry(contexts=[...]) accepts a custom Enum."""
registry = FixtureRegistry(contexts=[AppContext.STAGING])
@registry.register
def data():
return []
fixture = registry.get("data")
assert fixture.contexts == ["staging"]
def test_custom_enum_resolve_context_dependencies(self):
"""resolve_context_dependencies accepts a custom Enum context."""
registry = FixtureRegistry()
@registry.register(contexts=[AppContext.STAGING])
def staging_roles():
return []
order = registry.resolve_context_dependencies(AppContext.STAGING)
assert "staging_roles" in order
@pytest.mark.anyio
async def test_custom_enum_e2e(self, db_session: AsyncSession):
"""End-to-end: register with custom Enum, load with the same Enum."""
registry = FixtureRegistry()
@registry.register(contexts=[AppContext.STAGING])
def staging_roles():
return [Role(id=uuid.uuid4(), name="staging-admin")]
result = await load_fixtures_by_context(
db_session, registry, AppContext.STAGING
)
assert len(result["staging_roles"]) == 1
@pytest.mark.anyio
async def test_plain_enum_e2e(self, db_session: AsyncSession):
"""End-to-end: register with plain Enum, load with the same Enum."""
registry = FixtureRegistry()
@registry.register(contexts=[PlainEnumContext.STAGING])
def staging_roles():
return [Role(id=uuid.uuid4(), name="plain-staging-admin")]
result = await load_fixtures_by_context(
db_session, registry, PlainEnumContext.STAGING
)
assert len(result["staging_roles"]) == 1
class TestLoadStrategy: class TestLoadStrategy:
"""Tests for LoadStrategy enum.""" """Tests for LoadStrategy enum."""
@@ -407,6 +501,37 @@ class TestDependencyResolution:
with pytest.raises(ValueError, match="Circular dependency"): with pytest.raises(ValueError, match="Circular dependency"):
registry.resolve_dependencies("a") registry.resolve_dependencies("a")
def test_resolve_raises_for_unknown_dependency(self):
"""KeyError when depends_on references an unregistered fixture."""
registry = FixtureRegistry()
@registry.register(depends_on=["ghost"])
def users():
return []
with pytest.raises(KeyError, match="ghost"):
registry.resolve_dependencies("users")
def test_resolve_deduplicates_shared_depends_on_across_variants(self):
"""A dep shared by two same-name variants appears only once in the order."""
registry = FixtureRegistry()
@registry.register(contexts=[Context.BASE])
def roles():
return []
@registry.register(depends_on=["roles"], contexts=[Context.BASE])
def items():
return []
@registry.register(depends_on=["roles"], contexts=[Context.TESTING])
def items(): # noqa: F811
return []
order = registry.resolve_dependencies("items")
assert order.count("roles") == 1
assert order.index("roles") < order.index("items")
def test_resolve_context_dependencies(self): def test_resolve_context_dependencies(self):
"""Resolve all fixtures for a context with dependencies.""" """Resolve all fixtures for a context with dependencies."""
registry = FixtureRegistry() registry = FixtureRegistry()
@@ -447,8 +572,6 @@ class TestLoadFixtures:
assert "roles" in result assert "roles" in result
assert len(result["roles"]) == 2 assert len(result["roles"]) == 2
from .conftest import RoleCrud
count = await RoleCrud.count(db_session) count = await RoleCrud.count(db_session)
assert count == 2 assert count == 2
@@ -479,8 +602,6 @@ class TestLoadFixtures:
assert "roles" in result assert "roles" in result
assert "users" in result assert "users" in result
from .conftest import RoleCrud, UserCrud
assert await RoleCrud.count(db_session) == 1 assert await RoleCrud.count(db_session) == 1
assert await UserCrud.count(db_session) == 1 assert await UserCrud.count(db_session) == 1
@@ -497,8 +618,6 @@ class TestLoadFixtures:
await load_fixtures(db_session, registry, "roles", strategy=LoadStrategy.MERGE) await load_fixtures(db_session, registry, "roles", strategy=LoadStrategy.MERGE)
await load_fixtures(db_session, registry, "roles", strategy=LoadStrategy.MERGE) await load_fixtures(db_session, registry, "roles", strategy=LoadStrategy.MERGE)
from .conftest import RoleCrud
count = await RoleCrud.count(db_session) count = await RoleCrud.count(db_session)
assert count == 1 assert count == 1
@@ -526,8 +645,6 @@ class TestLoadFixtures:
db_session, registry, "roles", strategy=LoadStrategy.SKIP_EXISTING db_session, registry, "roles", strategy=LoadStrategy.SKIP_EXISTING
) )
from .conftest import RoleCrud
role = await RoleCrud.first(db_session, [Role.id == role_id]) role = await RoleCrud.first(db_session, [Role.id == role_id])
assert role is not None assert role is not None
assert role.name == "original" assert role.name == "original"
@@ -553,8 +670,6 @@ class TestLoadFixtures:
assert "roles" in result assert "roles" in result
assert len(result["roles"]) == 2 assert len(result["roles"]) == 2
from .conftest import RoleCrud
count = await RoleCrud.count(db_session) count = await RoleCrud.count(db_session)
assert count == 2 assert count == 2
@@ -594,8 +709,6 @@ class TestLoadFixtures:
assert "roles" in result assert "roles" in result
assert "other_roles" in result assert "other_roles" in result
from .conftest import RoleCrud
count = await RoleCrud.count(db_session) count = await RoleCrud.count(db_session)
assert count == 2 assert count == 2
@@ -660,8 +773,6 @@ class TestLoadFixturesByContext:
await load_fixtures_by_context(db_session, registry, Context.BASE) await load_fixtures_by_context(db_session, registry, Context.BASE)
from .conftest import RoleCrud
count = await RoleCrud.count(db_session) count = await RoleCrud.count(db_session)
assert count == 1 assert count == 1
@@ -688,8 +799,6 @@ class TestLoadFixturesByContext:
db_session, registry, Context.BASE, Context.TESTING db_session, registry, Context.BASE, Context.TESTING
) )
from .conftest import RoleCrud
count = await RoleCrud.count(db_session) count = await RoleCrud.count(db_session)
assert count == 2 assert count == 2
@@ -717,8 +826,6 @@ class TestLoadFixturesByContext:
await load_fixtures_by_context(db_session, registry, Context.TESTING) await load_fixtures_by_context(db_session, registry, Context.TESTING)
from .conftest import RoleCrud, UserCrud
assert await RoleCrud.count(db_session) == 1 assert await RoleCrud.count(db_session) == 1
assert await UserCrud.count(db_session) == 1 assert await UserCrud.count(db_session) == 1
@@ -813,3 +920,82 @@ class TestGetPrimaryKey:
instance = Permission(subject="post") # action is None instance = Permission(subject="post") # action is None
pk = _get_primary_key(instance) pk = _get_primary_key(instance)
assert pk is None assert pk is None
class TestRegistryGetVariants:
"""Tests for FixtureRegistry.get and get_variants edge cases."""
def test_get_raises_value_error_for_multi_variant(self):
"""get() raises ValueError when the fixture has multiple context variants."""
registry = FixtureRegistry()
@registry.register(contexts=[Context.BASE])
def items():
return []
@registry.register(contexts=[Context.TESTING])
def items(): # noqa: F811
return []
with pytest.raises(ValueError, match="get_variants"):
registry.get("items")
def test_get_variants_raises_key_error_for_unknown(self):
"""get_variants() raises KeyError for an unregistered name."""
registry = FixtureRegistry()
with pytest.raises(KeyError, match="not found"):
registry.get_variants("no_such_fixture")
class TestInstanceToDict:
"""Unit tests for the _instance_to_dict helper."""
def test_explicit_values_included(self):
"""All explicitly set column values appear in the result."""
role_id = uuid.uuid4()
instance = Role(id=role_id, name="admin")
d = _instance_to_dict(instance)
assert d["id"] == role_id
assert d["name"] == "admin"
def test_callable_default_none_excluded(self):
"""A column whose value is None but has a callable Python-side default
(e.g. ``default=uuid.uuid4``) is excluded so the DB generates it."""
instance = Role(id=None, name="admin")
d = _instance_to_dict(instance)
assert "id" not in d
assert d["name"] == "admin"
def test_nullable_none_included(self):
"""None on a nullable column with no default is kept (explicit NULL)."""
instance = User(id=uuid.uuid4(), username="u", email="e@e.com", role_id=None)
d = _instance_to_dict(instance)
assert "role_id" in d
assert d["role_id"] is None
class TestBatchMergeNonPkColumns:
"""Batch MERGE on a model with no non-PK columns (PK-only table)."""
@pytest.mark.anyio
async def test_merge_pk_only_model(self, db_session: AsyncSession):
"""MERGE strategy on a PK-only model uses on_conflict_do_nothing."""
registry = FixtureRegistry()
@registry.register
def permissions():
return [
Permission(subject="post", action="read"),
Permission(subject="post", action="write"),
]
result = await load_fixtures(
db_session, registry, "permissions", strategy=LoadStrategy.MERGE
)
assert len(result["permissions"]) == 2
# Run again — conflicts are silently ignored.
result2 = await load_fixtures(
db_session, registry, "permissions", strategy=LoadStrategy.MERGE
)
assert len(result2["permissions"]) == 2

View File

@@ -101,7 +101,7 @@ class TestMetricsImportGuard:
with patch("builtins.__import__", side_effect=blocking_import): with patch("builtins.__import__", side_effect=blocking_import):
mod = importlib.import_module("fastapi_toolsets.metrics") mod = importlib.import_module("fastapi_toolsets.metrics")
with pytest.raises(ImportError, match="prometheus_client"): with pytest.raises(ImportError, match="prometheus_client"):
mod.init_metrics(None, None) # type: ignore[arg-type] mod.init_metrics(None, None) # type: ignore[arg-type] # ty:ignore[invalid-argument-type]
finally: finally:
for key in list(sys.modules): for key in list(sys.modules):
if key.startswith("fastapi_toolsets.metrics"): if key.startswith("fastapi_toolsets.metrics"):
@@ -171,8 +171,15 @@ class TestPytestImportGuard:
class TestCliImportGuard: class TestCliImportGuard:
"""Tests for CLI module import guard when typer is missing.""" """Tests for CLI module import guard when typer is missing."""
def test_import_raises_without_typer(self): @pytest.mark.parametrize(
"""Importing cli.app raises when typer is missing.""" "expected_match",
[
"typer",
r"pip install fastapi-toolsets\[cli\]",
],
)
def test_import_raises_without_typer(self, expected_match):
"""Importing cli.app raises when typer is missing, with an informative error message."""
saved, blocking_import = _reload_without_package( saved, blocking_import = _reload_without_package(
"fastapi_toolsets.cli.app", ["typer"] "fastapi_toolsets.cli.app", ["typer"]
) )
@@ -186,33 +193,7 @@ class TestCliImportGuard:
try: try:
with patch("builtins.__import__", side_effect=blocking_import): with patch("builtins.__import__", side_effect=blocking_import):
with pytest.raises(ImportError, match="typer"): with pytest.raises(ImportError, match=expected_match):
importlib.import_module("fastapi_toolsets.cli.app")
finally:
for key in list(sys.modules):
if key.startswith("fastapi_toolsets.cli.app") or key.startswith(
"fastapi_toolsets.cli.config"
):
sys.modules.pop(key, None)
sys.modules.update(saved)
def test_error_message_suggests_cli_extra(self):
"""Error message suggests installing the cli extra."""
saved, blocking_import = _reload_without_package(
"fastapi_toolsets.cli.app", ["typer"]
)
config_keys = [
k for k in sys.modules if k.startswith("fastapi_toolsets.cli.config")
]
for key in config_keys:
if key not in saved:
saved[key] = sys.modules.pop(key)
try:
with patch("builtins.__import__", side_effect=blocking_import):
with pytest.raises(
ImportError, match=r"pip install fastapi-toolsets\[cli\]"
):
importlib.import_module("fastapi_toolsets.cli.app") importlib.import_module("fastapi_toolsets.cli.app")
finally: finally:
for key in list(sys.modules): for key in list(sys.modules):

View File

@@ -1,6 +1,5 @@
"""Tests for fastapi_toolsets.metrics module.""" """Tests for fastapi_toolsets.metrics module."""
import os
import tempfile import tempfile
from unittest.mock import AsyncMock, MagicMock from unittest.mock import AsyncMock, MagicMock
@@ -287,6 +286,16 @@ class TestIncludeRegistry:
class TestInitMetrics: class TestInitMetrics:
"""Tests for init_metrics function.""" """Tests for init_metrics function."""
@pytest.fixture
def metrics_client(self):
"""Create a FastAPI app with MetricsRegistry and return a TestClient."""
app = FastAPI()
registry = MetricsRegistry()
init_metrics(app, registry)
client = TestClient(app)
yield client
client.close()
def test_returns_app(self): def test_returns_app(self):
"""Returns the FastAPI app.""" """Returns the FastAPI app."""
app = FastAPI() app = FastAPI()
@@ -294,26 +303,14 @@ class TestInitMetrics:
result = init_metrics(app, registry) result = init_metrics(app, registry)
assert result is app assert result is app
def test_metrics_endpoint_responds(self): def test_metrics_endpoint_responds(self, metrics_client):
"""The /metrics endpoint returns 200.""" """The /metrics endpoint returns 200."""
app = FastAPI() response = metrics_client.get("/metrics")
registry = MetricsRegistry()
init_metrics(app, registry)
client = TestClient(app)
response = client.get("/metrics")
assert response.status_code == 200 assert response.status_code == 200
def test_metrics_endpoint_content_type(self): def test_metrics_endpoint_content_type(self, metrics_client):
"""The /metrics endpoint returns prometheus content type.""" """The /metrics endpoint returns prometheus content type."""
app = FastAPI() response = metrics_client.get("/metrics")
registry = MetricsRegistry()
init_metrics(app, registry)
client = TestClient(app)
response = client.get("/metrics")
assert "text/plain" in response.headers["content-type"] assert "text/plain" in response.headers["content-type"]
def test_custom_path(self): def test_custom_path(self):
@@ -445,11 +442,10 @@ class TestInitMetrics:
class TestMultiProcessMode: class TestMultiProcessMode:
"""Tests for multi-process Prometheus mode.""" """Tests for multi-process Prometheus mode."""
def test_multiprocess_with_env_var(self): def test_multiprocess_with_env_var(self, monkeypatch):
"""Multi-process mode works when PROMETHEUS_MULTIPROC_DIR is set.""" """Multi-process mode works when PROMETHEUS_MULTIPROC_DIR is set."""
with tempfile.TemporaryDirectory() as tmpdir: with tempfile.TemporaryDirectory() as tmpdir:
os.environ["PROMETHEUS_MULTIPROC_DIR"] = tmpdir monkeypatch.setenv("PROMETHEUS_MULTIPROC_DIR", tmpdir)
try:
# Use a separate registry to avoid conflicts with default # Use a separate registry to avoid conflicts with default
prom_registry = CollectorRegistry() prom_registry = CollectorRegistry()
app = FastAPI() app = FastAPI()
@@ -469,12 +465,10 @@ class TestMultiProcessMode:
response = client.get("/metrics") response = client.get("/metrics")
assert response.status_code == 200 assert response.status_code == 200
finally:
del os.environ["PROMETHEUS_MULTIPROC_DIR"]
def test_single_process_without_env_var(self): def test_single_process_without_env_var(self, monkeypatch):
"""Single-process mode when PROMETHEUS_MULTIPROC_DIR is not set.""" """Single-process mode when PROMETHEUS_MULTIPROC_DIR is not set."""
os.environ.pop("PROMETHEUS_MULTIPROC_DIR", None) monkeypatch.delenv("PROMETHEUS_MULTIPROC_DIR", raising=False)
app = FastAPI() app = FastAPI()
registry = MetricsRegistry() registry = MetricsRegistry()

View File

@@ -6,27 +6,28 @@ from contextlib import suppress
from types import SimpleNamespace from types import SimpleNamespace
from unittest.mock import patch from unittest.mock import patch
import fastapi_toolsets.models.watched as _watched_module
import pytest import pytest
from sqlalchemy import String from sqlalchemy import String
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from fastapi_toolsets.pytest import create_db_session
import fastapi_toolsets.models.watched as _watched_module
from fastapi_toolsets.models import ( from fastapi_toolsets.models import (
CreatedAtMixin, CreatedAtMixin,
ModelEvent, ModelEvent,
TimestampMixin, TimestampMixin,
UpdatedAtMixin,
UUIDMixin, UUIDMixin,
UUIDv7Mixin, UUIDv7Mixin,
UpdatedAtMixin,
WatchedFieldsMixin, WatchedFieldsMixin,
watch, watch,
) )
from fastapi_toolsets.models.watched import ( from fastapi_toolsets.models.watched import (
_SESSION_CREATES, _SESSION_CREATES,
_SESSION_DELETES, _SESSION_DELETES,
_SESSION_UPDATES,
_SESSION_PENDING_NEW, _SESSION_PENDING_NEW,
_SESSION_UPDATES,
_after_commit, _after_commit,
_after_flush, _after_flush,
_after_flush_postexec, _after_flush_postexec,
@@ -81,8 +82,6 @@ class FullMixinModel(MixinBase, UUIDMixin, UpdatedAtMixin):
name: Mapped[str] = mapped_column(String(50)) name: Mapped[str] = mapped_column(String(50))
# --- WatchedFieldsMixin test models ---
_test_events: list[dict] = [] _test_events: list[dict] = []
@@ -145,6 +144,66 @@ class NonWatchedModel(MixinBase):
value: Mapped[str] = mapped_column(String(50)) value: Mapped[str] = mapped_column(String(50))
_poly_events: list[dict] = []
class PolyAnimal(MixinBase, UUIDMixin, WatchedFieldsMixin):
"""Base class for STI polymorphism tests."""
__tablename__ = "mixin_poly_animals"
__mapper_args__ = {"polymorphic_on": "kind", "polymorphic_identity": "animal"}
kind: Mapped[str] = mapped_column(String(50))
name: Mapped[str] = mapped_column(String(50))
async def on_create(self) -> None:
_poly_events.append(
{"event": "create", "type": type(self).__name__, "obj_id": self.id}
)
async def on_delete(self) -> None:
_poly_events.append(
{"event": "delete", "type": type(self).__name__, "obj_id": self.id}
)
class PolyDog(PolyAnimal):
"""STI subclass — shares the same table as PolyAnimal."""
__mapper_args__ = {"polymorphic_identity": "dog"}
_watch_inherit_events: list[dict] = []
@watch("status")
class WatchParent(MixinBase, UUIDMixin, WatchedFieldsMixin):
"""Base class with @watch("status") — subclasses should inherit this filter."""
__tablename__ = "mixin_watch_parent"
__mapper_args__ = {"polymorphic_on": "kind", "polymorphic_identity": "parent"}
kind: Mapped[str] = mapped_column(String(50))
status: Mapped[str] = mapped_column(String(50))
other: Mapped[str] = mapped_column(String(50))
async def on_update(self, changes: dict) -> None:
_watch_inherit_events.append({"type": type(self).__name__, "changes": changes})
class WatchChild(WatchParent):
"""STI subclass that does NOT redeclare @watch — should inherit parent's filter."""
__mapper_args__ = {"polymorphic_identity": "child"}
@watch("other")
class WatchOverride(WatchParent):
"""STI subclass that overrides @watch with a different field."""
__mapper_args__ = {"polymorphic_identity": "override"}
_attr_access_events: list[dict] = [] _attr_access_events: list[dict] = []
@@ -172,6 +231,7 @@ class AttrAccessModel(MixinBase, UUIDMixin, WatchedFieldsMixin):
_sync_events: list[dict] = [] _sync_events: list[dict] = []
_future_events: list[str] = []
@watch("status") @watch("status")
@@ -192,41 +252,33 @@ class SyncCallbackModel(MixinBase, UUIDMixin, WatchedFieldsMixin):
_sync_events.append({"event": "update", "changes": changes}) _sync_events.append({"event": "update", "changes": changes})
class FutureCallbackModel(MixinBase, UUIDMixin, WatchedFieldsMixin):
"""Model whose on_create returns an asyncio.Task (awaitable, not a coroutine)."""
__tablename__ = "mixin_future_callback_models"
name: Mapped[str] = mapped_column(String(50))
def on_create(self) -> "asyncio.Task[None]":
async def _work() -> None:
_future_events.append("created")
return asyncio.ensure_future(_work())
@pytest.fixture(scope="function") @pytest.fixture(scope="function")
async def mixin_session(): async def mixin_session():
engine = create_async_engine(DATABASE_URL, echo=False) async with create_db_session(DATABASE_URL, MixinBase) as session:
async with engine.begin() as conn:
await conn.run_sync(MixinBase.metadata.create_all)
session_factory = async_sessionmaker(engine, expire_on_commit=False)
session = session_factory()
try:
yield session yield session
finally:
await session.close()
async with engine.begin() as conn:
await conn.run_sync(MixinBase.metadata.drop_all)
await engine.dispose()
@pytest.fixture(scope="function") @pytest.fixture(scope="function")
async def mixin_session_expire(): async def mixin_session_expire():
"""Session with expire_on_commit=True (the default) to exercise attribute access after commit.""" """Session with expire_on_commit=True (the default) to exercise attribute access after commit."""
engine = create_async_engine(DATABASE_URL, echo=False) async with create_db_session(
async with engine.begin() as conn: DATABASE_URL, MixinBase, expire_on_commit=True
await conn.run_sync(MixinBase.metadata.create_all) ) as session:
session_factory = async_sessionmaker(engine, expire_on_commit=True)
session = session_factory()
try:
yield session yield session
finally:
await session.close()
async with engine.begin() as conn:
await conn.run_sync(MixinBase.metadata.drop_all)
await engine.dispose()
class TestUUIDMixin: class TestUUIDMixin:
@@ -473,6 +525,67 @@ class TestWatchDecorator:
watch() watch()
class TestWatchInheritance:
@pytest.fixture(autouse=True)
def clear_events(self):
_watch_inherit_events.clear()
yield
_watch_inherit_events.clear()
@pytest.mark.anyio
async def test_child_inherits_parent_watch_filter(self, mixin_session):
"""Subclass without @watch inherits the parent's field filter."""
obj = WatchChild(status="initial", other="x")
mixin_session.add(obj)
await mixin_session.commit()
await asyncio.sleep(0)
obj.other = "changed" # not watched by parent's @watch("status")
await mixin_session.commit()
await asyncio.sleep(0)
assert _watch_inherit_events == []
@pytest.mark.anyio
async def test_child_triggers_on_watched_field(self, mixin_session):
"""Subclass without @watch triggers on_update for the parent's watched field."""
obj = WatchChild(status="initial", other="x")
mixin_session.add(obj)
await mixin_session.commit()
await asyncio.sleep(0)
obj.status = "updated"
await mixin_session.commit()
await asyncio.sleep(0)
assert len(_watch_inherit_events) == 1
assert _watch_inherit_events[0]["type"] == "WatchChild"
assert "status" in _watch_inherit_events[0]["changes"]
@pytest.mark.anyio
async def test_subclass_override_takes_precedence(self, mixin_session):
"""Subclass @watch overrides the parent's field filter."""
obj = WatchOverride(status="initial", other="x")
mixin_session.add(obj)
await mixin_session.commit()
await asyncio.sleep(0)
obj.status = (
"changed" # watched by parent but overridden by child's @watch("other")
)
await mixin_session.commit()
await asyncio.sleep(0)
assert _watch_inherit_events == []
obj.other = "changed"
await mixin_session.commit()
await asyncio.sleep(0)
assert len(_watch_inherit_events) == 1
assert "other" in _watch_inherit_events[0]["changes"]
class TestUpsertChanges: class TestUpsertChanges:
def test_inserts_new_entry(self): def test_inserts_new_entry(self):
"""New key is inserted with the full changes dict.""" """New key is inserted with the full changes dict."""
@@ -871,6 +984,119 @@ class TestWatchedFieldsMixin:
} }
class TestTransientObject:
"""Create + delete within the same transaction should fire no events."""
@pytest.fixture(autouse=True)
def clear_events(self):
_test_events.clear()
yield
_test_events.clear()
@pytest.mark.anyio
async def test_no_events_when_created_and_deleted_in_same_transaction(
self, mixin_session
):
"""Neither on_create nor on_delete fires when the object never survives a commit."""
obj = WatchedModel(status="active", other="x")
mixin_session.add(obj)
await mixin_session.flush()
await mixin_session.delete(obj)
await mixin_session.commit()
await asyncio.sleep(0)
assert _test_events == []
@pytest.mark.anyio
async def test_other_objects_unaffected(self, mixin_session):
"""on_create still fires for objects that are not deleted in the same transaction."""
survivor = WatchedModel(status="active", other="x")
transient = WatchedModel(status="gone", other="y")
mixin_session.add(survivor)
mixin_session.add(transient)
await mixin_session.flush()
await mixin_session.delete(transient)
await mixin_session.commit()
await asyncio.sleep(0)
creates = [e for e in _test_events if e["event"] == "create"]
deletes = [e for e in _test_events if e["event"] == "delete"]
assert len(creates) == 1
assert creates[0]["obj_id"] == survivor.id
assert deletes == []
@pytest.mark.anyio
async def test_distinct_create_and_delete_both_fire(self, mixin_session):
"""on_create and on_delete both fire when different objects are created and deleted."""
existing = WatchedModel(status="old", other="x")
mixin_session.add(existing)
await mixin_session.commit()
await asyncio.sleep(0)
_test_events.clear()
new_obj = WatchedModel(status="new", other="y")
mixin_session.add(new_obj)
await mixin_session.delete(existing)
await mixin_session.commit()
await asyncio.sleep(0)
creates = [e for e in _test_events if e["event"] == "create"]
deletes = [e for e in _test_events if e["event"] == "delete"]
assert len(creates) == 1
assert len(deletes) == 1
class TestPolymorphism:
"""WatchedFieldsMixin with STI (Single Table Inheritance)."""
@pytest.fixture(autouse=True)
def clear_events(self):
_poly_events.clear()
yield
_poly_events.clear()
@pytest.mark.anyio
async def test_on_create_fires_once_for_subclass(self, mixin_session):
"""on_create fires exactly once for a STI subclass instance."""
dog = PolyDog(name="Rex")
mixin_session.add(dog)
await mixin_session.commit()
await asyncio.sleep(0)
assert len(_poly_events) == 1
assert _poly_events[0]["event"] == "create"
assert _poly_events[0]["type"] == "PolyDog"
@pytest.mark.anyio
async def test_on_delete_fires_for_subclass(self, mixin_session):
"""on_delete fires for a STI subclass instance."""
dog = PolyDog(name="Rex")
mixin_session.add(dog)
await mixin_session.commit()
await asyncio.sleep(0)
_poly_events.clear()
await mixin_session.delete(dog)
await mixin_session.commit()
await asyncio.sleep(0)
assert len(_poly_events) == 1
assert _poly_events[0]["event"] == "delete"
assert _poly_events[0]["type"] == "PolyDog"
@pytest.mark.anyio
async def test_transient_subclass_fires_no_events(self, mixin_session):
"""Create + delete of a STI subclass in one transaction fires no events."""
dog = PolyDog(name="Rex")
mixin_session.add(dog)
await mixin_session.flush()
await mixin_session.delete(dog)
await mixin_session.commit()
await asyncio.sleep(0)
assert _poly_events == []
class TestWatchAll: class TestWatchAll:
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def clear_events(self): def clear_events(self):
@@ -968,6 +1194,28 @@ class TestSyncCallbacks:
assert updates[0]["changes"]["status"] == {"old": "initial", "new": "updated"} assert updates[0]["changes"]["status"] == {"old": "initial", "new": "updated"}
class TestFutureCallbacks:
"""Callbacks returning a non-coroutine awaitable (asyncio.Task / Future)."""
@pytest.fixture(autouse=True)
def clear_events(self):
_future_events.clear()
yield
_future_events.clear()
@pytest.mark.anyio
async def test_task_callback_is_awaited(self, mixin_session):
"""on_create returning an asyncio.Task is awaited and its work completes."""
obj = FutureCallbackModel(name="test")
mixin_session.add(obj)
await mixin_session.commit()
# Two turns: one for _run() to execute, one for the inner _work() task.
await asyncio.sleep(0)
await asyncio.sleep(0)
assert _future_events == ["created"]
class TestAttributeAccessInCallbacks: class TestAttributeAccessInCallbacks:
"""Verify that self attributes are accessible inside every callback type. """Verify that self attributes are accessible inside every callback type.

View File

@@ -7,9 +7,10 @@ from fastapi import Depends, FastAPI
from httpx import AsyncClient from httpx import AsyncClient
from sqlalchemy import select, text from sqlalchemy import select, text
from sqlalchemy.engine import make_url from sqlalchemy.engine import make_url
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import selectinload from sqlalchemy.orm import selectinload
from fastapi_toolsets.db import get_transaction
from fastapi_toolsets.fixtures import Context, FixtureRegistry from fastapi_toolsets.fixtures import Context, FixtureRegistry
from fastapi_toolsets.pytest import ( from fastapi_toolsets.pytest import (
create_async_client, create_async_client,
@@ -336,6 +337,55 @@ class TestCreateDbSession:
result = await session.execute(select(Role)) result = await session.execute(select(Role))
assert result.all() == [] assert result.all() == []
@pytest.mark.anyio
async def test_get_transaction_commits_visible_to_separate_session(self):
"""Data written via get_transaction() is committed and visible to other sessions."""
role_id = uuid.uuid4()
async with create_db_session(DATABASE_URL, Base, drop_tables=False) as session:
# Simulate what _create_fixture_function does: insert via get_transaction
# with no explicit commit afterward.
async with get_transaction(session):
role = Role(id=role_id, name="visible_to_other_session")
session.add(role)
# The data must have been committed (begin/commit, not a savepoint),
# so a separate engine/session can read it.
other_engine = create_async_engine(DATABASE_URL, echo=False)
try:
other_session_maker = async_sessionmaker(
other_engine, expire_on_commit=False
)
async with other_session_maker() as other:
result = await other.execute(select(Role).where(Role.id == role_id))
fetched = result.scalar_one_or_none()
assert fetched is not None, (
"Fixture data inserted via get_transaction() must be committed "
"and visible to a separate session. If create_db_session uses "
"create_db_context, auto-begin forces get_transaction() into "
"savepoints instead of real commits."
)
assert fetched.name == "visible_to_other_session"
finally:
await other_engine.dispose()
# Cleanup
async with create_db_session(DATABASE_URL, Base, drop_tables=True) as _:
pass
class TestDeprecatedCleanupTables:
"""Tests for the deprecated cleanup_tables re-export in fastapi_toolsets.pytest."""
@pytest.mark.anyio
async def test_emits_deprecation_warning(self):
"""cleanup_tables imported from fastapi_toolsets.pytest emits DeprecationWarning."""
from fastapi_toolsets.pytest.utils import cleanup_tables
async with create_db_session(DATABASE_URL, Base, drop_tables=True) as session:
with pytest.warns(DeprecationWarning, match="fastapi_toolsets.db"):
await cleanup_tables(session, Base)
class TestGetXdistWorker: class TestGetXdistWorker:
"""Tests for _get_xdist_worker helper.""" """Tests for _get_xdist_worker helper."""

View File

@@ -201,6 +201,88 @@ class TestOffsetPagination:
assert data["page"] == 2 assert data["page"] == 2
assert data["has_more"] is True assert data["has_more"] is True
def test_total_count_can_be_none(self):
"""total_count accepts None (include_total=False mode)."""
pagination = OffsetPagination(
total_count=None,
items_per_page=20,
page=1,
has_more=True,
)
assert pagination.total_count is None
def test_serialization_with_none_total_count(self):
"""OffsetPagination serializes total_count=None correctly."""
pagination = OffsetPagination(
total_count=None,
items_per_page=20,
page=1,
has_more=False,
)
data = pagination.model_dump()
assert data["total_count"] is None
def test_pages_computed(self):
"""pages is ceil(total_count / items_per_page)."""
pagination = OffsetPagination(
total_count=42,
items_per_page=10,
page=1,
has_more=True,
)
assert pagination.pages == 5
def test_pages_exact_division(self):
"""pages is exact when total_count is evenly divisible."""
pagination = OffsetPagination(
total_count=40,
items_per_page=10,
page=1,
has_more=False,
)
assert pagination.pages == 4
def test_pages_zero_total(self):
"""pages is 0 when total_count is 0."""
pagination = OffsetPagination(
total_count=0,
items_per_page=10,
page=1,
has_more=False,
)
assert pagination.pages == 0
def test_pages_zero_items_per_page(self):
"""pages is 0 when items_per_page is 0."""
pagination = OffsetPagination(
total_count=100,
items_per_page=0,
page=1,
has_more=False,
)
assert pagination.pages == 0
def test_pages_none_when_total_count_none(self):
"""pages is None when total_count is None (include_total=False)."""
pagination = OffsetPagination(
total_count=None,
items_per_page=20,
page=1,
has_more=True,
)
assert pagination.pages is None
def test_pages_in_serialization(self):
"""pages appears in model_dump output."""
pagination = OffsetPagination(
total_count=25,
items_per_page=10,
page=1,
has_more=True,
)
data = pagination.model_dump()
assert data["pages"] == 3
class TestCursorPagination: class TestCursorPagination:
"""Tests for CursorPagination schema.""" """Tests for CursorPagination schema."""
@@ -469,7 +551,7 @@ class TestOffsetPaginatedResponse:
pagination=OffsetPagination( pagination=OffsetPagination(
total_count=0, items_per_page=10, page=1, has_more=False total_count=0, items_per_page=10, page=1, has_more=False
), ),
pagination_type=PaginationType.CURSOR, # type: ignore[arg-type] pagination_type=PaginationType.CURSOR, # type: ignore[arg-type] # ty:ignore[invalid-argument-type]
) )
def test_filter_attributes_defaults_to_none(self): def test_filter_attributes_defaults_to_none(self):
@@ -556,7 +638,7 @@ class TestCursorPaginatedResponse:
pagination=CursorPagination( pagination=CursorPagination(
next_cursor=None, items_per_page=10, has_more=False next_cursor=None, items_per_page=10, has_more=False
), ),
pagination_type=PaginationType.OFFSET, # type: ignore[arg-type] pagination_type=PaginationType.OFFSET, # type: ignore[arg-type] # ty:ignore[invalid-argument-type]
) )
def test_full_serialization(self): def test_full_serialization(self):

137
uv.lock generated
View File

@@ -251,7 +251,7 @@ wheels = [
[[package]] [[package]]
name = "fastapi-toolsets" name = "fastapi-toolsets"
version = "2.4.0" version = "2.4.3"
source = { editable = "." } source = { editable = "." }
dependencies = [ dependencies = [
{ name = "asyncpg" }, { name = "asyncpg" },
@@ -286,6 +286,7 @@ dev = [
{ name = "fastapi-toolsets", extra = ["all"] }, { name = "fastapi-toolsets", extra = ["all"] },
{ name = "httpx" }, { name = "httpx" },
{ name = "mkdocstrings-python" }, { name = "mkdocstrings-python" },
{ name = "prek" },
{ name = "pytest" }, { name = "pytest" },
{ name = "pytest-anyio" }, { name = "pytest-anyio" },
{ name = "pytest-cov" }, { name = "pytest-cov" },
@@ -328,6 +329,7 @@ dev = [
{ name = "fastapi-toolsets", extras = ["all"] }, { name = "fastapi-toolsets", extras = ["all"] },
{ name = "httpx", specifier = ">=0.25.0" }, { name = "httpx", specifier = ">=0.25.0" },
{ name = "mkdocstrings-python", specifier = ">=2.0.2" }, { name = "mkdocstrings-python", specifier = ">=2.0.2" },
{ name = "prek", specifier = ">=0.3.8" },
{ name = "pytest", specifier = ">=8.0.0" }, { name = "pytest", specifier = ">=8.0.0" },
{ name = "pytest-anyio", specifier = ">=0.0.0" }, { name = "pytest-anyio", specifier = ">=0.0.0" },
{ name = "pytest-cov", specifier = ">=4.0.0" }, { name = "pytest-cov", specifier = ">=4.0.0" },
@@ -417,6 +419,7 @@ wheels = [
name = "griffelib" name = "griffelib"
version = "2.0.0" version = "2.0.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ad/06/eccbd311c9e2b3ca45dbc063b93134c57a1ccc7607c5e545264ad092c4a9/griffelib-2.0.0.tar.gz", hash = "sha256:e504d637a089f5cab9b5daf18f7645970509bf4f53eda8d79ed71cce8bd97934", size = 166312, upload-time = "2026-03-23T21:06:55.954Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/4d/51/c936033e16d12b627ea334aaaaf42229c37620d0f15593456ab69ab48161/griffelib-2.0.0-py3-none-any.whl", hash = "sha256:01284878c966508b6d6f1dbff9b6fa607bc062d8261c5c7253cb285b06422a7f", size = 142004, upload-time = "2026-02-09T19:09:40.561Z" }, { url = "https://files.pythonhosted.org/packages/4d/51/c936033e16d12b627ea334aaaaf42229c37620d0f15593456ab69ab48161/griffelib-2.0.0-py3-none-any.whl", hash = "sha256:01284878c966508b6d6f1dbff9b6fa607bc062d8261c5c7253cb285b06422a7f", size = 142004, upload-time = "2026-02-09T19:09:40.561Z" },
] ]
@@ -720,6 +723,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
] ]
[[package]]
name = "prek"
version = "0.3.8"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/62/ee/03e8180e3fda9de25b6480bd15cc2bde40d573868d50648b0e527b35562f/prek-0.3.8.tar.gz", hash = "sha256:434a214256516f187a3ab15f869d950243be66b94ad47987ee4281b69643a2d9", size = 400224, upload-time = "2026-03-23T08:23:35.981Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/00/84/40d2ddf362d12c4cd4a25a8c89a862edf87cdfbf1422aa41aac8e315d409/prek-0.3.8-py3-none-linux_armv6l.whl", hash = "sha256:6fb646ada60658fa6dd7771b2e0fb097f005151be222f869dada3eb26d79ed33", size = 5226646, upload-time = "2026-03-23T08:23:18.306Z" },
{ url = "https://files.pythonhosted.org/packages/e1/52/7308a033fa43b7e8e188797bd2b3b017c0f0adda70fa7af575b1f43ea888/prek-0.3.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f3d7fdadb15efc19c09953c7a33cf2061a70f367d1e1957358d3ad5cc49d0616", size = 5620104, upload-time = "2026-03-23T08:23:40.053Z" },
{ url = "https://files.pythonhosted.org/packages/ff/b1/f106ac000a91511a9cd80169868daf2f5b693480ef5232cec5517a38a512/prek-0.3.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:72728c3295e79ca443f8c1ec037d2a5b914ec73a358f69cf1bc1964511876bf8", size = 5199867, upload-time = "2026-03-23T08:23:38.066Z" },
{ url = "https://files.pythonhosted.org/packages/b3/e9/970713f4b019f69de9844e1bab37b8ddb67558e410916f4eb5869a696165/prek-0.3.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:48efc28f2f53b5b8087efca9daaed91572d62df97d5f24a1c7a087fecb5017de", size = 5441801, upload-time = "2026-03-23T08:23:32.617Z" },
{ url = "https://files.pythonhosted.org/packages/12/a4/7ef44032b181753e19452ec3b09abb3a32607cf6b0a0508f0604becaaf2b/prek-0.3.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6ca9d63bacbc448a5c18e955c78d3ac5176c3a17c3baacdd949b1a623e08a36", size = 5155107, upload-time = "2026-03-23T08:23:31.021Z" },
{ url = "https://files.pythonhosted.org/packages/bd/77/4d9c8985dbba84149760785dfe07093ea1e29d710257dfb7c89615e2234c/prek-0.3.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1000f7029696b4fe712fb1fefd4c55b9c4de72b65509c8e50296370a06f9dc3f", size = 5566541, upload-time = "2026-03-23T08:23:45.694Z" },
{ url = "https://files.pythonhosted.org/packages/1a/1a/81e6769ac1f7f8346d09ce2ab0b47cf06466acd9ff72e87e5d1f0d98cd32/prek-0.3.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ff0bed0e2c1286522987d982168a86cbbd0d069d840506a46c9fda983515517", size = 6552991, upload-time = "2026-03-23T08:23:21.958Z" },
{ url = "https://files.pythonhosted.org/packages/6f/fa/ce2df0dd2dc75a9437a52463239d0782998943d7b04e191fb89b83016c34/prek-0.3.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fb087ac0ffda3ac65bbbae9a38326a7fd27ee007bb4a94323ce1eb539d8bbec", size = 5832972, upload-time = "2026-03-23T08:23:20.258Z" },
{ url = "https://files.pythonhosted.org/packages/18/6b/9d4269df9073216d296244595a21c253b6475dfc9076c0bd2906be7a436c/prek-0.3.8-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:2e1e5e206ff7b31bd079cce525daddc96cd6bc544d20dc128921ad92f7a4c85d", size = 5448371, upload-time = "2026-03-23T08:23:41.835Z" },
{ url = "https://files.pythonhosted.org/packages/60/1d/1e4d8a78abefa5b9d086e5a9f1638a74b5e540eec8a648d9946707701f29/prek-0.3.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:dcea3fe23832a4481bccb7c45f55650cb233be7c805602e788bb7dba60f2d861", size = 5270546, upload-time = "2026-03-23T08:23:24.231Z" },
{ url = "https://files.pythonhosted.org/packages/77/07/34f36551a6319ae36e272bea63a42f59d41d2d47ab0d5fb00eb7b4e88e87/prek-0.3.8-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:4d25e647e9682f6818ab5c31e7a4b842993c14782a6ffcd128d22b784e0d677f", size = 5124032, upload-time = "2026-03-23T08:23:26.368Z" },
{ url = "https://files.pythonhosted.org/packages/e3/01/6d544009bb655e709993411796af77339f439526db4f3b3509c583ad8eb9/prek-0.3.8-py3-none-musllinux_1_1_i686.whl", hash = "sha256:de528b82935e33074815acff3c7c86026754d1212136295bc88fe9c43b4231d5", size = 5432245, upload-time = "2026-03-23T08:23:47.877Z" },
{ url = "https://files.pythonhosted.org/packages/54/96/1237ee269e9bfa283ffadbcba1f401f48a47aed2b2563eb1002740d6079d/prek-0.3.8-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:6d660f1c25a126e6d9f682fe61449441226514f412a4469f5d71f8f8cad56db2", size = 5950550, upload-time = "2026-03-23T08:23:43.8Z" },
{ url = "https://files.pythonhosted.org/packages/ca/6b/a574411459049bc691047c9912f375deda10c44a707b6ce98df2b658f0b3/prek-0.3.8-py3-none-win32.whl", hash = "sha256:b0c291c577615d9f8450421dff0b32bfd77a6b0d223ee4115a1f820cb636fdf1", size = 4949501, upload-time = "2026-03-23T08:23:16.338Z" },
{ url = "https://files.pythonhosted.org/packages/0c/b4/46b59fe49f635acd9f6530778ce577f9d8b49452835726a5311ffc902c67/prek-0.3.8-py3-none-win_amd64.whl", hash = "sha256:bc147fdbdd4ec33fc7a987b893ecb69b1413ac100d95c9889a70f3fd58c73d06", size = 5346551, upload-time = "2026-03-23T08:23:34.501Z" },
{ url = "https://files.pythonhosted.org/packages/53/05/9cca1708bb8c65264124eb4b04251e0f65ce5bfc707080bb6b492d5a0df7/prek-0.3.8-py3-none-win_arm64.whl", hash = "sha256:a2614647aeafa817a5802ccb9561e92eedc20dcf840639a1b00826e2c2442515", size = 5190872, upload-time = "2026-03-23T08:23:29.463Z" },
]
[[package]] [[package]]
name = "prometheus-client" name = "prometheus-client"
version = "0.24.1" version = "0.24.1"
@@ -894,16 +921,16 @@ wheels = [
[[package]] [[package]]
name = "pytest-cov" name = "pytest-cov"
version = "7.0.0" version = "7.1.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "coverage", extra = ["toml"] }, { name = "coverage", extra = ["toml"] },
{ name = "pluggy" }, { name = "pluggy" },
{ name = "pytest" }, { name = "pytest" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } sdist = { url = "https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, { url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" },
] ]
[[package]] [[package]]
@@ -1013,27 +1040,27 @@ wheels = [
[[package]] [[package]]
name = "ruff" name = "ruff"
version = "0.15.6" version = "0.15.7"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/51/df/f8629c19c5318601d3121e230f74cbee7a3732339c52b21daa2b82ef9c7d/ruff-0.15.6.tar.gz", hash = "sha256:8394c7bb153a4e3811a4ecdacd4a8e6a4fa8097028119160dffecdcdf9b56ae4", size = 4597916, upload-time = "2026-03-12T23:05:47.51Z" } sdist = { url = "https://files.pythonhosted.org/packages/a1/22/9e4f66ee588588dc6c9af6a994e12d26e19efbe874d1a909d09a6dac7a59/ruff-0.15.7.tar.gz", hash = "sha256:04f1ae61fc20fe0b148617c324d9d009b5f63412c0b16474f3d5f1a1a665f7ac", size = 4601277, upload-time = "2026-03-19T16:26:22.605Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/9e/2f/4e03a7e5ce99b517e98d3b4951f411de2b0fa8348d39cf446671adcce9a2/ruff-0.15.6-py3-none-linux_armv6l.whl", hash = "sha256:7c98c3b16407b2cf3d0f2b80c80187384bc92c6774d85fefa913ecd941256fff", size = 10508953, upload-time = "2026-03-12T23:05:17.246Z" }, { url = "https://files.pythonhosted.org/packages/41/2f/0b08ced94412af091807b6119ca03755d651d3d93a242682bf020189db94/ruff-0.15.7-py3-none-linux_armv6l.whl", hash = "sha256:a81cc5b6910fb7dfc7c32d20652e50fa05963f6e13ead3c5915c41ac5d16668e", size = 10489037, upload-time = "2026-03-19T16:26:32.47Z" },
{ url = "https://files.pythonhosted.org/packages/70/60/55bcdc3e9f80bcf39edf0cd272da6fa511a3d94d5a0dd9e0adf76ceebdb4/ruff-0.15.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ee7dcfaad8b282a284df4aa6ddc2741b3f4a18b0555d626805555a820ea181c3", size = 10942257, upload-time = "2026-03-12T23:05:23.076Z" }, { url = "https://files.pythonhosted.org/packages/91/4a/82e0fa632e5c8b1eba5ee86ecd929e8ff327bbdbfb3c6ac5d81631bef605/ruff-0.15.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:722d165bd52403f3bdabc0ce9e41fc47070ac56d7a91b4e0d097b516a53a3477", size = 10955433, upload-time = "2026-03-19T16:27:00.205Z" },
{ url = "https://files.pythonhosted.org/packages/e7/f9/005c29bd1726c0f492bfa215e95154cf480574140cb5f867c797c18c790b/ruff-0.15.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3bd9967851a25f038fc8b9ae88a7fbd1b609f30349231dffaa37b6804923c4bb", size = 10322683, upload-time = "2026-03-12T23:05:33.738Z" }, { url = "https://files.pythonhosted.org/packages/ab/10/12586735d0ff42526ad78c049bf51d7428618c8b5c467e72508c694119df/ruff-0.15.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7fbc2448094262552146cbe1b9643a92f66559d3761f1ad0656d4991491af49e", size = 10269302, upload-time = "2026-03-19T16:26:26.183Z" },
{ url = "https://files.pythonhosted.org/packages/5f/74/2f861f5fd7cbb2146bddb5501450300ce41562da36d21868c69b7a828169/ruff-0.15.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13f4594b04e42cd24a41da653886b04d2ff87adbf57497ed4f728b0e8a4866f8", size = 10660986, upload-time = "2026-03-12T23:05:53.245Z" }, { url = "https://files.pythonhosted.org/packages/eb/5d/32b5c44ccf149a26623671df49cbfbd0a0ae511ff3df9d9d2426966a8d57/ruff-0.15.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b39329b60eba44156d138275323cc726bbfbddcec3063da57caa8a8b1d50adf", size = 10607625, upload-time = "2026-03-19T16:27:03.263Z" },
{ url = "https://files.pythonhosted.org/packages/c1/a1/309f2364a424eccb763cdafc49df843c282609f47fe53aa83f38272389e0/ruff-0.15.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2ed8aea2f3fe57886d3f00ea5b8aae5bf68d5e195f487f037a955ff9fbaac9e", size = 10332177, upload-time = "2026-03-12T23:05:56.145Z" }, { url = "https://files.pythonhosted.org/packages/5d/f1/f0001cabe86173aaacb6eb9bb734aa0605f9a6aa6fa7d43cb49cbc4af9c9/ruff-0.15.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87768c151808505f2bfc93ae44e5f9e7c8518943e5074f76ac21558ef5627c85", size = 10324743, upload-time = "2026-03-19T16:27:09.791Z" },
{ url = "https://files.pythonhosted.org/packages/30/41/7ebf1d32658b4bab20f8ac80972fb19cd4e2c6b78552be263a680edc55ac/ruff-0.15.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70789d3e7830b848b548aae96766431c0dc01a6c78c13381f423bf7076c66d15", size = 11170783, upload-time = "2026-03-12T23:06:01.742Z" }, { url = "https://files.pythonhosted.org/packages/7a/87/b8a8f3d56b8d848008559e7c9d8bf367934d5367f6d932ba779456e2f73b/ruff-0.15.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb0511670002c6c529ec66c0e30641c976c8963de26a113f3a30456b702468b0", size = 11138536, upload-time = "2026-03-19T16:27:06.101Z" },
{ url = "https://files.pythonhosted.org/packages/76/be/6d488f6adca047df82cd62c304638bcb00821c36bd4881cfca221561fdfc/ruff-0.15.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:542aaf1de3154cea088ced5a819ce872611256ffe2498e750bbae5247a8114e9", size = 12044201, upload-time = "2026-03-12T23:05:28.697Z" }, { url = "https://files.pythonhosted.org/packages/e4/f2/4fd0d05aab0c5934b2e1464784f85ba2eab9d54bffc53fb5430d1ed8b829/ruff-0.15.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0d19644f801849229db8345180a71bee5407b429dd217f853ec515e968a6912", size = 11994292, upload-time = "2026-03-19T16:26:48.718Z" },
{ url = "https://files.pythonhosted.org/packages/71/68/e6f125df4af7e6d0b498f8d373274794bc5156b324e8ab4bf5c1b4fc0ec7/ruff-0.15.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c22e6f02c16cfac3888aa636e9eba857254d15bbacc9906c9689fdecb1953ab", size = 11421561, upload-time = "2026-03-12T23:05:31.236Z" }, { url = "https://files.pythonhosted.org/packages/64/22/fc4483871e767e5e95d1622ad83dad5ebb830f762ed0420fde7dfa9d9b08/ruff-0.15.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4806d8e09ef5e84eb19ba833d0442f7e300b23fe3f0981cae159a248a10f0036", size = 11398981, upload-time = "2026-03-19T16:26:54.513Z" },
{ url = "https://files.pythonhosted.org/packages/f1/9f/f85ef5fd01a52e0b472b26dc1b4bd228b8f6f0435975442ffa4741278703/ruff-0.15.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98893c4c0aadc8e448cfa315bd0cc343a5323d740fe5f28ef8a3f9e21b381f7e", size = 11310928, upload-time = "2026-03-12T23:05:45.288Z" }, { url = "https://files.pythonhosted.org/packages/b0/99/66f0343176d5eab02c3f7fcd2de7a8e0dd7a41f0d982bee56cd1c24db62b/ruff-0.15.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dce0896488562f09a27b9c91b1f58a097457143931f3c4d519690dea54e624c5", size = 11242422, upload-time = "2026-03-19T16:26:29.277Z" },
{ url = "https://files.pythonhosted.org/packages/8c/26/b75f8c421f5654304b89471ed384ae8c7f42b4dff58fa6ce1626d7f2b59a/ruff-0.15.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:70d263770d234912374493e8cc1e7385c5d49376e41dfa51c5c3453169dc581c", size = 11235186, upload-time = "2026-03-12T23:05:50.677Z" }, { url = "https://files.pythonhosted.org/packages/5d/3a/a7060f145bfdcce4c987ea27788b30c60e2c81d6e9a65157ca8afe646328/ruff-0.15.7-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:1852ce241d2bc89e5dc823e03cff4ce73d816b5c6cdadd27dbfe7b03217d2a12", size = 11232158, upload-time = "2026-03-19T16:26:42.321Z" },
{ url = "https://files.pythonhosted.org/packages/fc/d4/d5a6d065962ff7a68a86c9b4f5500f7d101a0792078de636526c0edd40da/ruff-0.15.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:55a1ad63c5a6e54b1f21b7514dfadc0c7fb40093fa22e95143cf3f64ebdcd512", size = 10635231, upload-time = "2026-03-12T23:05:37.044Z" }, { url = "https://files.pythonhosted.org/packages/a7/53/90fbb9e08b29c048c403558d3cdd0adf2668b02ce9d50602452e187cd4af/ruff-0.15.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5f3e4b221fb4bd293f79912fc5e93a9063ebd6d0dcbd528f91b89172a9b8436c", size = 10577861, upload-time = "2026-03-19T16:26:57.459Z" },
{ url = "https://files.pythonhosted.org/packages/d6/56/7c3acf3d50910375349016cf33de24be021532042afbed87942858992491/ruff-0.15.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8dc473ba093c5ec238bb1e7429ee676dca24643c471e11fbaa8a857925b061c0", size = 10340357, upload-time = "2026-03-12T23:06:04.748Z" }, { url = "https://files.pythonhosted.org/packages/2f/aa/5f486226538fe4d0f0439e2da1716e1acf895e2a232b26f2459c55f8ddad/ruff-0.15.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b15e48602c9c1d9bdc504b472e90b90c97dc7d46c7028011ae67f3861ceba7b4", size = 10327310, upload-time = "2026-03-19T16:26:35.909Z" },
{ url = "https://files.pythonhosted.org/packages/06/54/6faa39e9c1033ff6a3b6e76b5df536931cd30caf64988e112bbf91ef5ce5/ruff-0.15.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:85b042377c2a5561131767974617006f99f7e13c63c111b998f29fc1e58a4cfb", size = 10860583, upload-time = "2026-03-12T23:05:58.978Z" }, { url = "https://files.pythonhosted.org/packages/99/9e/271afdffb81fe7bfc8c43ba079e9d96238f674380099457a74ccb3863857/ruff-0.15.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b4705e0e85cedc74b0a23cf6a179dbb3df184cb227761979cc76c0440b5ab0d", size = 10840752, upload-time = "2026-03-19T16:26:45.723Z" },
{ url = "https://files.pythonhosted.org/packages/cb/1e/509a201b843b4dfb0b32acdedf68d951d3377988cae43949ba4c4133a96a/ruff-0.15.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cef49e30bc5a86a6a92098a7fbf6e467a234d90b63305d6f3ec01225a9d092e0", size = 11410976, upload-time = "2026-03-12T23:05:39.955Z" }, { url = "https://files.pythonhosted.org/packages/bf/29/a4ae78394f76c7759953c47884eb44de271b03a66634148d9f7d11e721bd/ruff-0.15.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:112c1fa316a558bb34319282c1200a8bf0495f1b735aeb78bfcb2991e6087580", size = 11336961, upload-time = "2026-03-19T16:26:39.076Z" },
{ url = "https://files.pythonhosted.org/packages/6c/25/3fc9114abf979a41673ce877c08016f8e660ad6cf508c3957f537d2e9fa9/ruff-0.15.6-py3-none-win32.whl", hash = "sha256:bbf67d39832404812a2d23020dda68fee7f18ce15654e96fb1d3ad21a5fe436c", size = 10616872, upload-time = "2026-03-12T23:05:42.451Z" }, { url = "https://files.pythonhosted.org/packages/26/6b/8786ba5736562220d588a2f6653e6c17e90c59ced34a2d7b512ef8956103/ruff-0.15.7-py3-none-win32.whl", hash = "sha256:6d39e2d3505b082323352f733599f28169d12e891f7dd407f2d4f54b4c2886de", size = 10582538, upload-time = "2026-03-19T16:26:15.992Z" },
{ url = "https://files.pythonhosted.org/packages/89/7a/09ece68445ceac348df06e08bf75db72d0e8427765b96c9c0ffabc1be1d9/ruff-0.15.6-py3-none-win_amd64.whl", hash = "sha256:aee25bc84c2f1007ecb5037dff75cef00414fdf17c23f07dc13e577883dca406", size = 11787271, upload-time = "2026-03-12T23:05:20.168Z" }, { url = "https://files.pythonhosted.org/packages/2b/e9/346d4d3fffc6871125e877dae8d9a1966b254fbd92a50f8561078b88b099/ruff-0.15.7-py3-none-win_amd64.whl", hash = "sha256:4d53d712ddebcd7dace1bc395367aec12c057aacfe9adbb6d832302575f4d3a1", size = 11755839, upload-time = "2026-03-19T16:26:19.897Z" },
{ url = "https://files.pythonhosted.org/packages/7f/d0/578c47dd68152ddddddf31cd7fc67dc30b7cdf639a86275fda821b0d9d98/ruff-0.15.6-py3-none-win_arm64.whl", hash = "sha256:c34de3dd0b0ba203be50ae70f5910b17188556630e2178fd7d79fc030eb0d837", size = 11060497, upload-time = "2026-03-12T23:05:25.968Z" }, { url = "https://files.pythonhosted.org/packages/8f/e8/726643a3ea68c727da31570bde48c7a10f1aa60eddd628d94078fec586ff/ruff-0.15.7-py3-none-win_arm64.whl", hash = "sha256:18e8d73f1c3fdf27931497972250340f92e8c861722161a9caeb89a58ead6ed2", size = 11023304, upload-time = "2026-03-19T16:26:51.669Z" },
] ]
[[package]] [[package]]
@@ -1177,26 +1204,26 @@ wheels = [
[[package]] [[package]]
name = "ty" name = "ty"
version = "0.0.23" version = "0.0.25"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/75/ba/d3c998ff4cf6b5d75b39356db55fe1b7caceecc522b9586174e6a5dee6f7/ty-0.0.23.tar.gz", hash = "sha256:5fb05db58f202af366f80ef70f806e48f5237807fe424ec787c9f289e3f3a4ef", size = 5341461, upload-time = "2026-03-13T12:34:23.125Z" } sdist = { url = "https://files.pythonhosted.org/packages/12/bf/3c3147c7237277b0e8a911ff89de7183408be96b31fb42b38edb666d287f/ty-0.0.25.tar.gz", hash = "sha256:8ae3891be17dfb6acab51a2df3a8f8f6c551eb60ea674c10946dc92aae8d4401", size = 5375500, upload-time = "2026-03-24T22:32:34.608Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/f4/21/aab32603dfdfacd4819e52fa8c6074e7bd578218a5142729452fc6a62db6/ty-0.0.23-py3-none-linux_armv6l.whl", hash = "sha256:e810eef1a5f1cfc0731a58af8d2f334906a96835829767aed00026f1334a8dd7", size = 10329096, upload-time = "2026-03-13T12:34:09.432Z" }, { url = "https://files.pythonhosted.org/packages/97/a4/6c289cbd1474285223124a4ffb55c078dbe9ae1d925d0b6a948643c7f115/ty-0.0.25-py3-none-linux_armv6l.whl", hash = "sha256:26d6d5aede5d54fb055779460f896d9c1473c6fb996716bd11cb90f027d8fee7", size = 10452747, upload-time = "2026-03-24T22:32:32.662Z" },
{ url = "https://files.pythonhosted.org/packages/9f/a9/dd3287a82dce3df546ec560296208d4905dcf06346b6e18c2f3c63523bd1/ty-0.0.23-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e43d36bd89a151ddcad01acaeff7dcc507cb73ff164c1878d2d11549d39a061c", size = 10156631, upload-time = "2026-03-13T12:34:53.122Z" }, { url = "https://files.pythonhosted.org/packages/00/13/74cb9de356b9ceb3f281ab048f8c4ac2207122161b0ac0066886ce129abe/ty-0.0.25-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aedcfbc7b6b96dbc55b0da78fa02bd049373ff3d8a827f613dadd8bd17d10758", size = 10271349, upload-time = "2026-03-24T22:32:13.041Z" },
{ url = "https://files.pythonhosted.org/packages/0f/01/3f25909b02fac29bb0a62b2251f8d62e65d697781ffa4cf6b47a4c075c85/ty-0.0.23-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bd6a340969577b4645f231572c4e46012acba2d10d4c0c6570fe1ab74e76ae00", size = 9653211, upload-time = "2026-03-13T12:34:15.049Z" }, { url = "https://files.pythonhosted.org/packages/0e/93/ffc5a20cc9e14fa9b32b0c54884864bede30d144ce2ae013805bce0c86d0/ty-0.0.25-py3-none-macosx_11_0_arm64.whl", hash = "sha256:0a8fb3c1e28f73618941811e2568dca195178a1a6314651d4ee97086a4497253", size = 9730308, upload-time = "2026-03-24T22:32:19.24Z" },
{ url = "https://files.pythonhosted.org/packages/d5/60/bfc0479572a6f4b90501c869635faf8d84c8c68ffc5dd87d04f049affabc/ty-0.0.23-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:341441783e626eeb7b1ec2160432956aed5734932ab2d1c26f94d0c98b229937", size = 10156143, upload-time = "2026-03-13T12:34:34.468Z" }, { url = "https://files.pythonhosted.org/packages/6d/78/52e05ef32a5f172fce70633a4e19d8e04364271a4322ae12382c7344b0de/ty-0.0.25-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:814870b7f347b5d0276304cddb98a0958f08de183bf159abc920ebe321247ad4", size = 10247664, upload-time = "2026-03-24T22:32:08.669Z" },
{ url = "https://files.pythonhosted.org/packages/3a/81/8a93e923535a340f54bea20ff196f6b2787782b2f2f399bd191c4bc132d6/ty-0.0.23-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ce1dc66c26d4167e2c78d12fa870ef5a7ec9cc344d2baaa6243297cfa88bd52", size = 10136632, upload-time = "2026-03-13T12:34:28.832Z" }, { url = "https://files.pythonhosted.org/packages/c2/64/0d0a47ed0aa1d634c666c2cc15d3b0af4b95d0fd3dbb796032bd493f3433/ty-0.0.25-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:781150e23825dc110cd5e1f50ca3d61664f7a5db5b4a55d5dbf7d3b1e246b917", size = 10261961, upload-time = "2026-03-24T22:32:43.935Z" },
{ url = "https://files.pythonhosted.org/packages/da/cb/2ac81c850c58acc9f976814404d28389c9c1c939676e32287b9cff61381e/ty-0.0.23-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bae1e7a294bf8528836f7617dc5c360ea2dddb63789fc9471ae6753534adca05", size = 10655025, upload-time = "2026-03-13T12:34:37.105Z" }, { url = "https://files.pythonhosted.org/packages/3e/ba/4666b96f0499465efb97c244554107c541d74a1add393e62276b3de9b54f/ty-0.0.25-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc81ff2a0143911321251dc81d1c259fa5cdc56d043019a733c845d55409e2a", size = 10746076, upload-time = "2026-03-24T22:32:26.37Z" },
{ url = "https://files.pythonhosted.org/packages/b5/9b/bac771774c198c318ae699fc013d8cd99ed9caf993f661fba11238759244/ty-0.0.23-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d2b162768764d9dc177c83fb497a51532bb67cbebe57b8fa0f2668436bf53f3c", size = 11230107, upload-time = "2026-03-13T12:34:20.751Z" }, { url = "https://files.pythonhosted.org/packages/e7/ed/aa958ccbcd85cc206600e48fbf0a1c27aef54b4b90112d9a73f69ed0c739/ty-0.0.25-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f03c5c5b5c10355ea030cbe3cd93b2e759b9492c66688288ea03a68086069f2e", size = 11287331, upload-time = "2026-03-24T22:32:21.607Z" },
{ url = "https://files.pythonhosted.org/packages/14/09/7644fb0e297265e18243f878aca343593323b9bb19ed5278dcbc63781be0/ty-0.0.23-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d28384e48ca03b34e4e2beee0e230c39bbfb68994bb44927fec61ef3642900da", size = 10934177, upload-time = "2026-03-13T12:34:17.904Z" }, { url = "https://files.pythonhosted.org/packages/26/e4/f4a004e1952e6042f5bfeeb7d09cffb379270ef009d9f8568471863e86e6/ty-0.0.25-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fc1ef49cd6262eb9223ccf6e258ac899aaa53e7dc2151ba65a2c9fa248dfa75", size = 11028804, upload-time = "2026-03-24T22:32:39.088Z" },
{ url = "https://files.pythonhosted.org/packages/18/14/69a25a0cad493fb6a947302471b579a03516a3b00e7bece77fdc6b4afb9b/ty-0.0.23-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:559d9a299df793cb7a7902caed5eda8a720ff69164c31c979673e928f02251ee", size = 10752487, upload-time = "2026-03-13T12:34:31.785Z" }, { url = "https://files.pythonhosted.org/packages/56/32/5c15bb8ea20ed54d43c734f253a2a5da95d41474caecf4ef3682df9f68f5/ty-0.0.25-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad98da1393161096235a387cc36abecd31861060c68416761eccdb7c1bc326b", size = 10845246, upload-time = "2026-03-24T22:32:41.33Z" },
{ url = "https://files.pythonhosted.org/packages/9d/2a/42fc3cbccf95af0a62308ebed67e084798ab7a85ef073c9986ef18032743/ty-0.0.23-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:32a7b8a14a98e1d20a9d8d2af23637ed7efdb297ac1fa2450b8e465d05b94482", size = 10133007, upload-time = "2026-03-13T12:34:42.838Z" }, { url = "https://files.pythonhosted.org/packages/6f/fe/4ddd83e810c8682fcfada0d1c9d38936a34a024d32d7736075c1e53a038e/ty-0.0.25-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2d4336aa5381eb4eab107c3dec75fe22943a648ef6646f5a8431ef1c8cdabb66", size = 10233515, upload-time = "2026-03-24T22:32:17.012Z" },
{ url = "https://files.pythonhosted.org/packages/e1/69/307833f1b52fa3670e0a1d496e43ef7df556ecde838192d3fcb9b35e360d/ty-0.0.23-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6f803b9b9cca87af793467973b9abdd4b83e6b96d9b5e749d662cff7ead70b6d", size = 10169698, upload-time = "2026-03-13T12:34:12.351Z" }, { url = "https://files.pythonhosted.org/packages/ad/db/9fe54f6fb952e5b218f2e661e64ed656512edf2046cfbb9c159558e255db/ty-0.0.25-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e10ed39564227de2b7bd89398250b65daaedbef15a25cef8eee70078f5d9e0b2", size = 10275289, upload-time = "2026-03-24T22:32:28.21Z" },
{ url = "https://files.pythonhosted.org/packages/89/ae/5dd379ec22d0b1cba410d7af31c366fcedff191d5b867145913a64889f66/ty-0.0.23-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4a0bf086ec8e2197b7ea7ebfcf4be36cb6a52b235f8be61647ef1b2d99d6ffd3", size = 10346080, upload-time = "2026-03-13T12:34:40.012Z" }, { url = "https://files.pythonhosted.org/packages/b1/e0/090d7b33791b42bc7ec29463ac6a634738e16b289e027608ebe542682773/ty-0.0.25-py3-none-musllinux_1_2_i686.whl", hash = "sha256:aca04e9ed9b61c706064a1c0b71a247c3f92f373d0222103f3bc54b649421796", size = 10461195, upload-time = "2026-03-24T22:32:24.252Z" },
{ url = "https://files.pythonhosted.org/packages/98/c7/dfc83203d37998620bba9c4873a080c8850a784a8a46f56f8163c5b4e320/ty-0.0.23-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:252539c3fcd7aeb9b8d5c14e2040682c3e1d7ff640906d63fd2c4ce35865a4ba", size = 10848162, upload-time = "2026-03-13T12:34:45.421Z" }, { url = "https://files.pythonhosted.org/packages/42/31/5bf12bce01b80b72a7a4e627380779b41510e730f6000862a1d078e423f7/ty-0.0.25-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:18a5443e4ef339c1bd8c57fc13112c22080617ea582bfc22b497d82d65361325", size = 10931471, upload-time = "2026-03-24T22:32:14.985Z" },
{ url = "https://files.pythonhosted.org/packages/89/08/05481511cfbcc1fd834b6c67aaae090cb609a079189ddf2032139ccfc490/ty-0.0.23-py3-none-win32.whl", hash = "sha256:51b591d19eef23bbc3807aef77d38fa1f003c354e1da908aa80ea2dca0993f77", size = 9748283, upload-time = "2026-03-13T12:34:50.607Z" }, { url = "https://files.pythonhosted.org/packages/6a/5e/ab60c11f8a6dd2a0ae96daac83458ef2e9be1ae70481d1ad9c59d3eaf20f/ty-0.0.25-py3-none-win32.whl", hash = "sha256:a685b9a611b69195b5a557e05dbb7ebcd12815f6c32fb27fdf15edeb1fa33d8f", size = 9835974, upload-time = "2026-03-24T22:32:36.86Z" },
{ url = "https://files.pythonhosted.org/packages/31/2e/eaed4ff5c85e857a02415084c394e02c30476b65e158eec1938fdaa9a205/ty-0.0.23-py3-none-win_amd64.whl", hash = "sha256:1e137e955f05c501cfbb81dd2190c8fb7d01ec037c7e287024129c722a83c9ad", size = 10698355, upload-time = "2026-03-13T12:34:26.134Z" }, { url = "https://files.pythonhosted.org/packages/41/55/625acc2ef34646268bc2baa8fdd6e22fb47cd5965e2acd3be92c687fb6b0/ty-0.0.25-py3-none-win_amd64.whl", hash = "sha256:0d4d37a1f1ab7f2669c941c38c65144ff223eb51ececd7ccfc0d623afbc0f729", size = 10815449, upload-time = "2026-03-24T22:32:11.031Z" },
{ url = "https://files.pythonhosted.org/packages/91/29/b32cb7b4c7d56b9ed50117f8ad6e45834aec293e4cb14749daab4e9236d5/ty-0.0.23-py3-none-win_arm64.whl", hash = "sha256:a0399bd13fd2cd6683fd0a2d59b9355155d46546d8203e152c556ddbdeb20842", size = 10155890, upload-time = "2026-03-13T12:34:48.082Z" }, { url = "https://files.pythonhosted.org/packages/82/c7/0147bfb543df97740b45b222c54ff79ef20fa57f14b9d2c1dab3cd7d3faa/ty-0.0.25-py3-none-win_arm64.whl", hash = "sha256:d80b8cd965cbacbfd887ac2d985f5b6da09b7aa3569371e2894e0b30b26b89cd", size = 10225494, upload-time = "2026-03-24T22:32:30.611Z" },
] ]
[[package]] [[package]]
@@ -1264,7 +1291,7 @@ wheels = [
[[package]] [[package]]
name = "zensical" name = "zensical"
version = "0.0.27" version = "0.0.29"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "click" }, { name = "click" },
@@ -1274,18 +1301,18 @@ dependencies = [
{ name = "pymdown-extensions" }, { name = "pymdown-extensions" },
{ name = "pyyaml" }, { name = "pyyaml" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/8f/83/969152d927b522a0fed1f20b1730575d86b920ce51530b669d9fad4537de/zensical-0.0.27.tar.gz", hash = "sha256:6d8d74aba4a9f9505e6ba1c43d4c828ba4ff7bb1ff9b005e5174c5b92cf23419", size = 3841776, upload-time = "2026-03-13T17:56:14.494Z" } sdist = { url = "https://files.pythonhosted.org/packages/78/bd/5786ab618a60bd7469ab243a7fd2c9eecb0790c85c784abb8b97edb77a54/zensical-0.0.29.tar.gz", hash = "sha256:0d6282be7cb551e12d5806badf5e94c54a5e2f2cf07057a3e36d1eaf97c33ada", size = 3842641, upload-time = "2026-03-24T13:37:27.587Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/d8/fe/0335f1a521eb6c0ab96028bf67148390eb1d5c742c23e6a4b0f8381508bd/zensical-0.0.27-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d51ebf4b038f3eea99fd337119b99d92ad92bbe674372d5262e6dbbabbe4e9b5", size = 12262017, upload-time = "2026-03-13T17:55:36.403Z" }, { url = "https://files.pythonhosted.org/packages/4b/9c/8b681daa024abca9763017bec09ecee8008e110cae1254217c8dd22cc339/zensical-0.0.29-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:20ae0709ea14fce25ab33d0a82acdaf454a7a2e232a9ee20c019942205174476", size = 12311399, upload-time = "2026-03-24T13:36:53.809Z" },
{ url = "https://files.pythonhosted.org/packages/02/cb/ac24334fc7959b49496c97cb9d2bed82a8db8b84eafaf68189048e7fe69a/zensical-0.0.27-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:a627cd4599cf2c5a5a5205f0510667227d1fe4579b6f7445adba2d84bab9fbc8", size = 12147361, upload-time = "2026-03-13T17:55:39.736Z" }, { url = "https://files.pythonhosted.org/packages/81/ae/4ebb4d8bb2ef0164d473698b92f11caf431fc436e1625524acd5641102ca/zensical-0.0.29-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:599af3ba66fcd0146d7019f3493ed3c316051fae6c4d5599bc59f3a8f4b8a6f0", size = 12191845, upload-time = "2026-03-24T13:36:56.909Z" },
{ url = "https://files.pythonhosted.org/packages/a2/0f/31c981f61006fdaf0460d15bde1248a045178d67307bad61a4588414855d/zensical-0.0.27-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99cbc493022f8749504ef10c71772d360b705b4e2fd1511421393157d07bdccf", size = 12505771, upload-time = "2026-03-13T17:55:42.993Z" }, { url = "https://files.pythonhosted.org/packages/d5/35/67f89db06571a52283b3ecbe3bcf32fd3115ca50436b3ae177a948b83ea7/zensical-0.0.29-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eea7e48a00a71c0586e875079b5f83a070c33a147e52ad4383e4b63ab524332b", size = 12554105, upload-time = "2026-03-24T13:36:59.945Z" },
{ url = "https://files.pythonhosted.org/packages/30/1e/f6842c94ec89e5e9184f407dbbab2a497b444b28d4fb5b8df631894be896/zensical-0.0.27-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ecc20a85e8a23ad9ab809b2f268111321be7b2e214021b3b00f138936a87a434", size = 12455689, upload-time = "2026-03-13T17:55:46.055Z" }, { url = "https://files.pythonhosted.org/packages/7c/f6/ac79e5d9c18b28557c9ff1c7c23d695fbdd82645d69bfe02292f46d935e7/zensical-0.0.29-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:59a57db35542e98d2896b833de07d199320f8ada3b4e7ddccb7fe892292d8b74", size = 12498643, upload-time = "2026-03-24T13:37:02.376Z" },
{ url = "https://files.pythonhosted.org/packages/4c/ad/866c3336381cca7528e792469958fbe2e65b9206a2657bef3dd8ed4ac88b/zensical-0.0.27-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da11e0f0861dbd7d3b5e6fe1e3a53b361b2181c53f3abe9fb4cdf2ed0cea47bf", size = 12791263, upload-time = "2026-03-13T17:55:49.193Z" }, { url = "https://files.pythonhosted.org/packages/b1/70/5c22a96a69e0e91e569c26236918bb9bab1170f59b29ad04105ead64f199/zensical-0.0.29-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d42c2b2a96a80cf64c98ba7242f59ef95109914bd4c9499d7ebc12544663852c", size = 12854531, upload-time = "2026-03-24T13:37:04.962Z" },
{ url = "https://files.pythonhosted.org/packages/e5/df/fca5ed6bebdb61aa656dfa65cce4b4d03324a79c75857728230872fbdf7c/zensical-0.0.27-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e11d220181477040a4b22bf2b8678d5b0c878e7aae194fad4133561cb976d69", size = 12549796, upload-time = "2026-03-13T17:55:52.55Z" }, { url = "https://files.pythonhosted.org/packages/79/25/e32237a8fcb0ceae1ef8e192e7f8db53b38f1e48f1c7cdbacd0a7b713892/zensical-0.0.29-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2fca39c5f6b1782c77cf6591cf346357cabee85ebdb956c5ddc0fd5169f3d9", size = 12596828, upload-time = "2026-03-24T13:37:07.817Z" },
{ url = "https://files.pythonhosted.org/packages/4a/e2/43398b5ec64ed78204a5a5929a3990769fc0f6a3094a30395882bda1399a/zensical-0.0.27-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06b9e308aec8c5db1cd623e2e98e1b25c3f5cab6b25fcc9bac1e16c0c2b93837", size = 12683568, upload-time = "2026-03-13T17:55:56.151Z" }, { url = "https://files.pythonhosted.org/packages/ff/74/89ac909cbb258903ea53802c184e4986c17ce0ba79b1c7f77b7e78a2dce3/zensical-0.0.29-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dfc23a74ef672aa51088c080286319da1dc0b989cd5051e9e5e6d7d4abbc2fc1", size = 12732059, upload-time = "2026-03-24T13:37:11.651Z" },
{ url = "https://files.pythonhosted.org/packages/b3/3c/5c98f9964c7e30735aacd22a389dacec12bcc5bc8162c58e76b76d20db6e/zensical-0.0.27-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:682085155126965b091cb9f915cd2e4297383ac500122fd4b632cf4511733eb2", size = 12725214, upload-time = "2026-03-13T17:55:59.286Z" }, { url = "https://files.pythonhosted.org/packages/8c/31/2429de6a9328eed4acc7e9a3789f160294a15115be15f9870a0d02649302/zensical-0.0.29-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:c9336d4e4b232e3c9a70e30258e916dd7e60c0a2a08c8690065e60350c302028", size = 12768542, upload-time = "2026-03-24T13:37:14.39Z" },
{ url = "https://files.pythonhosted.org/packages/50/0f/ebaa159cac6d64b53bf7134420c2b43399acc7096cb79795be4fb10768fc/zensical-0.0.27-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:b367c285157c8e1099ae9e2b36564e07d3124bf891e96194a093bc836f3058d2", size = 12860416, upload-time = "2026-03-13T17:56:02.456Z" }, { url = "https://files.pythonhosted.org/packages/10/8a/55588b2a1dcbe86dad0404506c9ba367a06c663b1ff47147c84d26f7510e/zensical-0.0.29-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:30661148f0681199f3b598cbeb1d54f5cba773e54ae840bac639250d85907b84", size = 12917991, upload-time = "2026-03-24T13:37:16.795Z" },
{ url = "https://files.pythonhosted.org/packages/88/06/d82bfccbf5a1f43256dbc4d1984e398035a65f84f7c1e48b69ba15ea7281/zensical-0.0.27-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:847c881209e65e1db1291c59a9db77966ac50f7c66bf9a733c3c7832144dbfca", size = 12819533, upload-time = "2026-03-13T17:56:05.487Z" }, { url = "https://files.pythonhosted.org/packages/ec/5d/653901f0d3a3ca72daebc62746a148797f4e422cc3a2b66a4e6718e4398f/zensical-0.0.29-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6a566ac1fd4bfac5d711a7bd1ae06666712127c2718daa5083c7bf3f107e8578", size = 12868392, upload-time = "2026-03-24T13:37:19.42Z" },
{ url = "https://files.pythonhosted.org/packages/4d/1f/d25e421d91f063a9404c59dd032f65a67c7c700e9f5f40436ab98e533482/zensical-0.0.27-cp310-abi3-win32.whl", hash = "sha256:f31ec13c700794be3f9c0b7d90f09a7d23575a3a27c464994b9bb441a22d880b", size = 11862822, upload-time = "2026-03-13T17:56:08.933Z" }, { url = "https://files.pythonhosted.org/packages/29/58/d7449bc88a174b98daa3f2fbdfbdac3493768a557d8987e88bdaa6c78b1a/zensical-0.0.29-cp310-abi3-win32.whl", hash = "sha256:a231a3a02a3851741dc4d2de8910b5c39fe81e55bf026d8edf4d803e91a922fb", size = 11905486, upload-time = "2026-03-24T13:37:22.154Z" },
{ url = "https://files.pythonhosted.org/packages/5a/b5/5b86d126fcc42b96c5dbecde5074d6ea766a1a884e3b25b3524843c5e6a5/zensical-0.0.27-cp310-abi3-win_amd64.whl", hash = "sha256:9d3b1fca7ea99a7b2a8db272dd7f7839587c4ebf4f56b84ff01c97b3893ec9f8", size = 12059658, upload-time = "2026-03-13T17:56:11.859Z" }, { url = "https://files.pythonhosted.org/packages/f5/09/3fd082d016497c4d26ff20f42a8be2cc91e27191c0c5f3cd6507827f666f/zensical-0.0.29-cp310-abi3-win_amd64.whl", hash = "sha256:7145c5504380a344b8cd4586da815cdde77ef4a42319fa4f35e78250f01985af", size = 12101510, upload-time = "2026-03-24T13:37:24.77Z" },
] ]