mirror of https://github.com/fastapi/fastapi.git (synced 2025-12-29 17:19:00 -05:00)

Compare commits

23 Commits
| SHA1 |
|---|
| c4a1ab5036 |
| 22c7200ebb |
| 6e42bcd8ce |
| 6513d4daa1 |
| 1d93d531bc |
| c2c1cc8aec |
| 5289259275 |
| 5783910d0c |
| 026b43e5d3 |
| 6b591ddd7e |
| 10252b1937 |
| 55ec28b81b |
| 1cb4e25651 |
| eac57f6908 |
| e2cd8a4201 |
| 5c7dceb80f |
| d70ed5eceb |
| 261c11b218 |
| 75d4f9c098 |
| 19abc42efe |
| 09ab90ed35 |
| f58d846015 |
| caee1d3123 |
16 .github/workflows/pre-commit.yml (vendored)
@@ -21,14 +21,21 @@ jobs:
|
||||
name: Checkout PR for own repo
|
||||
if: env.IS_FORK == 'false'
|
||||
with:
|
||||
# To be able to commit it needs more than the last commit
|
||||
# To be able to commit it needs to fetch the head of the branch, not the
|
||||
# merge commit
|
||||
ref: ${{ github.head_ref }}
|
||||
# And it needs the full history to be able to compute diffs
|
||||
fetch-depth: 0
|
||||
# A token other than the default GITHUB_TOKEN is needed to be able to trigger CI
|
||||
token: ${{ secrets.PRE_COMMIT }}
|
||||
# pre-commit lite ci needs the default checkout configs to work
|
||||
- uses: actions/checkout@v5
|
||||
name: Checkout PR for fork
|
||||
if: env.IS_FORK == 'true'
|
||||
with:
|
||||
# To be able to commit it needs the head branch of the PR, the remote one
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 0
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
@@ -44,12 +51,9 @@ jobs:
|
||||
run: |
|
||||
uv venv
|
||||
uv pip install -r requirements.txt
|
||||
- name: Run pre-commit
|
||||
- name: Run prek - pre-commit
|
||||
id: precommit
|
||||
run: |
|
||||
# Fetch the base branch for comparison
|
||||
git fetch origin ${{ github.base_ref }}
|
||||
uvx pre-commit run --from-ref origin/${{ github.base_ref }} --to-ref HEAD --show-diff-on-failure
|
||||
run: uvx prek run --from-ref origin/${GITHUB_BASE_REF} --to-ref HEAD --show-diff-on-failure
|
||||
continue-on-error: true
|
||||
- name: Commit and push changes
|
||||
if: env.IS_FORK == 'false'
|
||||
|
||||
28 .github/workflows/test.yml (vendored)
@@ -44,31 +44,22 @@ jobs:
|
||||
matrix:
|
||||
os: [ windows-latest, macos-latest ]
|
||||
python-version: [ "3.14" ]
|
||||
pydantic-version: [ "pydantic>=2.0.2,<3.0.0" ]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
python-version: "3.9"
|
||||
pydantic-version: "pydantic>=1.10.0,<2.0.0"
|
||||
coverage: coverage
|
||||
- os: macos-latest
|
||||
python-version: "3.10"
|
||||
pydantic-version: "pydantic>=2.0.2,<3.0.0"
|
||||
coverage: coverage
|
||||
- os: windows-latest
|
||||
python-version: "3.11"
|
||||
pydantic-version: "pydantic>=1.10.0,<2.0.0"
|
||||
- os: ubuntu-latest
|
||||
python-version: "3.12"
|
||||
pydantic-version: "pydantic>=2.0.2,<3.0.0"
|
||||
- os: macos-latest
|
||||
coverage: coverage
|
||||
# Ubuntu with 3.13 needs coverage for CodSpeed benchmarks
|
||||
- os: ubuntu-latest
|
||||
python-version: "3.13"
|
||||
pydantic-version: "pydantic>=1.10.0,<2.0.0"
|
||||
- os: windows-latest
|
||||
python-version: "3.13"
|
||||
pydantic-version: "pydantic>=2.0.2,<3.0.0"
|
||||
coverage: coverage
|
||||
- os: ubuntu-latest
|
||||
python-version: "3.14"
|
||||
pydantic-version: "pydantic>=2.0.2,<3.0.0"
|
||||
coverage: coverage
|
||||
fail-fast: false
|
||||
runs-on: ${{ matrix.os }}
|
||||
@@ -92,14 +83,21 @@ jobs:
|
||||
pyproject.toml
|
||||
- name: Install Dependencies
|
||||
run: uv pip install -r requirements-tests.txt
|
||||
- name: Install Pydantic
|
||||
run: uv pip install "${{ matrix.pydantic-version }}"
|
||||
- run: mkdir coverage
|
||||
- name: Test
|
||||
run: bash scripts/test.sh
|
||||
env:
|
||||
COVERAGE_FILE: coverage/.coverage.${{ runner.os }}-py${{ matrix.python-version }}
|
||||
CONTEXT: ${{ runner.os }}-py${{ matrix.python-version }}
|
||||
- name: CodSpeed benchmarks
|
||||
if: matrix.os == 'ubuntu-latest' && matrix.python-version == '3.13'
|
||||
uses: CodSpeedHQ/action@v4
|
||||
env:
|
||||
COVERAGE_FILE: coverage/.coverage.${{ runner.os }}-py${{ matrix.python-version }}
|
||||
CONTEXT: ${{ runner.os }}-py${{ matrix.python-version }}
|
||||
with:
|
||||
mode: simulation
|
||||
run: coverage run -m pytest tests/ --codspeed
|
||||
# Do not store coverage for all possible combinations to avoid file size max errors in Smokeshow
|
||||
- name: Store coverage files
|
||||
if: matrix.coverage == 'coverage'
|
||||
|
||||
2 .gitignore (vendored)
@@ -31,3 +31,5 @@ archive.zip
|
||||
|
||||
# Ignore while the setup still depends on requirements.txt files
|
||||
uv.lock
|
||||
|
||||
.codspeed
|
||||
|
||||
@@ -5,6 +5,7 @@ repos:
|
||||
rev: v6.0.0
|
||||
hooks:
|
||||
- id: check-added-large-files
|
||||
args: ['--maxkb=750']
|
||||
- id: check-toml
|
||||
- id: check-yaml
|
||||
args:
|
||||
|
||||
@@ -153,48 +153,16 @@ And you could do this even if the data type in the request is not JSON.
|
||||
|
||||
For example, in this application we don't use FastAPI's integrated functionality to extract the JSON Schema from Pydantic models nor the automatic validation for JSON. In fact, we are declaring the request content type as YAML, not JSON:
|
||||
|
||||
//// tab | Pydantic v2
|
||||
|
||||
{* ../../docs_src/path_operation_advanced_configuration/tutorial007_py39.py hl[15:20, 22] *}
|
||||
|
||||
////
|
||||
|
||||
//// tab | Pydantic v1
|
||||
|
||||
{* ../../docs_src/path_operation_advanced_configuration/tutorial007_pv1_py39.py hl[15:20, 22] *}
|
||||
|
||||
////
|
||||
|
||||
/// info
|
||||
|
||||
In Pydantic version 1 the method to get the JSON Schema for a model was called `Item.schema()`, in Pydantic version 2, the method is called `Item.model_json_schema()`.
|
||||
|
||||
///
|
||||
|
||||
Nevertheless, although we are not using the default integrated functionality, we are still using a Pydantic model to manually generate the JSON Schema for the data that we want to receive in YAML.
|
||||
|
||||
Then we use the request directly, and extract the body as `bytes`. This means that FastAPI won't even try to parse the request payload as JSON.
|
||||
|
||||
And then in our code, we parse that YAML content directly, and then we are again using the same Pydantic model to validate the YAML content:
|
||||
|
||||
//// tab | Pydantic v2
|
||||
|
||||
{* ../../docs_src/path_operation_advanced_configuration/tutorial007_py39.py hl[24:31] *}
|
||||
|
||||
////
|
||||
|
||||
//// tab | Pydantic v1
|
||||
|
||||
{* ../../docs_src/path_operation_advanced_configuration/tutorial007_pv1_py39.py hl[24:31] *}
|
||||
|
||||
////
|
||||
|
||||
/// info
|
||||
|
||||
In Pydantic version 1 the method to parse and validate an object was `Item.parse_obj()`, in Pydantic version 2, the method is called `Item.model_validate()`.
|
||||
|
||||
///
|
||||
|
||||
/// tip
|
||||
|
||||
Here we reuse the same Pydantic model.
|
||||
|
||||
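The tutorial files referenced above contain the canonical code; as a rough, self-contained sketch of the same pattern (assuming PyYAML is installed and using an illustrative `Item` model), the Pydantic v2 flavor looks roughly like this:

```Python
import yaml
from fastapi import FastAPI, HTTPException, Request
from pydantic import BaseModel, ValidationError

app = FastAPI()


class Item(BaseModel):
    name: str
    tags: list[str] = []


@app.post(
    "/items/",
    openapi_extra={
        "requestBody": {
            "content": {
                # Declare the request content type as YAML in OpenAPI, using a
                # JSON Schema generated manually from the Pydantic model
                "application/x-yaml": {"schema": Item.model_json_schema()}
            },
            "required": True,
        }
    },
)
async def create_item(request: Request):
    # Read the raw body as bytes; FastAPI won't try to parse it as JSON
    raw_body = await request.body()
    try:
        data = yaml.safe_load(raw_body)
        # Reuse the same Pydantic model to validate the parsed YAML
        # (Pydantic v1 used Item.parse_obj() instead)
        item = Item.model_validate(data)
    except (yaml.YAMLError, ValidationError):
        raise HTTPException(status_code=422, detail="Invalid YAML body")
    return item
```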
@@ -46,12 +46,6 @@ $ pip install "fastapi[all]"
|
||||
|
||||
</div>
|
||||
|
||||
/// info
|
||||
|
||||
In Pydantic v1 it came included with the main package. Now it is distributed as this independent package so that you can choose to install it or not if you don't need that functionality.
|
||||
|
||||
///
|
||||
|
||||
### Create the `Settings` object { #create-the-settings-object }
|
||||
|
||||
Import `BaseSettings` from Pydantic and create a sub-class, very much like with a Pydantic model.
|
||||
@@ -60,24 +54,8 @@ The same way as with Pydantic models, you declare class attributes with type ann
|
||||
|
||||
You can use all the same validation features and tools you use for Pydantic models, like different data types and additional validations with `Field()`.
|
||||
|
||||
//// tab | Pydantic v2
|
||||
|
||||
{* ../../docs_src/settings/tutorial001_py39.py hl[2,5:8,11] *}
|
||||
|
||||
////
|
||||
|
||||
//// tab | Pydantic v1
|
||||
|
||||
/// info
|
||||
|
||||
In Pydantic v1 you would import `BaseSettings` directly from `pydantic` instead of from `pydantic_settings`.
|
||||
|
||||
///
|
||||
|
||||
{* ../../docs_src/settings/tutorial001_pv1_py39.py hl[2,5:8,11] *}
|
||||
|
||||
////
|
||||
|
||||
/// tip
|
||||
|
||||
If you want something quick to copy and paste, don't use this example, use the last one below.
|
||||
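For reference, a minimal sketch of what the included Pydantic v2 example boils down to (attribute names are illustrative; the `pydantic-settings` package must be installed):

```Python
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    # Class attributes with type annotations, just like a Pydantic model;
    # values are read from environment variables (e.g. ADMIN_EMAIL) when
    # not provided here.
    app_name: str = "Awesome API"
    admin_email: str
    items_per_user: int = 50


settings = Settings()
```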
@@ -215,8 +193,6 @@ APP_NAME="ChimichangApp"
|
||||
|
||||
And then update your `config.py` with:
|
||||
|
||||
//// tab | Pydantic v2
|
||||
|
||||
{* ../../docs_src/settings/app03_an_py39/config.py hl[9] *}
|
||||
|
||||
/// tip
|
||||
@@ -225,26 +201,6 @@ The `model_config` attribute is used just for Pydantic configuration. You can re
|
||||
|
||||
///
|
||||
|
||||
////
|
||||
|
||||
//// tab | Pydantic v1
|
||||
|
||||
{* ../../docs_src/settings/app03_an_py39/config_pv1.py hl[9:10] *}
|
||||
|
||||
/// tip
|
||||
|
||||
The `Config` class is used just for Pydantic configuration. You can read more at <a href="https://docs.pydantic.dev/1.10/usage/model_config/" class="external-link" target="_blank">Pydantic Model Config</a>.
|
||||
|
||||
///
|
||||
|
||||
////
|
||||
|
||||
/// info
|
||||
|
||||
In Pydantic version 1 the configuration was done in an internal class `Config`, in Pydantic version 2 it's done in an attribute `model_config`. This attribute takes a `dict`, and to get autocompletion and inline errors you can import and use `SettingsConfigDict` to define that `dict`.
|
||||
|
||||
///
|
||||
|
||||
Here we define the config `env_file` inside of your Pydantic `Settings` class, and set the value to the filename with the dotenv file we want to use.
|
||||
|
||||
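A small sketch of that `env_file` configuration in the Pydantic v2 style (Pydantic v1 used an inner `Config` class with `env_file` instead), assuming a `.env` file next to the app:

```Python
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    app_name: str = "Awesome API"

    # Pydantic configuration only: read values from the ".env" dotenv file
    model_config = SettingsConfigDict(env_file=".env")
```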
### Creating the `Settings` only once with `lru_cache` { #creating-the-settings-only-once-with-lru-cache }
|
||||
|
||||
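The pattern this heading refers to, building the `Settings` object a single time behind a cached dependency, looks roughly like this sketch (names are illustrative):

```Python
from functools import lru_cache
from typing import Annotated

from fastapi import Depends, FastAPI
from pydantic_settings import BaseSettings

app = FastAPI()


class Settings(BaseSettings):
    app_name: str = "Awesome API"


@lru_cache
def get_settings() -> Settings:
    # Executed only once; later calls return the cached Settings instance
    return Settings()


@app.get("/info")
async def info(settings: Annotated[Settings, Depends(get_settings)]):
    return {"app_name": settings.app_name}
```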
@@ -2,21 +2,23 @@

If you have an old FastAPI app, you might be using Pydantic version 1.

FastAPI has had support for either Pydantic v1 or v2 since version 0.100.0.

FastAPI version 0.100.0 had support for either Pydantic v1 or v2. It would use whichever you had installed.

If you had installed Pydantic v2, it would use it. If instead you had Pydantic v1, it would use that.

FastAPI version 0.119.0 introduced partial support for Pydantic v1 from inside of Pydantic v2 (as `pydantic.v1`), to facilitate the migration to v2.

Pydantic v1 is now deprecated and support for it will be removed in the next versions of FastAPI, you should **migrate to Pydantic v2**. This way you will get the latest features, improvements, and fixes.

FastAPI 0.126.0 dropped support for Pydantic v1, while still supporting `pydantic.v1` for a little while.

/// warning

Also, the Pydantic team stopped support for Pydantic v1 for the latest versions of Python, starting with **Python 3.14**.

The Pydantic team stopped support for Pydantic v1 for the latest versions of Python, starting with **Python 3.14**.

This includes `pydantic.v1`, which is no longer supported in Python 3.14 and above.

If you want to use the latest features of Python, you will need to make sure you use Pydantic v2.

///

If you have an old FastAPI app with Pydantic v1, here I'll show you how to migrate it to Pydantic v2, and the **new features in FastAPI 0.119.0** to help you with a gradual migration.

If you have an old FastAPI app with Pydantic v1, here I'll show you how to migrate it to Pydantic v2, and the **features in FastAPI 0.119.0** to help you with a gradual migration.

## Official Guide { #official-guide }

@@ -44,7 +46,7 @@ After this, you can run the tests and check if everything works. If it does, you

## Pydantic v1 in v2 { #pydantic-v1-in-v2 }

Pydantic v2 includes everything from Pydantic v1 as a submodule `pydantic.v1`.

Pydantic v2 includes everything from Pydantic v1 as a submodule `pydantic.v1`. But this is no longer supported in versions above Python 3.13.

This means that you can install the latest version of Pydantic v2 and import and use the old Pydantic v1 components from this submodule, as if you had the old Pydantic v1 installed.
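A small sketch of what that looks like in practice (on Python 3.13 or below; note that FastAPI 0.127.0 now emits deprecation warnings for `pydantic.v1`):

```Python
from pydantic import BaseModel  # Pydantic v2 model
from pydantic.v1 import BaseModel as BaseModelV1  # legacy v1 model via the submodule


class NewItem(BaseModel):
    name: str


class LegacyItem(BaseModelV1):
    name: str


print(NewItem(name="a").model_dump())  # v2 API
print(LegacyItem(name="a").dict())  # v1 API, still available through pydantic.v1
```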
@@ -1,6 +1,6 @@

# Separate OpenAPI Schemas for Input and Output or Not { #separate-openapi-schemas-for-input-and-output-or-not }

When using **Pydantic v2**, the generated OpenAPI is a bit more exact and **correct** than before. 😎

Since **Pydantic v2** was released, the generated OpenAPI is a bit more exact and **correct** than before. 😎

In fact, in some cases, it will even have **two JSON Schemas** in OpenAPI for the same Pydantic model, for input and output, depending on if they have **default values**.

@@ -100,5 +100,3 @@ And now there will be one single schema for input and output for the model, only

<div class="screenshot">
<img src="/img/tutorial/separate-openapi-schemas/image05.png">
</div>

This is the same behavior as in Pydantic v1. 🤓
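The single-schema behavior mentioned above can be configured with FastAPI's documented `separate_input_output_schemas` flag; a minimal sketch (the `Item` model is illustrative):

```Python
from fastapi import FastAPI
from pydantic import BaseModel

# Disable the separate input/output JSON Schemas, keeping one schema per model
app = FastAPI(separate_input_output_schemas=False)


class Item(BaseModel):
    name: str
    description: str | None = None


@app.post("/items/")
async def create_item(item: Item) -> Item:
    return item
```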
@@ -7,6 +7,43 @@ hide:

## Latest Changes

## 0.127.0

### Breaking Changes

* 🔊 Add deprecation warnings when using `pydantic.v1`. PR [#14583](https://github.com/fastapi/fastapi/pull/14583) by [@tiangolo](https://github.com/tiangolo).

### Translations

* 🔧 Add LLM prompt file for Korean, generated from the existing translations. PR [#14546](https://github.com/fastapi/fastapi/pull/14546) by [@tiangolo](https://github.com/tiangolo).
* 🔧 Add LLM prompt file for Japanese, generated from the existing translations. PR [#14545](https://github.com/fastapi/fastapi/pull/14545) by [@tiangolo](https://github.com/tiangolo).

### Internal

* ⬆️ Upgrade OpenAI model for translations to gpt-5.2. PR [#14579](https://github.com/fastapi/fastapi/pull/14579) by [@tiangolo](https://github.com/tiangolo).

## 0.126.0

### Upgrades

* ➖ Drop support for Pydantic v1, keeping short temporary support for Pydantic v2's `pydantic.v1`. PR [#14575](https://github.com/fastapi/fastapi/pull/14575) by [@tiangolo](https://github.com/tiangolo).
    * The minimum version of Pydantic installed is now `pydantic >=2.7.0`.
    * The `standard` dependencies now include `pydantic-settings >=2.0.0` and `pydantic-extra-types >=2.0.0`.

### Docs

* 📝 Fix duplicated variable in `docs_src/python_types/tutorial005_py39.py`. PR [#14565](https://github.com/fastapi/fastapi/pull/14565) by [@paras-verma7454](https://github.com/paras-verma7454).

### Translations

* 🔧 Add LLM prompt file for Ukrainian, generated from the existing translations. PR [#14548](https://github.com/fastapi/fastapi/pull/14548) by [@tiangolo](https://github.com/tiangolo).

### Internal

* 🔧 Tweak pre-commit to allow committing release-notes. PR [#14577](https://github.com/fastapi/fastapi/pull/14577) by [@tiangolo](https://github.com/tiangolo).
* ⬆️ Use prek as a pre-commit alternative. PR [#14572](https://github.com/fastapi/fastapi/pull/14572) by [@tiangolo](https://github.com/tiangolo).
* 👷 Add performance tests with CodSpeed. PR [#14558](https://github.com/fastapi/fastapi/pull/14558) by [@tiangolo](https://github.com/tiangolo).

## 0.125.0

### Breaking Changes
@@ -50,14 +50,6 @@ If you want to receive partial updates, it's very useful to use the parameter `e
|
||||
|
||||
Like `item.model_dump(exclude_unset=True)`.
|
||||
|
||||
/// info
|
||||
|
||||
In Pydantic v1 the method was called `.dict()`, it was deprecated (but still supported) in Pydantic v2, and renamed to `.model_dump()`.
|
||||
|
||||
The examples here use `.dict()` for compatibility with Pydantic v1, but you should use `.model_dump()` instead if you can use Pydantic v2.
|
||||
|
||||
///
|
||||
|
||||
That would generate a `dict` with only the data that was set when creating the `item` model, excluding default values.
|
||||
|
||||
Then you can use this to generate a `dict` with only the data that was set (sent in the request), omitting default values:
|
||||
@@ -68,14 +60,6 @@ Then you can use this to generate a `dict` with only the data that was set (sent
|
||||
|
||||
Now, you can create a copy of the existing model using `.model_copy()`, and pass the `update` parameter with a `dict` containing the data to update.
|
||||
|
||||
/// info
|
||||
|
||||
In Pydantic v1 the method was called `.copy()`, it was deprecated (but still supported) in Pydantic v2, and renamed to `.model_copy()`.
|
||||
|
||||
The examples here use `.copy()` for compatibility with Pydantic v1, but you should use `.model_copy()` instead if you can use Pydantic v2.
|
||||
|
||||
///
|
||||
|
||||
Like `stored_item_model.model_copy(update=update_data)`:
|
||||
|
||||
{* ../../docs_src/body_updates/tutorial002_py310.py hl[33] *}
|
||||
|
||||
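Condensed into one self-contained sketch (the `Item` model and the in-memory `items` store are illustrative), the partial-update recipe described above is roughly:

```Python
from fastapi import FastAPI
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel

app = FastAPI()


class Item(BaseModel):
    name: str | None = None
    price: float | None = None
    tax: float = 10.5


items = {"foo": {"name": "Foo", "price": 50.2}}


@app.patch("/items/{item_id}", response_model=Item)
async def update_item(item_id: str, item: Item):
    stored_item_model = Item(**items[item_id])
    # Only the fields actually sent in the request body
    update_data = item.model_dump(exclude_unset=True)
    # Copy the stored model, replacing just those fields
    updated_item = stored_item_model.model_copy(update=update_data)
    items[item_id] = jsonable_encoder(updated_item)
    return updated_item
```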
@@ -128,14 +128,6 @@ Inside of the function, you can access all the attributes of the model object di
|
||||
|
||||
{* ../../docs_src/body/tutorial002_py310.py *}
|
||||
|
||||
/// info
|
||||
|
||||
In Pydantic v1 the method was called `.dict()`, it was deprecated (but still supported) in Pydantic v2, and renamed to `.model_dump()`.
|
||||
|
||||
The examples here use `.dict()` for compatibility with Pydantic v1, but you should use `.model_dump()` instead if you can use Pydantic v2.
|
||||
|
||||
///
|
||||
|
||||
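A short sketch of using the model attributes and `.model_dump()` inside the path operation function, along the lines of the tutorial file referenced above (the extra `price_with_tax` key is the tutorial's example):

```Python
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class Item(BaseModel):
    name: str
    price: float
    tax: float | None = None


@app.post("/items/")
async def create_item(item: Item):
    item_dict = item.model_dump()  # Pydantic v2; v1 used .dict()
    if item.tax is not None:
        # Access model attributes directly to compute a derived value
        item_dict.update({"price_with_tax": item.price + item.tax})
    return item_dict
```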
## Request body + path parameters { #request-body-path-parameters }
|
||||
|
||||
You can declare path parameters and request body at the same time.
|
||||
|
||||
@@ -22,22 +22,13 @@ Here's a general idea of how the models could look like with their password fiel
|
||||
|
||||
{* ../../docs_src/extra_models/tutorial001_py310.py hl[7,9,14,20,22,27:28,31:33,38:39] *}
|
||||
|
||||
### About `**user_in.model_dump()` { #about-user-in-model-dump }
|
||||
|
||||
/// info
|
||||
|
||||
In Pydantic v1 the method was called `.dict()`, it was deprecated (but still supported) in Pydantic v2, and renamed to `.model_dump()`.
|
||||
|
||||
The examples here use `.dict()` for compatibility with Pydantic v1, but you should use `.model_dump()` instead if you can use Pydantic v2.
|
||||
|
||||
///
|
||||
|
||||
### About `**user_in.dict()` { #about-user-in-dict }
|
||||
|
||||
#### Pydantic's `.dict()` { #pydantics-dict }
|
||||
#### Pydantic's `.model_dump()` { #pydantics-model-dump }
|
||||
|
||||
`user_in` is a Pydantic model of class `UserIn`.
|
||||
|
||||
Pydantic models have a `.dict()` method that returns a `dict` with the model's data.
|
||||
Pydantic models have a `.model_dump()` method that returns a `dict` with the model's data.
|
||||
|
||||
So, if we create a Pydantic object `user_in` like:
|
||||
|
||||
@@ -48,7 +39,7 @@ user_in = UserIn(username="john", password="secret", email="john.doe@example.com
|
||||
and then we call:
|
||||
|
||||
```Python
|
||||
user_dict = user_in.dict()
|
||||
user_dict = user_in.model_dump()
|
||||
```
|
||||
|
||||
we now have a `dict` with the data in the variable `user_dict` (it's a `dict` instead of a Pydantic model object).
|
||||
@@ -104,20 +95,20 @@ UserInDB(
|
||||
|
||||
#### A Pydantic model from the contents of another { #a-pydantic-model-from-the-contents-of-another }
|
||||
|
||||
As in the example above we got `user_dict` from `user_in.dict()`, this code:
|
||||
As in the example above we got `user_dict` from `user_in.model_dump()`, this code:
|
||||
|
||||
```Python
|
||||
user_dict = user_in.dict()
|
||||
user_dict = user_in.model_dump()
|
||||
UserInDB(**user_dict)
|
||||
```
|
||||
|
||||
would be equivalent to:
|
||||
|
||||
```Python
|
||||
UserInDB(**user_in.dict())
|
||||
UserInDB(**user_in.model_dump())
|
||||
```
|
||||
|
||||
...because `user_in.dict()` is a `dict`, and then we make Python "unpack" it by passing it to `UserInDB` prefixed with `**`.
|
||||
...because `user_in.model_dump()` is a `dict`, and then we make Python "unpack" it by passing it to `UserInDB` prefixed with `**`.
|
||||
|
||||
So, we get a Pydantic model from the data in another Pydantic model.
|
||||
|
||||
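Putting those pieces together, a compact sketch of building one model from the contents of another plus an extra keyword argument (the `UserIn`/`UserInDB` names follow the docs; the tutorial uses `EmailStr`, plain `str` is used here to avoid the extra dependency):

```Python
from pydantic import BaseModel


class UserIn(BaseModel):
    username: str
    password: str
    email: str


class UserInDB(BaseModel):
    username: str
    email: str
    hashed_password: str


user_in = UserIn(username="john", password="secret", email="john.doe@example.com")
# Unpack the dict from .model_dump() and add the extra field
user_in_db = UserInDB(**user_in.model_dump(), hashed_password="fakehashed-secret")
print(user_in_db)
```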
@@ -126,7 +117,7 @@ So, we get a Pydantic model from the data in another Pydantic model.
|
||||
And then adding the extra keyword argument `hashed_password=hashed_password`, like in:
|
||||
|
||||
```Python
|
||||
UserInDB(**user_in.dict(), hashed_password=hashed_password)
|
||||
UserInDB(**user_in.model_dump(), hashed_password=hashed_password)
|
||||
```
|
||||
|
||||
...ends up being like:
|
||||
@@ -181,7 +172,6 @@ When defining a <a href="https://docs.pydantic.dev/latest/concepts/types/#unions
|
||||
|
||||
{* ../../docs_src/extra_models/tutorial003_py310.py hl[1,14:15,18:20,33] *}
|
||||
|
||||
|
||||
### `Union` in Python 3.10 { #union-in-python-3-10 }
|
||||
|
||||
In this example we pass `Union[PlaneItem, CarItem]` as the value of the argument `response_model`.
|
||||
@@ -204,7 +194,6 @@ For that, use the standard Python `typing.List` (or just `list` in Python 3.9 an
|
||||
|
||||
{* ../../docs_src/extra_models/tutorial004_py39.py hl[18] *}
|
||||
|
||||
|
||||
## Response with arbitrary `dict` { #response-with-arbitrary-dict }
|
||||
|
||||
You can also declare a response using a plain arbitrary `dict`, declaring just the type of the keys and values, without using a Pydantic model.
|
||||
@@ -215,7 +204,6 @@ In this case, you can use `typing.Dict` (or just `dict` in Python 3.9 and above)
|
||||
|
||||
{* ../../docs_src/extra_models/tutorial005_py39.py hl[6] *}
|
||||
|
||||
|
||||
## Recap { #recap }
|
||||
|
||||
Use multiple Pydantic models and inherit freely for each case.
|
||||
|
||||
@@ -206,20 +206,6 @@ If you feel lost with all these **"regular expression"** ideas, don't worry. The

Now you know that whenever you need them you can use them in **FastAPI**.

### Pydantic v1 `regex` instead of `pattern` { #pydantic-v1-regex-instead-of-pattern }

Before Pydantic version 2 and before FastAPI 0.100.0, the parameter was called `regex` instead of `pattern`, but it's now deprecated.

You could still see some code using it:

//// tab | Pydantic v1

{* ../../docs_src/query_params_str_validations/tutorial004_regex_an_py310.py hl[11] *}

////

But know that this is deprecated and it should be updated to use the new parameter `pattern`. 🤓
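For comparison with the deprecated `regex` example above, the current `pattern` parameter looks like this (mirroring the query validations tutorial; Python 3.10+ union syntax):

```Python
from typing import Annotated

from fastapi import FastAPI, Query

app = FastAPI()


@app.get("/items/")
async def read_items(
    q: Annotated[
        str | None,
        Query(min_length=3, max_length=50, pattern="^fixedquery$"),
    ] = None,
):
    return {"q": q}
```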
## Default values { #default-values }
|
||||
|
||||
You can, of course, use default values other than `None`.
|
||||
|
||||
@@ -252,20 +252,6 @@ So, if you send a request to that *path operation* for the item with ID `foo`, t
|
||||
|
||||
/// info
|
||||
|
||||
In Pydantic v1 the method was called `.dict()`, it was deprecated (but still supported) in Pydantic v2, and renamed to `.model_dump()`.
|
||||
|
||||
The examples here use `.dict()` for compatibility with Pydantic v1, but you should use `.model_dump()` instead if you can use Pydantic v2.
|
||||
|
||||
///
|
||||
|
||||
/// info
|
||||
|
||||
FastAPI uses Pydantic model's `.dict()` with <a href="https://docs.pydantic.dev/1.10/usage/exporting_models/#modeldict" class="external-link" target="_blank">its `exclude_unset` parameter</a> to achieve this.
|
||||
|
||||
///
|
||||
|
||||
/// info
|
||||
|
||||
You can also use:
|
||||
|
||||
* `response_model_exclude_defaults=True`
|
||||
|
||||
@@ -8,36 +8,14 @@ Here are several ways to do it.
|
||||
|
||||
You can declare `examples` for a Pydantic model that will be added to the generated JSON Schema.
|
||||
|
||||
//// tab | Pydantic v2
|
||||
|
||||
{* ../../docs_src/schema_extra_example/tutorial001_py310.py hl[13:24] *}
|
||||
|
||||
////
|
||||
|
||||
//// tab | Pydantic v1
|
||||
|
||||
{* ../../docs_src/schema_extra_example/tutorial001_pv1_py310.py hl[13:23] *}
|
||||
|
||||
////
|
||||
|
||||
That extra info will be added as-is to the output **JSON Schema** for that model, and it will be used in the API docs.
|
||||
|
||||
//// tab | Pydantic v2
|
||||
|
||||
In Pydantic version 2, you would use the attribute `model_config`, that takes a `dict` as described in <a href="https://docs.pydantic.dev/latest/api/config/" class="external-link" target="_blank">Pydantic's docs: Configuration</a>.
|
||||
You can use the attribute `model_config` that takes a `dict` as described in <a href="https://docs.pydantic.dev/latest/api/config/" class="external-link" target="_blank">Pydantic's docs: Configuration</a>.
|
||||
|
||||
You can set `"json_schema_extra"` with a `dict` containing any additional data you would like to show up in the generated JSON Schema, including `examples`.
|
||||
|
||||
////
|
||||
|
||||
//// tab | Pydantic v1
|
||||
|
||||
In Pydantic version 1, you would use an internal class `Config` and `schema_extra`, as described in <a href="https://docs.pydantic.dev/1.10/usage/schema/#schema-customization" class="external-link" target="_blank">Pydantic's docs: Schema customization</a>.
|
||||
|
||||
You can set `schema_extra` with a `dict` containing any additional data you would like to show up in the generated JSON Schema, including `examples`.
|
||||
|
||||
////
|
||||
|
||||
/// tip
|
||||
|
||||
You could use the same technique to extend the JSON Schema and add your own custom extra info.
|
||||
|
||||
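A minimal sketch of the Pydantic v2 `model_config` / `json_schema_extra` approach described above (field names and example values mirror the tutorial's `Item` model):

```Python
from pydantic import BaseModel


class Item(BaseModel):
    name: str
    description: str | None = None
    price: float
    tax: float | None = None

    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "name": "Foo",
                    "description": "A very nice Item",
                    "price": 35.4,
                    "tax": 3.2,
                }
            ]
        }
    }
```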
47 docs/ja/llm-prompt.md (Normal file)
@@ -0,0 +1,47 @@
|
||||
### Target language
|
||||
|
||||
Translate to Japanese (日本語).
|
||||
|
||||
Language code: ja.
|
||||
|
||||
### Grammar and tone
|
||||
|
||||
1) Use polite, instructional Japanese (です/ます調).
|
||||
2) Keep the tone concise and technical (match existing Japanese FastAPI docs).
|
||||
|
||||
### Headings
|
||||
|
||||
1) Follow the existing Japanese style: short, descriptive headings (often noun phrases), e.g. 「チェック」.
|
||||
2) Do not add a trailing period at the end of headings.
|
||||
|
||||
### Quotes
|
||||
|
||||
1) Prefer Japanese corner brackets 「」 in normal prose when quoting a term.
|
||||
2) Do not change quotes inside inline code, code blocks, URLs, or file paths.
|
||||
|
||||
### Ellipsis
|
||||
|
||||
1) Keep ellipsis style consistent with existing Japanese docs (commonly `...`).
|
||||
2) Never change `...` in code, URLs, or CLI examples.
|
||||
|
||||
### Preferred translations / glossary
|
||||
|
||||
Use the following preferred translations when they apply in documentation prose:
|
||||
|
||||
- request (HTTP): リクエスト
|
||||
- response (HTTP): レスポンス
|
||||
- path operation: パスオペレーション
|
||||
- path operation function: パスオペレーション関数
|
||||
|
||||
### `///` admonitions
|
||||
|
||||
1) Keep the admonition keyword in English (do not translate `note`, `tip`, etc.).
|
||||
2) If a title is present, prefer these canonical titles:
|
||||
|
||||
- `/// note | 備考`
|
||||
- `/// note | 技術詳細`
|
||||
- `/// tip | 豆知識`
|
||||
- `/// warning | 注意`
|
||||
- `/// info | 情報`
|
||||
- `/// check | 確認`
|
||||
- `/// danger | 警告`
|
||||
51 docs/ko/llm-prompt.md (Normal file)
@@ -0,0 +1,51 @@
|
||||
### Target language
|
||||
|
||||
Translate to Korean (한국어).
|
||||
|
||||
Language code: ko.
|
||||
|
||||
### Grammar and tone
|
||||
|
||||
1) Use polite, instructional Korean (e.g. 합니다/하세요 style).
|
||||
2) Keep the tone consistent with the existing Korean FastAPI docs.
|
||||
|
||||
### Headings
|
||||
|
||||
1) Follow existing Korean heading style (short, action-oriented headings like “확인하기”).
|
||||
2) Do not add trailing punctuation to headings.
|
||||
|
||||
### Quotes
|
||||
|
||||
1) Keep quote style consistent with the existing Korean docs.
|
||||
2) Never change quotes inside inline code, code blocks, URLs, or file paths.
|
||||
|
||||
### Ellipsis
|
||||
|
||||
1) Keep ellipsis style consistent with existing Korean docs (often `...`).
|
||||
2) Never change `...` in code, URLs, or CLI examples.
|
||||
|
||||
### Preferred translations / glossary
|
||||
|
||||
Use the following preferred translations when they apply in documentation prose:
|
||||
|
||||
- request (HTTP): 요청
|
||||
- response (HTTP): 응답
|
||||
- path operation: 경로 처리
|
||||
- path operation function: 경로 처리 함수
|
||||
|
||||
### `///` admonitions
|
||||
|
||||
1) Keep the admonition keyword in English (do not translate `note`, `tip`, etc.).
|
||||
2) If a title is present, prefer these canonical titles:
|
||||
|
||||
- `/// note | 참고`
|
||||
- `/// tip | 팁`
|
||||
- `/// warning | 경고`
|
||||
- `/// info | 정보`
|
||||
- `/// danger | 위험`
|
||||
- `/// note Technical Details | 기술 세부사항`
|
||||
- `/// check | 확인`
|
||||
Notes:
|
||||
|
||||
- `details` blocks exist in Korean docs; keep `/// details` as-is and translate only the title after `|`.
|
||||
- Example canonical title used: `/// details | 상세 설명`
|
||||
46 docs/uk/llm-prompt.md (Normal file)
@@ -0,0 +1,46 @@
|
||||
### Target language
|
||||
|
||||
Translate to Ukrainian (українська).
|
||||
|
||||
Language code: uk.
|
||||
|
||||
### Grammar and tone
|
||||
|
||||
1) Use polite/formal address consistent with existing Ukrainian docs (use “ви/ваш”).
|
||||
2) Keep the tone concise and technical.
|
||||
|
||||
### Headings
|
||||
|
||||
1) Follow existing Ukrainian heading style; keep headings short and instructional.
|
||||
2) Do not add trailing punctuation to headings.
|
||||
|
||||
### Quotes
|
||||
|
||||
1) Prefer Ukrainian guillemets «…» for quoted terms in prose, matching existing Ukrainian docs.
|
||||
2) Never change quotes inside inline code, code blocks, URLs, or file paths.
|
||||
|
||||
### Ellipsis
|
||||
|
||||
1) Keep ellipsis style consistent with existing Ukrainian docs.
|
||||
2) Never change `...` in code, URLs, or CLI examples.
|
||||
|
||||
### Preferred translations / glossary
|
||||
|
||||
Use the following preferred translations when they apply in documentation prose:
|
||||
|
||||
- request (HTTP): запит
|
||||
- response (HTTP): відповідь
|
||||
- path operation: операція шляху
|
||||
- path operation function: функція операції шляху
|
||||
|
||||
### `///` admonitions
|
||||
|
||||
1) Keep the admonition keyword in English (do not translate `note`, `tip`, etc.).
|
||||
2) If a title is present, prefer these canonical titles (choose one canonical form where variants exist):
|
||||
|
||||
- `/// note | Примітка`
|
||||
- `/// note | Технічні деталі`
|
||||
- `/// tip | Порада`
|
||||
- `/// warning | Попередження`
|
||||
- `/// info | Інформація`
|
||||
- `/// danger | Обережно`
|
||||
@@ -14,7 +14,7 @@ app = FastAPI()
|
||||
|
||||
@app.post("/items/")
|
||||
async def create_item(item: Item):
|
||||
item_dict = item.dict()
|
||||
item_dict = item.model_dump()
|
||||
if item.tax is not None:
|
||||
price_with_tax = item.price + item.tax
|
||||
item_dict.update({"price_with_tax": price_with_tax})
|
||||
|
||||
@@ -16,7 +16,7 @@ app = FastAPI()
|
||||
|
||||
@app.post("/items/")
|
||||
async def create_item(item: Item):
|
||||
item_dict = item.dict()
|
||||
item_dict = item.model_dump()
|
||||
if item.tax is not None:
|
||||
price_with_tax = item.price + item.tax
|
||||
item_dict.update({"price_with_tax": price_with_tax})
|
||||
|
||||
@@ -14,4 +14,4 @@ app = FastAPI()
|
||||
|
||||
@app.put("/items/{item_id}")
|
||||
async def update_item(item_id: int, item: Item):
|
||||
return {"item_id": item_id, **item.dict()}
|
||||
return {"item_id": item_id, **item.model_dump()}
|
||||
|
||||
@@ -16,4 +16,4 @@ app = FastAPI()
|
||||
|
||||
@app.put("/items/{item_id}")
|
||||
async def update_item(item_id: int, item: Item):
|
||||
return {"item_id": item_id, **item.dict()}
|
||||
return {"item_id": item_id, **item.model_dump()}
|
||||
|
||||
@@ -14,7 +14,7 @@ app = FastAPI()
|
||||
|
||||
@app.put("/items/{item_id}")
|
||||
async def update_item(item_id: int, item: Item, q: str | None = None):
|
||||
result = {"item_id": item_id, **item.dict()}
|
||||
result = {"item_id": item_id, **item.model_dump()}
|
||||
if q:
|
||||
result.update({"q": q})
|
||||
return result
|
||||
|
||||
@@ -16,7 +16,7 @@ app = FastAPI()
|
||||
|
||||
@app.put("/items/{item_id}")
|
||||
async def update_item(item_id: int, item: Item, q: Union[str, None] = None):
|
||||
result = {"item_id": item_id, **item.dict()}
|
||||
result = {"item_id": item_id, **item.model_dump()}
|
||||
if q:
|
||||
result.update({"q": q})
|
||||
return result
|
||||
|
||||
@@ -29,7 +29,7 @@ async def read_item(item_id: str):
|
||||
async def update_item(item_id: str, item: Item):
|
||||
stored_item_data = items[item_id]
|
||||
stored_item_model = Item(**stored_item_data)
|
||||
update_data = item.dict(exclude_unset=True)
|
||||
updated_item = stored_item_model.copy(update=update_data)
|
||||
update_data = item.model_dump(exclude_unset=True)
|
||||
updated_item = stored_item_model.model_copy(update=update_data)
|
||||
items[item_id] = jsonable_encoder(updated_item)
|
||||
return updated_item
|
||||
|
||||
@@ -31,7 +31,7 @@ async def read_item(item_id: str):
|
||||
async def update_item(item_id: str, item: Item):
|
||||
stored_item_data = items[item_id]
|
||||
stored_item_model = Item(**stored_item_data)
|
||||
update_data = item.dict(exclude_unset=True)
|
||||
updated_item = stored_item_model.copy(update=update_data)
|
||||
update_data = item.model_dump(exclude_unset=True)
|
||||
updated_item = stored_item_model.model_copy(update=update_data)
|
||||
items[item_id] = jsonable_encoder(updated_item)
|
||||
return updated_item
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import Cookie, FastAPI
|
||||
from pydantic import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class Cookies(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
session_id: str
|
||||
fatebook_tracker: str | None = None
|
||||
googall_tracker: str | None = None
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(cookies: Annotated[Cookies, Cookie()]):
|
||||
return cookies
|
||||
@@ -1,20 +0,0 @@
|
||||
from typing import Annotated, Union
|
||||
|
||||
from fastapi import Cookie, FastAPI
|
||||
from pydantic import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class Cookies(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
session_id: str
|
||||
fatebook_tracker: Union[str, None] = None
|
||||
googall_tracker: Union[str, None] = None
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(cookies: Annotated[Cookies, Cookie()]):
|
||||
return cookies
|
||||
@@ -1,18 +0,0 @@
|
||||
from fastapi import Cookie, FastAPI
|
||||
from pydantic import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class Cookies(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
session_id: str
|
||||
fatebook_tracker: str | None = None
|
||||
googall_tracker: str | None = None
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(cookies: Cookies = Cookie()):
|
||||
return cookies
|
||||
@@ -1,20 +0,0 @@
|
||||
from typing import Union
|
||||
|
||||
from fastapi import Cookie, FastAPI
|
||||
from pydantic import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class Cookies(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
session_id: str
|
||||
fatebook_tracker: Union[str, None] = None
|
||||
googall_tracker: Union[str, None] = None
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(cookies: Cookies = Cookie()):
|
||||
return cookies
|
||||
@@ -30,7 +30,7 @@ def fake_password_hasher(raw_password: str):
|
||||
|
||||
def fake_save_user(user_in: UserIn):
|
||||
hashed_password = fake_password_hasher(user_in.password)
|
||||
user_in_db = UserInDB(**user_in.dict(), hashed_password=hashed_password)
|
||||
user_in_db = UserInDB(**user_in.model_dump(), hashed_password=hashed_password)
|
||||
print("User saved! ..not really")
|
||||
return user_in_db
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ def fake_password_hasher(raw_password: str):
|
||||
|
||||
def fake_save_user(user_in: UserIn):
|
||||
hashed_password = fake_password_hasher(user_in.password)
|
||||
user_in_db = UserInDB(**user_in.dict(), hashed_password=hashed_password)
|
||||
user_in_db = UserInDB(**user_in.model_dump(), hashed_password=hashed_password)
|
||||
print("User saved! ..not really")
|
||||
return user_in_db
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ def fake_password_hasher(raw_password: str):
|
||||
|
||||
def fake_save_user(user_in: UserIn):
|
||||
hashed_password = fake_password_hasher(user_in.password)
|
||||
user_in_db = UserInDB(**user_in.dict(), hashed_password=hashed_password)
|
||||
user_in_db = UserInDB(**user_in.model_dump(), hashed_password=hashed_password)
|
||||
print("User saved! ..not really")
|
||||
return user_in_db
|
||||
|
||||
|
||||
@@ -30,7 +30,7 @@ def fake_password_hasher(raw_password: str):
|
||||
|
||||
def fake_save_user(user_in: UserIn):
|
||||
hashed_password = fake_password_hasher(user_in.password)
|
||||
user_in_db = UserInDB(**user_in.dict(), hashed_password=hashed_password)
|
||||
user_in_db = UserInDB(**user_in.model_dump(), hashed_password=hashed_password)
|
||||
print("User saved! ..not really")
|
||||
return user_in_db
|
||||
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import FastAPI, Header
|
||||
from pydantic import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class CommonHeaders(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
host: str
|
||||
save_data: bool
|
||||
if_modified_since: str | None = None
|
||||
traceparent: str | None = None
|
||||
x_tag: list[str] = []
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(headers: Annotated[CommonHeaders, Header()]):
|
||||
return headers
|
||||
@@ -1,22 +0,0 @@
|
||||
from typing import Annotated, Union
|
||||
|
||||
from fastapi import FastAPI, Header
|
||||
from pydantic import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class CommonHeaders(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
host: str
|
||||
save_data: bool
|
||||
if_modified_since: Union[str, None] = None
|
||||
traceparent: Union[str, None] = None
|
||||
x_tag: list[str] = []
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(headers: Annotated[CommonHeaders, Header()]):
|
||||
return headers
|
||||
@@ -1,20 +0,0 @@
|
||||
from fastapi import FastAPI, Header
|
||||
from pydantic import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class CommonHeaders(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
host: str
|
||||
save_data: bool
|
||||
if_modified_since: str | None = None
|
||||
traceparent: str | None = None
|
||||
x_tag: list[str] = []
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(headers: CommonHeaders = Header()):
|
||||
return headers
|
||||
@@ -1,22 +0,0 @@
|
||||
from typing import Union
|
||||
|
||||
from fastapi import FastAPI, Header
|
||||
from pydantic import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class CommonHeaders(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
host: str
|
||||
save_data: bool
|
||||
if_modified_since: Union[str, None] = None
|
||||
traceparent: Union[str, None] = None
|
||||
x_tag: list[str] = []
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(headers: CommonHeaders = Header()):
|
||||
return headers
|
||||
@@ -1,6 +1,6 @@
|
||||
import yaml
|
||||
from fastapi import FastAPI, HTTPException, Request
|
||||
from pydantic import BaseModel, ValidationError
|
||||
from pydantic.v1 import BaseModel, ValidationError
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
def get_items(item_a: str, item_b: int, item_c: float, item_d: bool, item_e: bytes):
|
||||
return item_a, item_b, item_c, item_d, item_d, item_e
|
||||
return item_a, item_b, item_c, item_d, item_e
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
from typing import Annotated, Literal
|
||||
|
||||
from fastapi import FastAPI, Query
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class FilterParams(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
limit: int = Field(100, gt=0, le=100)
|
||||
offset: int = Field(0, ge=0)
|
||||
order_by: Literal["created_at", "updated_at"] = "created_at"
|
||||
tags: list[str] = []
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(filter_query: Annotated[FilterParams, Query()]):
|
||||
return filter_query
|
||||
@@ -1,21 +0,0 @@
|
||||
from typing import Annotated, Literal
|
||||
|
||||
from fastapi import FastAPI, Query
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class FilterParams(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
limit: int = Field(100, gt=0, le=100)
|
||||
offset: int = Field(0, ge=0)
|
||||
order_by: Literal["created_at", "updated_at"] = "created_at"
|
||||
tags: list[str] = []
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(filter_query: Annotated[FilterParams, Query()]):
|
||||
return filter_query
|
||||
@@ -1,21 +0,0 @@
|
||||
from typing import Literal
|
||||
|
||||
from fastapi import FastAPI, Query
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class FilterParams(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
limit: int = Field(100, gt=0, le=100)
|
||||
offset: int = Field(0, ge=0)
|
||||
order_by: Literal["created_at", "updated_at"] = "created_at"
|
||||
tags: list[str] = []
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(filter_query: FilterParams = Query()):
|
||||
return filter_query
|
||||
@@ -1,21 +0,0 @@
|
||||
from typing import Literal
|
||||
|
||||
from fastapi import FastAPI, Query
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
class FilterParams(BaseModel):
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
|
||||
limit: int = Field(100, gt=0, le=100)
|
||||
offset: int = Field(0, ge=0)
|
||||
order_by: Literal["created_at", "updated_at"] = "created_at"
|
||||
tags: list[str] = []
|
||||
|
||||
|
||||
@app.get("/items/")
|
||||
async def read_items(filter_query: FilterParams = Query()):
|
||||
return filter_query
|
||||
@@ -1,7 +1,8 @@
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import FastAPI, Form
|
||||
from pydantic import BaseModel
|
||||
from fastapi import FastAPI
|
||||
from fastapi.temp_pydantic_v1_params import Form
|
||||
from pydantic.v1 import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from fastapi import FastAPI, Form
|
||||
from pydantic import BaseModel
|
||||
from fastapi import FastAPI
|
||||
from fastapi.temp_pydantic_v1_params import Form
|
||||
from pydantic.v1 import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from fastapi import FastAPI
|
||||
from pydantic import BaseModel
|
||||
from pydantic.v1 import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from typing import Union
|
||||
|
||||
from fastapi import FastAPI
|
||||
from pydantic import BaseModel
|
||||
from pydantic.v1 import BaseModel
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from pydantic import BaseSettings
|
||||
from pydantic.v1 import BaseSettings
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from pydantic import BaseSettings
|
||||
from pydantic.v1 import BaseSettings
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from fastapi import FastAPI
|
||||
from pydantic import BaseSettings
|
||||
from pydantic.v1 import BaseSettings
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""FastAPI framework, high performance, easy to learn, fast to code, ready for production"""
|
||||
|
||||
__version__ = "0.125.0"
|
||||
__version__ = "0.127.0"
|
||||
|
||||
from starlette import status as status
|
||||
|
||||
|
||||
@@ -11,7 +11,6 @@ from .main import _is_model_class as _is_model_class
|
||||
from .main import _is_model_field as _is_model_field
|
||||
from .main import _is_undefined as _is_undefined
|
||||
from .main import _model_dump as _model_dump
|
||||
from .main import _model_rebuild as _model_rebuild
|
||||
from .main import copy_field_info as copy_field_info
|
||||
from .main import create_body_model as create_body_model
|
||||
from .main import evaluate_forwardref as evaluate_forwardref
|
||||
|
||||
@@ -6,43 +6,26 @@ from typing import (
|
||||
)
|
||||
|
||||
from fastapi._compat import may_v1
|
||||
from fastapi._compat.shared import PYDANTIC_V2, lenient_issubclass
|
||||
from fastapi._compat.shared import lenient_issubclass
|
||||
from fastapi.types import ModelNameMap
|
||||
from pydantic import BaseModel
|
||||
from typing_extensions import Literal
|
||||
|
||||
from . import v2
|
||||
from .model_field import ModelField
|
||||
|
||||
if PYDANTIC_V2:
|
||||
from .v2 import BaseConfig as BaseConfig
|
||||
from .v2 import FieldInfo as FieldInfo
|
||||
from .v2 import PydanticSchemaGenerationError as PydanticSchemaGenerationError
|
||||
from .v2 import RequiredParam as RequiredParam
|
||||
from .v2 import Undefined as Undefined
|
||||
from .v2 import UndefinedType as UndefinedType
|
||||
from .v2 import Url as Url
|
||||
from .v2 import Validator as Validator
|
||||
from .v2 import evaluate_forwardref as evaluate_forwardref
|
||||
from .v2 import get_missing_field_error as get_missing_field_error
|
||||
from .v2 import (
|
||||
with_info_plain_validator_function as with_info_plain_validator_function,
|
||||
)
|
||||
else:
|
||||
from .v1 import BaseConfig as BaseConfig # type: ignore[assignment]
|
||||
from .v1 import FieldInfo as FieldInfo
|
||||
from .v1 import ( # type: ignore[assignment]
|
||||
PydanticSchemaGenerationError as PydanticSchemaGenerationError,
|
||||
)
|
||||
from .v1 import RequiredParam as RequiredParam
|
||||
from .v1 import Undefined as Undefined
|
||||
from .v1 import UndefinedType as UndefinedType
|
||||
from .v1 import Url as Url # type: ignore[assignment]
|
||||
from .v1 import Validator as Validator
|
||||
from .v1 import evaluate_forwardref as evaluate_forwardref
|
||||
from .v1 import get_missing_field_error as get_missing_field_error
|
||||
from .v1 import ( # type: ignore[assignment]
|
||||
with_info_plain_validator_function as with_info_plain_validator_function,
|
||||
)
|
||||
from .v2 import BaseConfig as BaseConfig
|
||||
from .v2 import FieldInfo as FieldInfo
|
||||
from .v2 import PydanticSchemaGenerationError as PydanticSchemaGenerationError
|
||||
from .v2 import RequiredParam as RequiredParam
|
||||
from .v2 import Undefined as Undefined
|
||||
from .v2 import UndefinedType as UndefinedType
|
||||
from .v2 import Url as Url
|
||||
from .v2 import Validator as Validator
|
||||
from .v2 import evaluate_forwardref as evaluate_forwardref
|
||||
from .v2 import get_missing_field_error as get_missing_field_error
|
||||
from .v2 import (
|
||||
with_info_plain_validator_function as with_info_plain_validator_function,
|
||||
)
|
||||
|
||||
|
||||
@lru_cache
|
||||
@@ -50,7 +33,7 @@ def get_cached_model_fields(model: type[BaseModel]) -> list[ModelField]:
|
||||
if lenient_issubclass(model, may_v1.BaseModel):
|
||||
from fastapi._compat import v1
|
||||
|
||||
return v1.get_model_fields(model)
|
||||
return v1.get_model_fields(model) # type: ignore[arg-type,return-value]
|
||||
else:
|
||||
from . import v2
|
||||
|
||||
@@ -60,11 +43,8 @@ def get_cached_model_fields(model: type[BaseModel]) -> list[ModelField]:
|
||||
def _is_undefined(value: object) -> bool:
|
||||
if isinstance(value, may_v1.UndefinedType):
|
||||
return True
|
||||
elif PYDANTIC_V2:
|
||||
from . import v2
|
||||
|
||||
return isinstance(value, v2.UndefinedType)
|
||||
return False
|
||||
return isinstance(value, v2.UndefinedType)
|
||||
|
||||
|
||||
def _get_model_config(model: BaseModel) -> Any:
|
||||
@@ -72,10 +52,8 @@ def _get_model_config(model: BaseModel) -> Any:
|
||||
from fastapi._compat import v1
|
||||
|
||||
return v1._get_model_config(model)
|
||||
elif PYDANTIC_V2:
|
||||
from . import v2
|
||||
|
||||
return v2._get_model_config(model)
|
||||
return v2._get_model_config(model)
|
||||
|
||||
|
||||
def _model_dump(
|
||||
@@ -85,20 +63,15 @@ def _model_dump(
|
||||
from fastapi._compat import v1
|
||||
|
||||
return v1._model_dump(model, mode=mode, **kwargs)
|
||||
elif PYDANTIC_V2:
|
||||
from . import v2
|
||||
|
||||
return v2._model_dump(model, mode=mode, **kwargs)
|
||||
return v2._model_dump(model, mode=mode, **kwargs)
|
||||
|
||||
|
||||
def _is_error_wrapper(exc: Exception) -> bool:
|
||||
if isinstance(exc, may_v1.ErrorWrapper):
|
||||
return True
|
||||
elif PYDANTIC_V2:
|
||||
from . import v2
|
||||
|
||||
return isinstance(exc, v2.ErrorWrapper)
|
||||
return False
|
||||
return isinstance(exc, v2.ErrorWrapper)
|
||||
|
||||
|
||||
def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
|
||||
@@ -106,11 +79,8 @@ def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
|
||||
from fastapi._compat import v1
|
||||
|
||||
return v1.copy_field_info(field_info=field_info, annotation=annotation)
|
||||
else:
|
||||
assert PYDANTIC_V2
|
||||
from . import v2
|
||||
|
||||
return v2.copy_field_info(field_info=field_info, annotation=annotation)
|
||||
return v2.copy_field_info(field_info=field_info, annotation=annotation)
|
||||
|
||||
|
||||
def create_body_model(
|
||||
@@ -120,11 +90,8 @@ def create_body_model(
|
||||
from fastapi._compat import v1
|
||||
|
||||
return v1.create_body_model(fields=fields, model_name=model_name)
|
||||
else:
|
||||
assert PYDANTIC_V2
|
||||
from . import v2
|
||||
|
||||
return v2.create_body_model(fields=fields, model_name=model_name) # type: ignore[arg-type]
|
||||
return v2.create_body_model(fields=fields, model_name=model_name) # type: ignore[arg-type]
|
||||
|
||||
|
||||
def get_annotation_from_field_info(
|
||||
@@ -136,13 +103,10 @@ def get_annotation_from_field_info(
|
||||
return v1.get_annotation_from_field_info(
|
||||
annotation=annotation, field_info=field_info, field_name=field_name
|
||||
)
|
||||
else:
|
||||
assert PYDANTIC_V2
|
||||
from . import v2
|
||||
|
||||
return v2.get_annotation_from_field_info(
|
||||
annotation=annotation, field_info=field_info, field_name=field_name
|
||||
)
|
||||
return v2.get_annotation_from_field_info(
|
||||
annotation=annotation, field_info=field_info, field_name=field_name
|
||||
)
|
||||
|
||||
|
||||
def is_bytes_field(field: ModelField) -> bool:
|
||||
@@ -150,11 +114,8 @@ def is_bytes_field(field: ModelField) -> bool:
|
||||
from fastapi._compat import v1
|
||||
|
||||
return v1.is_bytes_field(field)
|
||||
else:
|
||||
assert PYDANTIC_V2
|
||||
from . import v2
|
||||
|
||||
return v2.is_bytes_field(field) # type: ignore[arg-type]
|
||||
return v2.is_bytes_field(field) # type: ignore[arg-type]
|
||||
|
||||
|
||||
def is_bytes_sequence_field(field: ModelField) -> bool:
|
||||
@@ -162,11 +123,8 @@ def is_bytes_sequence_field(field: ModelField) -> bool:
|
||||
from fastapi._compat import v1
|
||||
|
||||
return v1.is_bytes_sequence_field(field)
|
||||
else:
|
||||
assert PYDANTIC_V2
|
||||
from . import v2
|
||||
|
||||
return v2.is_bytes_sequence_field(field) # type: ignore[arg-type]
|
||||
return v2.is_bytes_sequence_field(field) # type: ignore[arg-type]
|
||||
|
||||
|
||||
def is_scalar_field(field: ModelField) -> bool:
|
||||
@@ -174,23 +132,12 @@ def is_scalar_field(field: ModelField) -> bool:
|
||||
from fastapi._compat import v1
|
||||
|
||||
return v1.is_scalar_field(field)
|
||||
else:
|
||||
assert PYDANTIC_V2
|
||||
from . import v2
|
||||
|
||||
return v2.is_scalar_field(field) # type: ignore[arg-type]
|
||||
return v2.is_scalar_field(field) # type: ignore[arg-type]
|
||||
|
||||
|
||||
def is_scalar_sequence_field(field: ModelField) -> bool:
|
||||
if isinstance(field, may_v1.ModelField):
|
||||
from fastapi._compat import v1
|
||||
|
||||
return v1.is_scalar_sequence_field(field)
|
||||
else:
|
||||
assert PYDANTIC_V2
|
||||
from . import v2
|
||||
|
||||
return v2.is_scalar_sequence_field(field) # type: ignore[arg-type]
|
||||
return v2.is_scalar_sequence_field(field) # type: ignore[arg-type]
|
||||
|
||||
|
||||
def is_sequence_field(field: ModelField) -> bool:
|
||||
@@ -198,11 +145,8 @@ def is_sequence_field(field: ModelField) -> bool:
|
||||
from fastapi._compat import v1
|
||||
|
||||
return v1.is_sequence_field(field)
|
||||
else:
|
||||
assert PYDANTIC_V2
|
||||
from . import v2
|
||||
|
||||
return v2.is_sequence_field(field) # type: ignore[arg-type]
|
||||
return v2.is_sequence_field(field) # type: ignore[arg-type]
|
||||
|
||||
|
||||
def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
|
||||
@@ -210,22 +154,8 @@ def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
|
||||
from fastapi._compat import v1
|
||||
|
||||
return v1.serialize_sequence_value(field=field, value=value)
|
||||
else:
|
||||
assert PYDANTIC_V2
|
||||
from . import v2
|
||||
|
||||
return v2.serialize_sequence_value(field=field, value=value) # type: ignore[arg-type]
|
||||
|
||||
|
||||
def _model_rebuild(model: type[BaseModel]) -> None:
|
||||
if lenient_issubclass(model, may_v1.BaseModel):
|
||||
from fastapi._compat import v1
|
||||
|
||||
v1._model_rebuild(model)
|
||||
elif PYDANTIC_V2:
|
||||
from . import v2
|
||||
|
||||
v2._model_rebuild(model)
|
||||
return v2.serialize_sequence_value(field=field, value=value) # type: ignore[arg-type]
|
||||
|
||||
|
||||
def get_compat_model_name_map(fields: list[ModelField]) -> ModelNameMap:
|
||||
@@ -236,27 +166,18 @@ def get_compat_model_name_map(fields: list[ModelField]) -> ModelNameMap:
|
||||
from fastapi._compat import v1
|
||||
|
||||
v1_flat_models = v1.get_flat_models_from_fields(
|
||||
v1_model_fields, known_models=set()
|
||||
v1_model_fields, # type: ignore[arg-type]
|
||||
known_models=set(),
|
||||
)
|
||||
all_flat_models = v1_flat_models
|
||||
else:
|
||||
all_flat_models = set()
|
||||
if PYDANTIC_V2:
|
||||
from . import v2
|
||||
|
||||
v2_model_fields = [
|
||||
field for field in fields if isinstance(field, v2.ModelField)
|
||||
]
|
||||
v2_flat_models = v2.get_flat_models_from_fields(
|
||||
v2_model_fields, known_models=set()
|
||||
)
|
||||
all_flat_models = all_flat_models.union(v2_flat_models)
|
||||
v2_model_fields = [field for field in fields if isinstance(field, v2.ModelField)]
|
||||
v2_flat_models = v2.get_flat_models_from_fields(v2_model_fields, known_models=set())
|
||||
all_flat_models = all_flat_models.union(v2_flat_models) # type: ignore[arg-type]
|
||||
|
||||
model_name_map = v2.get_model_name_map(all_flat_models)
|
||||
return model_name_map
|
||||
from fastapi._compat import v1
|
||||
|
||||
model_name_map = v1.get_model_name_map(all_flat_models)
|
||||
model_name_map = v2.get_model_name_map(all_flat_models) # type: ignore[arg-type]
|
||||
return model_name_map
|
||||
|
||||
|
||||
@@ -275,29 +196,23 @@ def get_definitions(
if sys.version_info < (3, 14):
v1_fields = [field for field in fields if isinstance(field, may_v1.ModelField)]
v1_field_maps, v1_definitions = may_v1.get_definitions(
fields=v1_fields,
fields=v1_fields, # type: ignore[arg-type]
model_name_map=model_name_map,
separate_input_output_schemas=separate_input_output_schemas,
)
if not PYDANTIC_V2:
return v1_field_maps, v1_definitions
else:
from . import v2

v2_fields = [field for field in fields if isinstance(field, v2.ModelField)]
v2_field_maps, v2_definitions = v2.get_definitions(
fields=v2_fields,
model_name_map=model_name_map,
separate_input_output_schemas=separate_input_output_schemas,
)
all_definitions = {**v1_definitions, **v2_definitions}
all_field_maps = {**v1_field_maps, **v2_field_maps}
return all_field_maps, all_definitions
v2_fields = [field for field in fields if isinstance(field, v2.ModelField)]
v2_field_maps, v2_definitions = v2.get_definitions(
fields=v2_fields,
model_name_map=model_name_map,
separate_input_output_schemas=separate_input_output_schemas,
)
all_definitions = {**v1_definitions, **v2_definitions}
all_field_maps = {**v1_field_maps, **v2_field_maps} # type: ignore[misc]
return all_field_maps, all_definitions

# Pydantic v1 is not supported since Python 3.14
else:
from . import v2

v2_fields = [field for field in fields if isinstance(field, v2.ModelField)]
v2_field_maps, v2_definitions = v2.get_definitions(
fields=v2_fields,
@@ -326,33 +241,24 @@ def get_schema_from_model_field(
field_mapping=field_mapping,
separate_input_output_schemas=separate_input_output_schemas,
)
else:
assert PYDANTIC_V2
from . import v2

return v2.get_schema_from_model_field(
field=field, # type: ignore[arg-type]
model_name_map=model_name_map,
field_mapping=field_mapping, # type: ignore[arg-type]
separate_input_output_schemas=separate_input_output_schemas,
)
return v2.get_schema_from_model_field(
field=field, # type: ignore[arg-type]
model_name_map=model_name_map,
field_mapping=field_mapping, # type: ignore[arg-type]
separate_input_output_schemas=separate_input_output_schemas,
)


def _is_model_field(value: Any) -> bool:
if isinstance(value, may_v1.ModelField):
return True
elif PYDANTIC_V2:
from . import v2

return isinstance(value, v2.ModelField)
return False
return isinstance(value, v2.ModelField)


def _is_model_class(value: Any) -> bool:
if lenient_issubclass(value, may_v1.BaseModel):
return True
elif PYDANTIC_V2:
from . import v2

return lenient_issubclass(value, v2.BaseModel) # type: ignore[attr-defined]
return False
return lenient_issubclass(value, v2.BaseModel) # type: ignore[attr-defined]

@@ -102,7 +102,7 @@ def _normalize_errors(errors: Sequence[Any]) -> list[dict[str, Any]]:
use_errors: list[Any] = []
for error in errors:
if isinstance(error, ErrorWrapper):
new_errors = ValidationError( # type: ignore[call-arg]
new_errors = ValidationError(
errors=[error], model=RequestErrorModel
).errors()
use_errors.extend(new_errors)

@@ -11,6 +11,44 @@ from typing import (
from fastapi._compat import shared
from fastapi.openapi.constants import REF_PREFIX as REF_PREFIX
from fastapi.types import ModelNameMap
from pydantic.v1 import BaseConfig as BaseConfig
from pydantic.v1 import BaseModel as BaseModel
from pydantic.v1 import ValidationError as ValidationError
from pydantic.v1 import create_model as create_model
from pydantic.v1.class_validators import Validator as Validator
from pydantic.v1.color import Color as Color
from pydantic.v1.error_wrappers import ErrorWrapper as ErrorWrapper
from pydantic.v1.fields import (
SHAPE_FROZENSET,
SHAPE_LIST,
SHAPE_SEQUENCE,
SHAPE_SET,
SHAPE_SINGLETON,
SHAPE_TUPLE,
SHAPE_TUPLE_ELLIPSIS,
)
from pydantic.v1.fields import FieldInfo as FieldInfo
from pydantic.v1.fields import ModelField as ModelField
from pydantic.v1.fields import Undefined as Undefined
from pydantic.v1.fields import UndefinedType as UndefinedType
from pydantic.v1.networks import AnyUrl as AnyUrl
from pydantic.v1.networks import NameEmail as NameEmail
from pydantic.v1.schema import TypeModelSet as TypeModelSet
from pydantic.v1.schema import field_schema, model_process_schema
from pydantic.v1.schema import (
get_annotation_from_field_info as get_annotation_from_field_info,
)
from pydantic.v1.schema import (
get_flat_models_from_field as get_flat_models_from_field,
)
from pydantic.v1.schema import (
get_flat_models_from_fields as get_flat_models_from_fields,
)
from pydantic.v1.schema import get_model_name_map as get_model_name_map
from pydantic.v1.types import SecretBytes as SecretBytes
from pydantic.v1.types import SecretStr as SecretStr
from pydantic.v1.typing import evaluate_forwardref as evaluate_forwardref
from pydantic.v1.utils import lenient_issubclass as lenient_issubclass
from pydantic.version import VERSION as PYDANTIC_VERSION
from typing_extensions import Literal

@@ -20,103 +58,6 @@ PYDANTIC_V2 = PYDANTIC_VERSION_MINOR_TUPLE[0] == 2
|
||||
# shadowing typing.Required.
|
||||
RequiredParam: Any = Ellipsis
|
||||
|
||||
if not PYDANTIC_V2:
|
||||
from pydantic import BaseConfig as BaseConfig
|
||||
from pydantic import BaseModel as BaseModel
|
||||
from pydantic import ValidationError as ValidationError
|
||||
from pydantic import create_model as create_model
|
||||
from pydantic.class_validators import Validator as Validator
|
||||
from pydantic.color import Color as Color
|
||||
from pydantic.error_wrappers import ErrorWrapper as ErrorWrapper
|
||||
from pydantic.errors import MissingError
|
||||
from pydantic.fields import ( # type: ignore[attr-defined]
|
||||
SHAPE_FROZENSET,
|
||||
SHAPE_LIST,
|
||||
SHAPE_SEQUENCE,
|
||||
SHAPE_SET,
|
||||
SHAPE_SINGLETON,
|
||||
SHAPE_TUPLE,
|
||||
SHAPE_TUPLE_ELLIPSIS,
|
||||
)
|
||||
from pydantic.fields import FieldInfo as FieldInfo
|
||||
from pydantic.fields import ModelField as ModelField # type: ignore[attr-defined]
|
||||
from pydantic.fields import Undefined as Undefined # type: ignore[attr-defined]
|
||||
from pydantic.fields import ( # type: ignore[attr-defined]
|
||||
UndefinedType as UndefinedType,
|
||||
)
|
||||
from pydantic.networks import AnyUrl as AnyUrl
|
||||
from pydantic.networks import NameEmail as NameEmail
|
||||
from pydantic.schema import TypeModelSet as TypeModelSet
|
||||
from pydantic.schema import (
|
||||
field_schema,
|
||||
model_process_schema,
|
||||
)
|
||||
from pydantic.schema import (
|
||||
get_annotation_from_field_info as get_annotation_from_field_info,
|
||||
)
|
||||
from pydantic.schema import get_flat_models_from_field as get_flat_models_from_field
|
||||
from pydantic.schema import (
|
||||
get_flat_models_from_fields as get_flat_models_from_fields,
|
||||
)
|
||||
from pydantic.schema import get_model_name_map as get_model_name_map
|
||||
from pydantic.types import SecretBytes as SecretBytes
|
||||
from pydantic.types import SecretStr as SecretStr
|
||||
from pydantic.typing import evaluate_forwardref as evaluate_forwardref
|
||||
from pydantic.utils import lenient_issubclass as lenient_issubclass
|
||||
|
||||
|
||||
else:
|
||||
from pydantic.v1 import BaseConfig as BaseConfig # type: ignore[assignment]
|
||||
from pydantic.v1 import BaseModel as BaseModel # type: ignore[assignment]
|
||||
from pydantic.v1 import ( # type: ignore[assignment]
|
||||
ValidationError as ValidationError,
|
||||
)
|
||||
from pydantic.v1 import create_model as create_model # type: ignore[no-redef]
|
||||
from pydantic.v1.class_validators import Validator as Validator
|
||||
from pydantic.v1.color import Color as Color # type: ignore[assignment]
|
||||
from pydantic.v1.error_wrappers import ErrorWrapper as ErrorWrapper
|
||||
from pydantic.v1.errors import MissingError
|
||||
from pydantic.v1.fields import (
|
||||
SHAPE_FROZENSET,
|
||||
SHAPE_LIST,
|
||||
SHAPE_SEQUENCE,
|
||||
SHAPE_SET,
|
||||
SHAPE_SINGLETON,
|
||||
SHAPE_TUPLE,
|
||||
SHAPE_TUPLE_ELLIPSIS,
|
||||
)
|
||||
from pydantic.v1.fields import FieldInfo as FieldInfo # type: ignore[assignment]
|
||||
from pydantic.v1.fields import ModelField as ModelField
|
||||
from pydantic.v1.fields import Undefined as Undefined
|
||||
from pydantic.v1.fields import UndefinedType as UndefinedType
|
||||
from pydantic.v1.networks import AnyUrl as AnyUrl
|
||||
from pydantic.v1.networks import ( # type: ignore[assignment]
|
||||
NameEmail as NameEmail,
|
||||
)
|
||||
from pydantic.v1.schema import TypeModelSet as TypeModelSet
|
||||
from pydantic.v1.schema import (
|
||||
field_schema,
|
||||
model_process_schema,
|
||||
)
|
||||
from pydantic.v1.schema import (
|
||||
get_annotation_from_field_info as get_annotation_from_field_info,
|
||||
)
|
||||
from pydantic.v1.schema import (
|
||||
get_flat_models_from_field as get_flat_models_from_field,
|
||||
)
|
||||
from pydantic.v1.schema import (
|
||||
get_flat_models_from_fields as get_flat_models_from_fields,
|
||||
)
|
||||
from pydantic.v1.schema import get_model_name_map as get_model_name_map
|
||||
from pydantic.v1.types import ( # type: ignore[assignment]
|
||||
SecretBytes as SecretBytes,
|
||||
)
|
||||
from pydantic.v1.types import ( # type: ignore[assignment]
|
||||
SecretStr as SecretStr,
|
||||
)
|
||||
from pydantic.v1.typing import evaluate_forwardref as evaluate_forwardref
|
||||
from pydantic.v1.utils import lenient_issubclass as lenient_issubclass
|
||||
|
||||
|
||||
GetJsonSchemaHandler = Any
|
||||
JsonSchemaValue = dict[str, Any]
|
||||
@@ -200,24 +141,6 @@ def is_pv1_scalar_field(field: ModelField) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
def is_pv1_scalar_sequence_field(field: ModelField) -> bool:
|
||||
if (field.shape in sequence_shapes) and not lenient_issubclass(
|
||||
field.type_, BaseModel
|
||||
):
|
||||
if field.sub_fields is not None:
|
||||
for sub_field in field.sub_fields:
|
||||
if not is_pv1_scalar_field(sub_field):
|
||||
return False
|
||||
return True
|
||||
if shared._annotation_is_sequence(field.type_):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _model_rebuild(model: type[BaseModel]) -> None:
|
||||
model.update_forward_refs()
|
||||
|
||||
|
||||
def _model_dump(
|
||||
model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
|
||||
) -> Any:
|
||||
@@ -225,7 +148,7 @@ def _model_dump(
|
||||
|
||||
|
||||
def _get_model_config(model: BaseModel) -> Any:
|
||||
return model.__config__ # type: ignore[attr-defined]
|
||||
return model.__config__
|
||||
|
||||
|
||||
def get_schema_from_model_field(
|
||||
@@ -237,8 +160,10 @@ def get_schema_from_model_field(
|
||||
],
|
||||
separate_input_output_schemas: bool = True,
|
||||
) -> dict[str, Any]:
|
||||
return field_schema( # type: ignore[no-any-return]
|
||||
field, model_name_map=model_name_map, ref_prefix=REF_PREFIX
|
||||
return field_schema(
|
||||
field,
|
||||
model_name_map=model_name_map, # type: ignore[arg-type]
|
||||
ref_prefix=REF_PREFIX,
|
||||
)[0]
|
||||
|
||||
|
||||
@@ -257,7 +182,7 @@ def get_definitions(
|
||||
dict[str, dict[str, Any]],
|
||||
]:
|
||||
models = get_flat_models_from_fields(fields, known_models=set())
|
||||
return {}, get_model_definitions(flat_models=models, model_name_map=model_name_map)
|
||||
return {}, get_model_definitions(flat_models=models, model_name_map=model_name_map) # type: ignore[arg-type]
|
||||
|
||||
|
||||
def is_scalar_field(field: ModelField) -> bool:
|
||||
@@ -268,12 +193,8 @@ def is_sequence_field(field: ModelField) -> bool:
|
||||
return field.shape in sequence_shapes or shared._annotation_is_sequence(field.type_)
|
||||
|
||||
|
||||
def is_scalar_sequence_field(field: ModelField) -> bool:
|
||||
return is_pv1_scalar_sequence_field(field)
|
||||
|
||||
|
||||
def is_bytes_field(field: ModelField) -> bool:
|
||||
return lenient_issubclass(field.type_, bytes) # type: ignore[no-any-return]
|
||||
return lenient_issubclass(field.type_, bytes)
|
||||
|
||||
|
||||
def is_bytes_sequence_field(field: ModelField) -> bool:
|
||||
@@ -288,20 +209,14 @@ def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
|
||||
return sequence_shape_to_type[field.shape](value) # type: ignore[no-any-return]
|
||||
|
||||
|
||||
def get_missing_field_error(loc: tuple[str, ...]) -> dict[str, Any]:
|
||||
missing_field_error = ErrorWrapper(MissingError(), loc=loc)
|
||||
new_error = ValidationError([missing_field_error], RequestErrorModel)
|
||||
return new_error.errors()[0] # type: ignore[return-value]
|
||||
|
||||
|
||||
def create_body_model(
|
||||
*, fields: Sequence[ModelField], model_name: str
|
||||
) -> type[BaseModel]:
|
||||
BodyModel = create_model(model_name)
|
||||
for f in fields:
|
||||
BodyModel.__fields__[f.name] = f # type: ignore[index]
|
||||
BodyModel.__fields__[f.name] = f
|
||||
return BodyModel
|
||||
|
||||
|
||||
def get_model_fields(model: type[BaseModel]) -> list[ModelField]:
|
||||
return list(model.__fields__.values()) # type: ignore[attr-defined]
|
||||
return list(model.__fields__.values())
|
||||
|
||||
@@ -216,10 +216,6 @@ def get_annotation_from_field_info(
|
||||
return annotation
|
||||
|
||||
|
||||
def _model_rebuild(model: type[BaseModel]) -> None:
|
||||
model.model_rebuild()
|
||||
|
||||
|
||||
def _model_dump(
|
||||
model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
|
||||
) -> Any:
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
from collections.abc import Iterable
|
||||
from typing import (
|
||||
Annotated,
|
||||
Any,
|
||||
@@ -135,27 +134,12 @@ class UploadFile(StarletteUploadFile):
|
||||
"""
|
||||
return await super().close()
|
||||
|
||||
@classmethod
|
||||
def __get_validators__(cls: type["UploadFile"]) -> Iterable[Callable[..., Any]]:
|
||||
yield cls.validate
|
||||
|
||||
@classmethod
|
||||
def validate(cls: type["UploadFile"], v: Any) -> Any:
|
||||
if not isinstance(v, StarletteUploadFile):
|
||||
raise ValueError(f"Expected UploadFile, received: {type(v)}")
|
||||
return v
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: Any, _: Any) -> "UploadFile":
|
||||
if not isinstance(__input_value, StarletteUploadFile):
|
||||
raise ValueError(f"Expected UploadFile, received: {type(__input_value)}")
|
||||
return cast(UploadFile, __input_value)
|
||||
|
||||
# TODO: remove when deprecating Pydantic v1
|
||||
@classmethod
|
||||
def __modify_schema__(cls, field_schema: dict[str, Any]) -> None:
|
||||
field_schema.update({"type": "string", "format": "binary"})
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_json_schema__(
|
||||
cls, core_schema: CoreSchema, handler: GetJsonSchemaHandler
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import dataclasses
|
||||
import inspect
|
||||
import sys
|
||||
import warnings
|
||||
from collections.abc import Coroutine, Mapping, Sequence
|
||||
from contextlib import AsyncExitStack, contextmanager
|
||||
from copy import copy, deepcopy
|
||||
@@ -18,7 +19,6 @@ from typing import (
|
||||
import anyio
|
||||
from fastapi import params
|
||||
from fastapi._compat import (
|
||||
PYDANTIC_V2,
|
||||
ModelField,
|
||||
RequiredParam,
|
||||
Undefined,
|
||||
@@ -323,6 +323,13 @@ def get_dependant(
|
||||
)
|
||||
continue
|
||||
assert param_details.field is not None
|
||||
if isinstance(param_details.field, may_v1.ModelField):
|
||||
warnings.warn(
|
||||
"pydantic.v1 is deprecated and will soon stop being supported by FastAPI."
|
||||
f" Please update the param {param_name}: {param_details.type_annotation!r}.",
|
||||
category=DeprecationWarning,
|
||||
stacklevel=5,
|
||||
)
|
||||
if isinstance(
|
||||
param_details.field.field_info, (params.Body, temp_pydantic_v1_params.Body)
|
||||
):
|
||||
@@ -410,7 +417,8 @@ def analyze_param(
|
||||
if isinstance(fastapi_annotation, (FieldInfo, may_v1.FieldInfo)):
|
||||
# Copy `field_info` because we mutate `field_info.default` below.
|
||||
field_info = copy_field_info(
|
||||
field_info=fastapi_annotation, annotation=use_annotation
|
||||
field_info=fastapi_annotation, # type: ignore[arg-type]
|
||||
annotation=use_annotation,
|
||||
)
|
||||
assert field_info.default in {
|
||||
Undefined,
|
||||
@@ -444,10 +452,9 @@ def analyze_param(
|
||||
"Cannot specify FastAPI annotations in `Annotated` and default value"
|
||||
f" together for {param_name!r}"
|
||||
)
|
||||
field_info = value
|
||||
if PYDANTIC_V2:
|
||||
if isinstance(field_info, FieldInfo):
|
||||
field_info.annotation = type_annotation
|
||||
field_info = value # type: ignore[assignment]
|
||||
if isinstance(field_info, FieldInfo):
|
||||
field_info.annotation = type_annotation
|
||||
|
||||
# Get Depends from type annotation
|
||||
if depends is not None and depends.dependency is None:
|
||||
@@ -485,7 +492,7 @@ def analyze_param(
|
||||
field_info = params.File(annotation=use_annotation, default=default_value)
|
||||
elif not field_annotation_is_scalar(annotation=type_annotation):
|
||||
if annotation_is_pydantic_v1(use_annotation):
|
||||
field_info = temp_pydantic_v1_params.Body(
|
||||
field_info = temp_pydantic_v1_params.Body( # type: ignore[assignment]
|
||||
annotation=use_annotation, default=default_value
|
||||
)
|
||||
else:
|
||||
|
||||
@@ -228,11 +228,11 @@ def jsonable_encoder(
|
||||
# TODO: remove when deprecating Pydantic v1
|
||||
encoders: dict[Any, Any] = {}
|
||||
if isinstance(obj, may_v1.BaseModel):
|
||||
encoders = getattr(obj.__config__, "json_encoders", {}) # type: ignore[attr-defined]
|
||||
encoders = getattr(obj.__config__, "json_encoders", {})
|
||||
if custom_encoder:
|
||||
encoders = {**encoders, **custom_encoder}
|
||||
obj_dict = _model_dump(
|
||||
obj,
|
||||
obj, # type: ignore[arg-type]
|
||||
mode="json",
|
||||
include=include,
|
||||
exclude=exclude,
|
||||
|
||||
@@ -3,11 +3,9 @@ from enum import Enum
|
||||
from typing import Annotated, Any, Callable, Optional, Union
|
||||
|
||||
from fastapi._compat import (
|
||||
PYDANTIC_V2,
|
||||
CoreSchema,
|
||||
GetJsonSchemaHandler,
|
||||
JsonSchemaValue,
|
||||
_model_rebuild,
|
||||
with_info_plain_validator_function,
|
||||
)
|
||||
from fastapi.logger import logger
|
||||
@@ -57,13 +55,7 @@ except ImportError: # pragma: no cover
|
||||
|
||||
|
||||
class BaseModelWithConfig(BaseModel):
|
||||
if PYDANTIC_V2:
|
||||
model_config = {"extra": "allow"}
|
||||
|
||||
else:
|
||||
|
||||
class Config:
|
||||
extra = "allow"
|
||||
model_config = {"extra": "allow"}
|
||||
|
||||
|
||||
class Contact(BaseModelWithConfig):
|
||||
@@ -226,13 +218,7 @@ class Example(TypedDict, total=False):
|
||||
value: Optional[Any]
|
||||
externalValue: Optional[AnyUrl]
|
||||
|
||||
if PYDANTIC_V2: # type: ignore [misc]
|
||||
__pydantic_config__ = {"extra": "allow"}
|
||||
|
||||
else:
|
||||
|
||||
class Config:
|
||||
extra = "allow"
|
||||
__pydantic_config__ = {"extra": "allow"} # type: ignore[misc]
|
||||
|
||||
|
||||
class ParameterInType(Enum):
|
||||
@@ -447,6 +433,6 @@ class OpenAPI(BaseModelWithConfig):
|
||||
externalDocs: Optional[ExternalDocumentation] = None
|
||||
|
||||
|
||||
_model_rebuild(Schema)
|
||||
_model_rebuild(Operation)
|
||||
_model_rebuild(Encoding)
|
||||
Schema.model_rebuild()
|
||||
Operation.model_rebuild()
|
||||
Encoding.model_rebuild()
|
||||
|
||||
@@ -9,8 +9,6 @@ from pydantic.fields import FieldInfo
|
||||
from typing_extensions import Literal, deprecated
|
||||
|
||||
from ._compat import (
|
||||
PYDANTIC_V2,
|
||||
PYDANTIC_VERSION_MINOR_TUPLE,
|
||||
Undefined,
|
||||
)
|
||||
|
||||
@@ -111,29 +109,24 @@ class Param(FieldInfo): # type: ignore[misc]
|
||||
stacklevel=4,
|
||||
)
|
||||
current_json_schema_extra = json_schema_extra or extra
|
||||
if PYDANTIC_VERSION_MINOR_TUPLE < (2, 7):
|
||||
self.deprecated = deprecated
|
||||
else:
|
||||
kwargs["deprecated"] = deprecated
|
||||
if PYDANTIC_V2:
|
||||
if serialization_alias in (_Unset, None) and isinstance(alias, str):
|
||||
serialization_alias = alias
|
||||
if validation_alias in (_Unset, None):
|
||||
validation_alias = alias
|
||||
kwargs.update(
|
||||
{
|
||||
"annotation": annotation,
|
||||
"alias_priority": alias_priority,
|
||||
"validation_alias": validation_alias,
|
||||
"serialization_alias": serialization_alias,
|
||||
"strict": strict,
|
||||
"json_schema_extra": current_json_schema_extra,
|
||||
}
|
||||
)
|
||||
kwargs["pattern"] = pattern or regex
|
||||
else:
|
||||
kwargs["regex"] = pattern or regex
|
||||
kwargs.update(**current_json_schema_extra)
|
||||
kwargs["deprecated"] = deprecated
|
||||
|
||||
if serialization_alias in (_Unset, None) and isinstance(alias, str):
|
||||
serialization_alias = alias
|
||||
if validation_alias in (_Unset, None):
|
||||
validation_alias = alias
|
||||
kwargs.update(
|
||||
{
|
||||
"annotation": annotation,
|
||||
"alias_priority": alias_priority,
|
||||
"validation_alias": validation_alias,
|
||||
"serialization_alias": serialization_alias,
|
||||
"strict": strict,
|
||||
"json_schema_extra": current_json_schema_extra,
|
||||
}
|
||||
)
|
||||
kwargs["pattern"] = pattern or regex
|
||||
|
||||
use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset}
|
||||
|
||||
super().__init__(**use_kwargs)
|
||||
@@ -571,29 +564,22 @@ class Body(FieldInfo): # type: ignore[misc]
|
||||
stacklevel=4,
|
||||
)
|
||||
current_json_schema_extra = json_schema_extra or extra
|
||||
if PYDANTIC_VERSION_MINOR_TUPLE < (2, 7):
|
||||
self.deprecated = deprecated
|
||||
else:
|
||||
kwargs["deprecated"] = deprecated
|
||||
if PYDANTIC_V2:
|
||||
if serialization_alias in (_Unset, None) and isinstance(alias, str):
|
||||
serialization_alias = alias
|
||||
if validation_alias in (_Unset, None):
|
||||
validation_alias = alias
|
||||
kwargs.update(
|
||||
{
|
||||
"annotation": annotation,
|
||||
"alias_priority": alias_priority,
|
||||
"validation_alias": validation_alias,
|
||||
"serialization_alias": serialization_alias,
|
||||
"strict": strict,
|
||||
"json_schema_extra": current_json_schema_extra,
|
||||
}
|
||||
)
|
||||
kwargs["pattern"] = pattern or regex
|
||||
else:
|
||||
kwargs["regex"] = pattern or regex
|
||||
kwargs.update(**current_json_schema_extra)
|
||||
kwargs["deprecated"] = deprecated
|
||||
if serialization_alias in (_Unset, None) and isinstance(alias, str):
|
||||
serialization_alias = alias
|
||||
if validation_alias in (_Unset, None):
|
||||
validation_alias = alias
|
||||
kwargs.update(
|
||||
{
|
||||
"annotation": annotation,
|
||||
"alias_priority": alias_priority,
|
||||
"validation_alias": validation_alias,
|
||||
"serialization_alias": serialization_alias,
|
||||
"strict": strict,
|
||||
"json_schema_extra": current_json_schema_extra,
|
||||
}
|
||||
)
|
||||
kwargs["pattern"] = pattern or regex
|
||||
|
||||
use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset}
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import dataclasses
|
||||
import email.message
|
||||
import functools
|
||||
import inspect
|
||||
import json
|
||||
import warnings
|
||||
from collections.abc import (
|
||||
AsyncIterator,
|
||||
Awaitable,
|
||||
@@ -29,7 +29,9 @@ from fastapi._compat import (
|
||||
_get_model_config,
|
||||
_model_dump,
|
||||
_normalize_errors,
|
||||
annotation_is_pydantic_v1,
|
||||
lenient_issubclass,
|
||||
may_v1,
|
||||
)
|
||||
from fastapi.datastructures import Default, DefaultPlaceholder
|
||||
from fastapi.dependencies.models import Dependant
|
||||
@@ -58,7 +60,6 @@ from fastapi.utils import (
|
||||
get_value_or_default,
|
||||
is_body_allowed_for_status_code,
|
||||
)
|
||||
from pydantic import BaseModel
|
||||
from starlette import routing
|
||||
from starlette._exception_handler import wrap_app_handling_exceptions
|
||||
from starlette._utils import is_async_callable
|
||||
@@ -153,8 +154,8 @@ def _prepare_response_content(
|
||||
exclude_defaults: bool = False,
|
||||
exclude_none: bool = False,
|
||||
) -> Any:
|
||||
if isinstance(res, BaseModel):
|
||||
read_with_orm_mode = getattr(_get_model_config(res), "read_with_orm_mode", None)
|
||||
if isinstance(res, may_v1.BaseModel):
|
||||
read_with_orm_mode = getattr(_get_model_config(res), "read_with_orm_mode", None) # type: ignore[arg-type]
|
||||
if read_with_orm_mode:
|
||||
# Let from_orm extract the data from this model instead of converting
|
||||
# it now to a dict.
|
||||
@@ -162,7 +163,7 @@ def _prepare_response_content(
|
||||
# access instead of dict iteration, e.g. lazy relationships.
|
||||
return res
|
||||
return _model_dump(
|
||||
res,
|
||||
res, # type: ignore[arg-type]
|
||||
by_alias=True,
|
||||
exclude_unset=exclude_unset,
|
||||
exclude_defaults=exclude_defaults,
|
||||
@@ -188,9 +189,6 @@ def _prepare_response_content(
|
||||
)
|
||||
for k, v in res.items()
|
||||
}
|
||||
elif dataclasses.is_dataclass(res):
|
||||
assert not isinstance(res, type)
|
||||
return dataclasses.asdict(res)
|
||||
return res
|
||||
|
||||
|
||||
@@ -638,6 +636,13 @@ class APIRoute(routing.Route):
f"Status code {status_code} must not have a response body"
)
response_name = "Response_" + self.unique_id
if annotation_is_pydantic_v1(self.response_model):
warnings.warn(
"pydantic.v1 is deprecated and will soon stop being supported by FastAPI."
f" Please update the response model {self.response_model!r}.",
category=DeprecationWarning,
stacklevel=4,
)
self.response_field = create_model_field(
name=response_name,
type_=self.response_model,

@@ -671,6 +676,13 @@ class APIRoute(routing.Route):
f"Status code {additional_status_code} must not have a response body"
)
response_name = f"Response_{additional_status_code}_{self.unique_id}"
if annotation_is_pydantic_v1(model):
warnings.warn(
"pydantic.v1 is deprecated and will soon stop being supported by FastAPI."
f" In responses={{}}, please update {model}.",
category=DeprecationWarning,
stacklevel=4,
)
response_field = create_model_field(
name=response_name, type_=model, mode="serialization"
)

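For context on the two deprecation warnings added above, here is a minimal sketch of an app that would trigger them; the model and path names are hypothetical and not taken from this diff. A pydantic.v1 response model keeps working, but route registration now emits a DeprecationWarning:

from fastapi import FastAPI
from pydantic import BaseModel
from pydantic.v1 import BaseModel as V1BaseModel


class Item(BaseModel):  # Pydantic v2 model: no warning
    name: str


class LegacyItem(V1BaseModel):  # pydantic.v1 model: triggers the warning above
    name: str


app = FastAPI()


@app.get("/items", response_model=Item)
def read_items():
    return {"name": "foo"}


@app.get("/legacy-items", response_model=LegacyItem)  # emits DeprecationWarning
def read_legacy_items():
    return {"name": "foo"}
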
@@ -6,12 +6,11 @@ from fastapi.params import ParamTypes
|
||||
from typing_extensions import deprecated
|
||||
|
||||
from ._compat.may_v1 import FieldInfo, Undefined
|
||||
from ._compat.shared import PYDANTIC_VERSION_MINOR_TUPLE
|
||||
|
||||
_Unset: Any = Undefined
|
||||
|
||||
|
||||
class Param(FieldInfo): # type: ignore[misc]
|
||||
class Param(FieldInfo):
|
||||
in_: ParamTypes
|
||||
|
||||
def __init__(
|
||||
@@ -98,10 +97,7 @@ class Param(FieldInfo): # type: ignore[misc]
|
||||
stacklevel=4,
|
||||
)
|
||||
current_json_schema_extra = json_schema_extra or extra
|
||||
if PYDANTIC_VERSION_MINOR_TUPLE < (2, 7):
|
||||
self.deprecated = deprecated
|
||||
else:
|
||||
kwargs["deprecated"] = deprecated
|
||||
kwargs["deprecated"] = deprecated
|
||||
kwargs["regex"] = pattern or regex
|
||||
kwargs.update(**current_json_schema_extra)
|
||||
use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset}
|
||||
@@ -112,7 +108,7 @@ class Param(FieldInfo): # type: ignore[misc]
|
||||
return f"{self.__class__.__name__}({self.default})"
|
||||
|
||||
|
||||
class Path(Param): # type: ignore[misc]
|
||||
class Path(Param):
|
||||
in_ = ParamTypes.path
|
||||
|
||||
def __init__(
|
||||
@@ -198,7 +194,7 @@ class Path(Param): # type: ignore[misc]
|
||||
)
|
||||
|
||||
|
||||
class Query(Param): # type: ignore[misc]
|
||||
class Query(Param):
|
||||
in_ = ParamTypes.query
|
||||
|
||||
def __init__(
|
||||
@@ -282,7 +278,7 @@ class Query(Param): # type: ignore[misc]
|
||||
)
|
||||
|
||||
|
||||
class Header(Param): # type: ignore[misc]
|
||||
class Header(Param):
|
||||
in_ = ParamTypes.header
|
||||
|
||||
def __init__(
|
||||
@@ -368,7 +364,7 @@ class Header(Param): # type: ignore[misc]
|
||||
)
|
||||
|
||||
|
||||
class Cookie(Param): # type: ignore[misc]
|
||||
class Cookie(Param):
|
||||
in_ = ParamTypes.cookie
|
||||
|
||||
def __init__(
|
||||
@@ -452,7 +448,7 @@ class Cookie(Param): # type: ignore[misc]
|
||||
)
|
||||
|
||||
|
||||
class Body(FieldInfo): # type: ignore[misc]
|
||||
class Body(FieldInfo):
|
||||
def __init__(
|
||||
self,
|
||||
default: Any = Undefined,
|
||||
@@ -541,10 +537,7 @@ class Body(FieldInfo): # type: ignore[misc]
|
||||
stacklevel=4,
|
||||
)
|
||||
current_json_schema_extra = json_schema_extra or extra
|
||||
if PYDANTIC_VERSION_MINOR_TUPLE < (2, 7):
|
||||
self.deprecated = deprecated
|
||||
else:
|
||||
kwargs["deprecated"] = deprecated
|
||||
kwargs["deprecated"] = deprecated
|
||||
kwargs["regex"] = pattern or regex
|
||||
kwargs.update(**current_json_schema_extra)
|
||||
|
||||
@@ -556,7 +549,7 @@ class Body(FieldInfo): # type: ignore[misc]
|
||||
return f"{self.__class__.__name__}({self.default})"
|
||||
|
||||
|
||||
class Form(Body): # type: ignore[misc]
|
||||
class Form(Body):
|
||||
def __init__(
|
||||
self,
|
||||
default: Any = Undefined,
|
||||
@@ -640,7 +633,7 @@ class Form(Body): # type: ignore[misc]
|
||||
)
|
||||
|
||||
|
||||
class File(Form): # type: ignore[misc]
|
||||
class File(Form):
|
||||
def __init__(
|
||||
self,
|
||||
default: Any = Undefined,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import re
|
||||
import warnings
|
||||
from collections.abc import MutableMapping
|
||||
from dataclasses import is_dataclass
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
@@ -13,7 +12,6 @@ from weakref import WeakKeyDictionary
|
||||
|
||||
import fastapi
|
||||
from fastapi._compat import (
|
||||
PYDANTIC_V2,
|
||||
BaseConfig,
|
||||
ModelField,
|
||||
PydanticSchemaGenerationError,
|
||||
@@ -29,6 +27,8 @@ from pydantic import BaseModel
|
||||
from pydantic.fields import FieldInfo
|
||||
from typing_extensions import Literal
|
||||
|
||||
from ._compat import v2
|
||||
|
||||
if TYPE_CHECKING: # pragma: nocover
|
||||
from .routing import APIRoute
|
||||
|
||||
@@ -105,14 +105,12 @@ def create_model_field(
|
||||
from fastapi._compat import v1
|
||||
|
||||
try:
|
||||
return v1.ModelField(**v1_kwargs) # type: ignore[no-any-return]
|
||||
return v1.ModelField(**v1_kwargs) # type: ignore[return-value]
|
||||
except RuntimeError:
|
||||
raise fastapi.exceptions.FastAPIError(
|
||||
_invalid_args_message.format(type_=type_)
|
||||
) from None
|
||||
elif PYDANTIC_V2:
|
||||
from ._compat import v2
|
||||
|
||||
else:
|
||||
field_info = field_info or FieldInfo(
|
||||
annotation=type_, default=default, alias=alias
|
||||
)
|
||||
@@ -128,7 +126,7 @@ def create_model_field(
|
||||
from fastapi._compat import v1
|
||||
|
||||
try:
|
||||
return v1.ModelField(**v1_kwargs) # type: ignore[no-any-return]
|
||||
return v1.ModelField(**v1_kwargs)
|
||||
except RuntimeError:
|
||||
raise fastapi.exceptions.FastAPIError(
|
||||
_invalid_args_message.format(type_=type_)
|
||||
@@ -140,11 +138,8 @@ def create_cloned_field(
|
||||
*,
|
||||
cloned_types: Optional[MutableMapping[type[BaseModel], type[BaseModel]]] = None,
|
||||
) -> ModelField:
|
||||
if PYDANTIC_V2:
|
||||
from ._compat import v2
|
||||
|
||||
if isinstance(field, v2.ModelField):
|
||||
return field
|
||||
if isinstance(field, v2.ModelField):
|
||||
return field
|
||||
|
||||
from fastapi._compat import v1
|
||||
|
||||
@@ -154,8 +149,6 @@ def create_cloned_field(
|
||||
cloned_types = _CLONED_TYPES_CACHE
|
||||
|
||||
original_type = field.type_
|
||||
if is_dataclass(original_type) and hasattr(original_type, "__pydantic_model__"):
|
||||
original_type = original_type.__pydantic_model__
|
||||
use_type = original_type
|
||||
if lenient_issubclass(original_type, v1.BaseModel):
|
||||
original_type = cast(type[v1.BaseModel], original_type)
|
||||
|
||||
@@ -30,7 +30,6 @@ classifiers = [
"Framework :: AsyncIO",
"Framework :: FastAPI",
"Framework :: Pydantic",
"Framework :: Pydantic :: 1",
"Framework :: Pydantic :: 2",
"Intended Audience :: Developers",
"Programming Language :: Python :: 3 :: Only",

@@ -45,7 +44,7 @@ classifiers = [
]
dependencies = [
"starlette>=0.40.0,<0.51.0",
"pydantic>=1.7.4,!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0",
"pydantic>=2.7.0",
"typing-extensions>=4.8.0",
"annotated-doc>=0.0.2",
]

@@ -71,11 +70,10 @@ standard = [
"email-validator >=2.0.0",
# Uvicorn with uvloop
"uvicorn[standard] >=0.12.0",
# TODO: this should be part of some pydantic optional extra dependencies
# # Settings management
# "pydantic-settings >=2.0.0",
"pydantic-settings >=2.0.0",
# # Extra Pydantic data types
# "pydantic-extra-types >=2.0.0",
"pydantic-extra-types >=2.0.0",
]

standard-no-fastapi-cloud-cli = [

@@ -90,11 +88,10 @@ standard-no-fastapi-cloud-cli = [
"email-validator >=2.0.0",
# Uvicorn with uvloop
"uvicorn[standard] >=0.12.0",
# TODO: this should be part of some pydantic optional extra dependencies
# # Settings management
# "pydantic-settings >=2.0.0",
"pydantic-settings >=2.0.0",
# # Extra Pydantic data types
# "pydantic-extra-types >=2.0.0",
"pydantic-extra-types >=2.0.0",
]

all = [

@@ -183,8 +180,6 @@ filterwarnings = [
# Ref: https://github.com/python-trio/trio/pull/3054
# Remove once there's a new version of Trio
'ignore:The `hash` argument is deprecated*:DeprecationWarning:trio',
# Ignore flaky coverage / pytest warning about SQLite connection, only applies to Python 3.13 and Pydantic v1
'ignore:Exception ignored in. <sqlite3\.Connection object.*:pytest.PytestUnraisableExceptionWarning',
]

[tool.coverage.run]

@@ -11,6 +11,7 @@ PyJWT==2.9.0
pyyaml >=5.3.1,<7.0.0
pwdlib[argon2] >=0.2.1
inline-snapshot>=0.21.1
pytest-codspeed==4.2.0
# types
types-ujson ==5.10.0.20240515
types-orjson ==3.6.2

@@ -2,6 +2,6 @@
-r requirements-tests.txt
-r requirements-docs.txt
-r requirements-translations.txt
pre-commit >=4.5.0,<5.0.0
prek==0.2.22
# For generating screenshots
playwright

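Note on the requirements change above: the pre-commit pin is swapped for prek, a CLI-compatible reimplementation of the pre-commit runner. A typical local invocation is assumed to look like the following; it is not spelled out in this diff:

# Hypothetical local usage of the pinned prek tool:
#   prek install
#   prek run --all-files
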
@@ -727,7 +727,7 @@ def translate_page(
print(f"Found existing translation: {out_path}")
old_translation = out_path.read_text(encoding="utf-8")
print(f"Translating {en_path} to {language} ({language_name})")
agent = Agent("openai:gpt-5")
agent = Agent("openai:gpt-5.2")

prompt_segments = [
general_prompt,

0 tests/benchmarks/__init__.py Normal file
404 tests/benchmarks/test_general_performance.py Normal file
@@ -0,0 +1,404 @@
|
||||
import json
|
||||
import sys
|
||||
import warnings
|
||||
from collections.abc import Iterator
|
||||
from typing import Annotated, Any
|
||||
|
||||
import pytest
|
||||
from fastapi import Depends, FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
if "--codspeed" not in sys.argv:
|
||||
pytest.skip(
|
||||
"Benchmark tests are skipped by default; run with --codspeed.",
|
||||
allow_module_level=True,
|
||||
)
|
||||
|
||||
LARGE_ITEMS: list[dict[str, Any]] = [
|
||||
{
|
||||
"id": i,
|
||||
"name": f"item-{i}",
|
||||
"values": list(range(25)),
|
||||
"meta": {
|
||||
"active": True,
|
||||
"group": i % 10,
|
||||
"tag": f"t{i % 5}",
|
||||
},
|
||||
}
|
||||
for i in range(300)
|
||||
]
|
||||
|
||||
LARGE_METADATA: dict[str, Any] = {
|
||||
"source": "benchmark",
|
||||
"version": 1,
|
||||
"flags": {"a": True, "b": False, "c": True},
|
||||
"notes": ["x" * 50, "y" * 50, "z" * 50],
|
||||
}
|
||||
|
||||
LARGE_PAYLOAD: dict[str, Any] = {"items": LARGE_ITEMS, "metadata": LARGE_METADATA}
|
||||
|
||||
|
||||
def dep_a():
|
||||
return 40
|
||||
|
||||
|
||||
def dep_b(a: Annotated[int, Depends(dep_a)]):
|
||||
return a + 2
|
||||
|
||||
|
||||
@pytest.fixture(
|
||||
scope="module",
|
||||
params=[
|
||||
"pydantic-v2",
|
||||
"pydantic-v1",
|
||||
],
|
||||
)
|
||||
def basemodel_class(request: pytest.FixtureRequest) -> type[Any]:
|
||||
if request.param == "pydantic-v2":
|
||||
from pydantic import BaseModel
|
||||
|
||||
return BaseModel
|
||||
else:
|
||||
from pydantic.v1 import BaseModel
|
||||
|
||||
return BaseModel
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def app(basemodel_class: type[Any]) -> FastAPI:
|
||||
class ItemIn(basemodel_class):
|
||||
name: str
|
||||
value: int
|
||||
|
||||
class ItemOut(basemodel_class):
|
||||
name: str
|
||||
value: int
|
||||
dep: int
|
||||
|
||||
class LargeIn(basemodel_class):
|
||||
items: list[dict[str, Any]]
|
||||
metadata: dict[str, Any]
|
||||
|
||||
class LargeOut(basemodel_class):
|
||||
items: list[dict[str, Any]]
|
||||
metadata: dict[str, Any]
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
with warnings.catch_warnings(record=True):
|
||||
warnings.filterwarnings(
|
||||
"ignore",
|
||||
message=r"pydantic\.v1 is deprecated and will soon stop being supported by FastAPI\..*",
|
||||
category=DeprecationWarning,
|
||||
)
|
||||
|
||||
@app.post("/sync/validated", response_model=ItemOut)
|
||||
def sync_validated(item: ItemIn, dep: Annotated[int, Depends(dep_b)]):
|
||||
return ItemOut(name=item.name, value=item.value, dep=dep)
|
||||
|
||||
@app.get("/sync/dict-no-response-model")
|
||||
def sync_dict_no_response_model():
|
||||
return {"name": "foo", "value": 123}
|
||||
|
||||
@app.get("/sync/dict-with-response-model", response_model=ItemOut)
|
||||
def sync_dict_with_response_model(
|
||||
dep: Annotated[int, Depends(dep_b)],
|
||||
):
|
||||
return {"name": "foo", "value": 123, "dep": dep}
|
||||
|
||||
@app.get("/sync/model-no-response-model")
|
||||
def sync_model_no_response_model(dep: Annotated[int, Depends(dep_b)]):
|
||||
return ItemOut(name="foo", value=123, dep=dep)
|
||||
|
||||
@app.get("/sync/model-with-response-model", response_model=ItemOut)
|
||||
def sync_model_with_response_model(dep: Annotated[int, Depends(dep_b)]):
|
||||
return ItemOut(name="foo", value=123, dep=dep)
|
||||
|
||||
@app.post("/async/validated", response_model=ItemOut)
|
||||
async def async_validated(
|
||||
item: ItemIn,
|
||||
dep: Annotated[int, Depends(dep_b)],
|
||||
):
|
||||
return ItemOut(name=item.name, value=item.value, dep=dep)
|
||||
|
||||
@app.post("/sync/large-receive")
|
||||
def sync_large_receive(payload: LargeIn):
|
||||
return {"received": len(payload.items)}
|
||||
|
||||
@app.post("/async/large-receive")
|
||||
async def async_large_receive(payload: LargeIn):
|
||||
return {"received": len(payload.items)}
|
||||
|
||||
@app.get("/sync/large-dict-no-response-model")
|
||||
def sync_large_dict_no_response_model():
|
||||
return LARGE_PAYLOAD
|
||||
|
||||
@app.get("/sync/large-dict-with-response-model", response_model=LargeOut)
|
||||
def sync_large_dict_with_response_model():
|
||||
return LARGE_PAYLOAD
|
||||
|
||||
@app.get("/sync/large-model-no-response-model")
|
||||
def sync_large_model_no_response_model():
|
||||
return LargeOut(items=LARGE_ITEMS, metadata=LARGE_METADATA)
|
||||
|
||||
@app.get("/sync/large-model-with-response-model", response_model=LargeOut)
|
||||
def sync_large_model_with_response_model():
|
||||
return LargeOut(items=LARGE_ITEMS, metadata=LARGE_METADATA)
|
||||
|
||||
@app.get("/async/large-dict-no-response-model")
|
||||
async def async_large_dict_no_response_model():
|
||||
return LARGE_PAYLOAD
|
||||
|
||||
@app.get("/async/large-dict-with-response-model", response_model=LargeOut)
|
||||
async def async_large_dict_with_response_model():
|
||||
return LARGE_PAYLOAD
|
||||
|
||||
@app.get("/async/large-model-no-response-model")
|
||||
async def async_large_model_no_response_model():
|
||||
return LargeOut(items=LARGE_ITEMS, metadata=LARGE_METADATA)
|
||||
|
||||
@app.get("/async/large-model-with-response-model", response_model=LargeOut)
|
||||
async def async_large_model_with_response_model():
|
||||
return LargeOut(items=LARGE_ITEMS, metadata=LARGE_METADATA)
|
||||
|
||||
@app.get("/async/dict-no-response-model")
|
||||
async def async_dict_no_response_model():
|
||||
return {"name": "foo", "value": 123}
|
||||
|
||||
@app.get("/async/dict-with-response-model", response_model=ItemOut)
|
||||
async def async_dict_with_response_model(
|
||||
dep: Annotated[int, Depends(dep_b)],
|
||||
):
|
||||
return {"name": "foo", "value": 123, "dep": dep}
|
||||
|
||||
@app.get("/async/model-no-response-model")
|
||||
async def async_model_no_response_model(
|
||||
dep: Annotated[int, Depends(dep_b)],
|
||||
):
|
||||
return ItemOut(name="foo", value=123, dep=dep)
|
||||
|
||||
@app.get("/async/model-with-response-model", response_model=ItemOut)
|
||||
async def async_model_with_response_model(
|
||||
dep: Annotated[int, Depends(dep_b)],
|
||||
):
|
||||
return ItemOut(name="foo", value=123, dep=dep)
|
||||
|
||||
return app
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def client(app: FastAPI) -> Iterator[TestClient]:
|
||||
with TestClient(app) as client:
|
||||
yield client
|
||||
|
||||
|
||||
def _bench_get(benchmark, client: TestClient, path: str) -> tuple[int, bytes]:
|
||||
warmup = client.get(path)
|
||||
assert warmup.status_code == 200
|
||||
|
||||
def do_request() -> tuple[int, bytes]:
|
||||
response = client.get(path)
|
||||
return response.status_code, response.content
|
||||
|
||||
return benchmark(do_request)
|
||||
|
||||
|
||||
def _bench_post_json(
|
||||
benchmark, client: TestClient, path: str, json: dict[str, Any]
|
||||
) -> tuple[int, bytes]:
|
||||
warmup = client.post(path, json=json)
|
||||
assert warmup.status_code == 200
|
||||
|
||||
def do_request() -> tuple[int, bytes]:
|
||||
response = client.post(path, json=json)
|
||||
return response.status_code, response.content
|
||||
|
||||
return benchmark(do_request)
|
||||
|
||||
|
||||
def test_sync_receiving_validated_pydantic_model(benchmark, client: TestClient) -> None:
|
||||
status_code, body = _bench_post_json(
|
||||
benchmark,
|
||||
client,
|
||||
"/sync/validated",
|
||||
json={"name": "foo", "value": 123},
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == b'{"name":"foo","value":123,"dep":42}'
|
||||
|
||||
|
||||
def test_sync_return_dict_without_response_model(benchmark, client: TestClient) -> None:
|
||||
status_code, body = _bench_get(benchmark, client, "/sync/dict-no-response-model")
|
||||
assert status_code == 200
|
||||
assert body == b'{"name":"foo","value":123}'
|
||||
|
||||
|
||||
def test_sync_return_dict_with_response_model(benchmark, client: TestClient) -> None:
|
||||
status_code, body = _bench_get(benchmark, client, "/sync/dict-with-response-model")
|
||||
assert status_code == 200
|
||||
assert body == b'{"name":"foo","value":123,"dep":42}'
|
||||
|
||||
|
||||
def test_sync_return_model_without_response_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_get(benchmark, client, "/sync/model-no-response-model")
|
||||
assert status_code == 200
|
||||
assert body == b'{"name":"foo","value":123,"dep":42}'
|
||||
|
||||
|
||||
def test_sync_return_model_with_response_model(benchmark, client: TestClient) -> None:
|
||||
status_code, body = _bench_get(benchmark, client, "/sync/model-with-response-model")
|
||||
assert status_code == 200
|
||||
assert body == b'{"name":"foo","value":123,"dep":42}'
|
||||
|
||||
|
||||
def test_async_receiving_validated_pydantic_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_post_json(
|
||||
benchmark, client, "/async/validated", json={"name": "foo", "value": 123}
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == b'{"name":"foo","value":123,"dep":42}'
|
||||
|
||||
|
||||
def test_async_return_dict_without_response_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_get(benchmark, client, "/async/dict-no-response-model")
|
||||
assert status_code == 200
|
||||
assert body == b'{"name":"foo","value":123}'
|
||||
|
||||
|
||||
def test_async_return_dict_with_response_model(benchmark, client: TestClient) -> None:
|
||||
status_code, body = _bench_get(benchmark, client, "/async/dict-with-response-model")
|
||||
assert status_code == 200
|
||||
assert body == b'{"name":"foo","value":123,"dep":42}'
|
||||
|
||||
|
||||
def test_async_return_model_without_response_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_get(benchmark, client, "/async/model-no-response-model")
|
||||
assert status_code == 200
|
||||
assert body == b'{"name":"foo","value":123,"dep":42}'
|
||||
|
||||
|
||||
def test_async_return_model_with_response_model(benchmark, client: TestClient) -> None:
|
||||
status_code, body = _bench_get(
|
||||
benchmark, client, "/async/model-with-response-model"
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == b'{"name":"foo","value":123,"dep":42}'
|
||||
|
||||
|
||||
def test_sync_receiving_large_payload(benchmark, client: TestClient) -> None:
|
||||
status_code, body = _bench_post_json(
|
||||
benchmark,
|
||||
client,
|
||||
"/sync/large-receive",
|
||||
json=LARGE_PAYLOAD,
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == b'{"received":300}'
|
||||
|
||||
|
||||
def test_async_receiving_large_payload(benchmark, client: TestClient) -> None:
|
||||
status_code, body = _bench_post_json(
|
||||
benchmark,
|
||||
client,
|
||||
"/async/large-receive",
|
||||
json=LARGE_PAYLOAD,
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == b'{"received":300}'
|
||||
|
||||
|
||||
def _expected_large_payload_json_bytes() -> bytes:
|
||||
return json.dumps(
|
||||
LARGE_PAYLOAD,
|
||||
ensure_ascii=False,
|
||||
allow_nan=False,
|
||||
separators=(",", ":"),
|
||||
).encode("utf-8")
|
||||
|
||||
|
||||
def test_sync_return_large_dict_without_response_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_get(
|
||||
benchmark, client, "/sync/large-dict-no-response-model"
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == _expected_large_payload_json_bytes()
|
||||
|
||||
|
||||
def test_sync_return_large_dict_with_response_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_get(
|
||||
benchmark, client, "/sync/large-dict-with-response-model"
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == _expected_large_payload_json_bytes()
|
||||
|
||||
|
||||
def test_sync_return_large_model_without_response_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_get(
|
||||
benchmark, client, "/sync/large-model-no-response-model"
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == _expected_large_payload_json_bytes()
|
||||
|
||||
|
||||
def test_sync_return_large_model_with_response_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_get(
|
||||
benchmark, client, "/sync/large-model-with-response-model"
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == _expected_large_payload_json_bytes()
|
||||
|
||||
|
||||
def test_async_return_large_dict_without_response_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_get(
|
||||
benchmark, client, "/async/large-dict-no-response-model"
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == _expected_large_payload_json_bytes()
|
||||
|
||||
|
||||
def test_async_return_large_dict_with_response_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_get(
|
||||
benchmark, client, "/async/large-dict-with-response-model"
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == _expected_large_payload_json_bytes()
|
||||
|
||||
|
||||
def test_async_return_large_model_without_response_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_get(
|
||||
benchmark, client, "/async/large-model-no-response-model"
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == _expected_large_payload_json_bytes()
|
||||
|
||||
|
||||
def test_async_return_large_model_with_response_model(
|
||||
benchmark, client: TestClient
|
||||
) -> None:
|
||||
status_code, body = _bench_get(
|
||||
benchmark, client, "/async/large-model-with-response-model"
|
||||
)
|
||||
assert status_code == 200
|
||||
assert body == _expected_large_payload_json_bytes()
|
||||
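A usage note for the benchmark module above: it skips itself unless pytest is invoked with the CodSpeed flag, so a local run would look roughly like this (invocation inferred from the module-level skip condition and the pytest-codspeed pin in requirements-tests.txt; not spelled out in this diff):

# Hypothetical local invocation for the benchmarks above:
#   pytest tests/benchmarks/ --codspeed
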
@@ -2,18 +2,12 @@ from typing import Union
|
||||
|
||||
from dirty_equals import IsDict
|
||||
from fastapi import FastAPI
|
||||
from fastapi._compat import PYDANTIC_V2
|
||||
from fastapi.testclient import TestClient
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
|
||||
class FooBaseModel(BaseModel):
|
||||
if PYDANTIC_V2:
|
||||
model_config = ConfigDict(extra="forbid")
|
||||
else:
|
||||
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
model_config = ConfigDict(extra="forbid")
|
||||
|
||||
|
||||
class Foo(FooBaseModel):
|
||||
|
||||
@@ -4,7 +4,6 @@ import pytest
|
||||
from fastapi import Depends, FastAPI, Path
|
||||
from fastapi.param_functions import Query
|
||||
from fastapi.testclient import TestClient
|
||||
from fastapi.utils import PYDANTIC_V2
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
@@ -71,6 +70,5 @@ def test_multiple_annotations():
|
||||
response = client.get("/multi-query", params={"foo": "123"})
|
||||
assert response.status_code == 422
|
||||
|
||||
if PYDANTIC_V2:
|
||||
response = client.get("/multi-query", params={"foo": "1"})
|
||||
assert response.status_code == 422
|
||||
response = client.get("/multi-query", params={"foo": "1"})
|
||||
assert response.status_code == 422
|
||||
|
||||
@@ -5,8 +5,6 @@ from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
from inline_snapshot import snapshot
|
||||
|
||||
from .utils import needs_pydanticv2
|
||||
|
||||
|
||||
@pytest.fixture(name="client")
|
||||
def get_client():
|
||||
@@ -42,13 +40,11 @@ def get_client():
|
||||
return client
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_get(client: TestClient):
|
||||
response = client.get("/")
|
||||
assert response.json() == {"custom_field": [1.0, 2.0, 3.0]}
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_typeadapter():
|
||||
# This test is only to confirm that Pydantic alone is working as expected
|
||||
from pydantic import (
|
||||
@@ -93,7 +89,6 @@ def test_typeadapter():
|
||||
)
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_openapi_schema(client: TestClient):
|
||||
response = client.get("openapi.json")
|
||||
assert response.json() == snapshot(
|
||||
|
||||
@@ -14,10 +14,9 @@ from fastapi.testclient import TestClient
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from pydantic.fields import FieldInfo
|
||||
|
||||
from .utils import needs_py310, needs_py_lt_314, needs_pydanticv2
|
||||
from .utils import needs_py310, needs_py_lt_314
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_model_field_default_required():
|
||||
from fastapi._compat import v2
|
||||
|
||||
@@ -46,7 +45,6 @@ def test_is_model_field():
|
||||
assert not _is_model_field(str)
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_get_model_config():
|
||||
# For coverage in Pydantic v2
|
||||
class Foo(BaseModel):
|
||||
@@ -75,7 +73,6 @@ def test_complex():
|
||||
assert response2.json() == [1, 2]
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_propagates_pydantic2_model_config():
|
||||
app = FastAPI()
|
||||
|
||||
@@ -136,7 +133,6 @@ def test_is_uploadfile_sequence_annotation():
|
||||
assert is_uploadfile_sequence_annotation(Union[list[str], list[UploadFile]])
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_serialize_sequence_value_with_optional_list():
|
||||
"""Test that serialize_sequence_value handles optional lists correctly."""
|
||||
from fastapi._compat import v2
|
||||
@@ -148,7 +144,6 @@ def test_serialize_sequence_value_with_optional_list():
|
||||
assert isinstance(result, list)
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
@needs_py310
|
||||
def test_serialize_sequence_value_with_optional_list_pipe_union():
|
||||
"""Test that serialize_sequence_value handles optional lists correctly (with new syntax)."""
|
||||
@@ -161,7 +156,6 @@ def test_serialize_sequence_value_with_optional_list_pipe_union():
|
||||
assert isinstance(result, list)
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_serialize_sequence_value_with_none_first_in_union():
|
||||
"""Test that serialize_sequence_value handles Union[None, List[...]] correctly."""
|
||||
from fastapi._compat import v2
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import sys
|
||||
import warnings
|
||||
from typing import Optional
|
||||
|
||||
import pytest
|
||||
|
||||
from tests.utils import pydantic_snapshot, skip_module_if_py_gte_314
|
||||
from tests.utils import skip_module_if_py_gte_314
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
skip_module_if_py_gte_314()
|
||||
@@ -33,94 +34,90 @@ class Item(BaseModel):
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
with warnings.catch_warnings(record=True):
|
||||
warnings.simplefilter("always")
|
||||
|
||||
@app.get("/items/{item_id}")
|
||||
def get_item_with_path(
|
||||
item_id: Annotated[int, Path(title="The ID of the item", ge=1, le=1000)],
|
||||
):
|
||||
return {"item_id": item_id}
|
||||
@app.get("/items/{item_id}")
|
||||
def get_item_with_path(
|
||||
item_id: Annotated[int, Path(title="The ID of the item", ge=1, le=1000)],
|
||||
):
|
||||
return {"item_id": item_id}
|
||||
|
||||
@app.get("/items/")
|
||||
def get_items_with_query(
|
||||
q: Annotated[
|
||||
Optional[str],
|
||||
Query(min_length=3, max_length=50, pattern="^[a-zA-Z0-9 ]+$"),
|
||||
] = None,
|
||||
skip: Annotated[int, Query(ge=0)] = 0,
|
||||
limit: Annotated[int, Query(ge=1, le=100, examples=[5])] = 10,
|
||||
):
|
||||
return {"q": q, "skip": skip, "limit": limit}
|
||||
|
||||
@app.get("/items/")
|
||||
def get_items_with_query(
|
||||
q: Annotated[
|
||||
Optional[str], Query(min_length=3, max_length=50, pattern="^[a-zA-Z0-9 ]+$")
|
||||
] = None,
|
||||
skip: Annotated[int, Query(ge=0)] = 0,
|
||||
limit: Annotated[int, Query(ge=1, le=100, examples=[5])] = 10,
|
||||
):
|
||||
return {"q": q, "skip": skip, "limit": limit}
|
||||
@app.get("/users/")
|
||||
def get_user_with_header(
|
||||
x_custom: Annotated[Optional[str], Header()] = None,
|
||||
x_token: Annotated[Optional[str], Header(convert_underscores=True)] = None,
|
||||
):
|
||||
return {"x_custom": x_custom, "x_token": x_token}
|
||||
|
||||
@app.get("/cookies/")
|
||||
def get_cookies(
|
||||
session_id: Annotated[Optional[str], Cookie()] = None,
|
||||
tracking_id: Annotated[Optional[str], Cookie(min_length=10)] = None,
|
||||
):
|
||||
return {"session_id": session_id, "tracking_id": tracking_id}
|
||||
|
||||
@app.get("/users/")
|
||||
def get_user_with_header(
|
||||
x_custom: Annotated[Optional[str], Header()] = None,
|
||||
x_token: Annotated[Optional[str], Header(convert_underscores=True)] = None,
|
||||
):
|
||||
return {"x_custom": x_custom, "x_token": x_token}
|
||||
@app.post("/items/")
|
||||
def create_item(
|
||||
item: Annotated[
|
||||
Item,
|
||||
Body(
|
||||
examples=[{"name": "Foo", "price": 35.4, "description": "The Foo item"}]
|
||||
),
|
||||
],
|
||||
):
|
||||
return {"item": item}
|
||||
|
||||
@app.post("/items-embed/")
|
||||
def create_item_embed(
|
||||
item: Annotated[Item, Body(embed=True)],
|
||||
):
|
||||
return {"item": item}
|
||||
|
||||
@app.get("/cookies/")
|
||||
def get_cookies(
|
||||
session_id: Annotated[Optional[str], Cookie()] = None,
|
||||
tracking_id: Annotated[Optional[str], Cookie(min_length=10)] = None,
|
||||
):
|
||||
return {"session_id": session_id, "tracking_id": tracking_id}
|
||||
@app.put("/items/{item_id}")
|
||||
def update_item(
|
||||
item_id: Annotated[int, Path(ge=1)],
|
||||
item: Annotated[Item, Body()],
|
||||
importance: Annotated[int, Body(gt=0, le=10)],
|
||||
):
|
||||
return {"item": item, "importance": importance}
|
||||
|
||||
@app.post("/form-data/")
|
||||
def submit_form(
|
||||
username: Annotated[str, Form(min_length=3, max_length=50)],
|
||||
password: Annotated[str, Form(min_length=8)],
|
||||
email: Annotated[Optional[str], Form()] = None,
|
||||
):
|
||||
return {"username": username, "password": password, "email": email}
|
||||
|
||||
@app.post("/items/")
|
||||
def create_item(
|
||||
item: Annotated[
|
||||
Item,
|
||||
Body(examples=[{"name": "Foo", "price": 35.4, "description": "The Foo item"}]),
|
||||
],
|
||||
):
|
||||
return {"item": item}
|
||||
@app.post("/upload/")
|
||||
def upload_file(
|
||||
file: Annotated[bytes, File()],
|
||||
description: Annotated[Optional[str], Form()] = None,
|
||||
):
|
||||
return {"file_size": len(file), "description": description}
@app.post("/items-embed/")
def create_item_embed(
    item: Annotated[Item, Body(embed=True)],
):
    return {"item": item}


@app.put("/items/{item_id}")
def update_item(
    item_id: Annotated[int, Path(ge=1)],
    item: Annotated[Item, Body()],
    importance: Annotated[int, Body(gt=0, le=10)],
):
    return {"item": item, "importance": importance}


@app.post("/form-data/")
def submit_form(
    username: Annotated[str, Form(min_length=3, max_length=50)],
    password: Annotated[str, Form(min_length=8)],
    email: Annotated[Optional[str], Form()] = None,
):
    return {"username": username, "password": password, "email": email}


@app.post("/upload/")
def upload_file(
    file: Annotated[bytes, File()],
    description: Annotated[Optional[str], Form()] = None,
):
    return {"file_size": len(file), "description": description}
@app.post("/upload-multiple/")
def upload_multiple_files(
    files: Annotated[list[bytes], File()],
    note: Annotated[str, Form()] = "",
):
    return {
        "file_count": len(files),
        "total_size": sum(len(f) for f in files),
        "note": note,
    }


@app.post("/upload-multiple/")
def upload_multiple_files(
    files: Annotated[list[bytes], File()],
    note: Annotated[str, Form()] = "",
):
    return {
        "file_count": len(files),
        "total_size": sum(len(f) for f in files),
        "note": note,
    }


client = TestClient(app)
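The endpoints above are exercised through this TestClient in the hunks that follow. As a quick illustrative sketch (not part of the diff) of how the Query constraints surface to a caller, a value shorter than min_length is rejected with a 422 while a valid one is echoed back:

# Illustrative only: assumes the app defined above is importable as `app`.
from fastapi.testclient import TestClient

sketch_client = TestClient(app)

# "ab" is shorter than min_length=3, so validation fails with 422.
assert sketch_client.get("/items/", params={"q": "ab"}).status_code == 422
# A value satisfying the constraints is returned unchanged.
assert sketch_client.get("/items/", params={"q": "abc"}).json()["q"] == "abc"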
@@ -211,10 +208,10 @@ def test_header_params_none():
|
||||
|
||||
# Cookie parameter tests
|
||||
def test_cookie_params():
|
||||
with TestClient(app) as client:
|
||||
client.cookies.set("session_id", "abc123")
|
||||
client.cookies.set("tracking_id", "1234567890abcdef")
|
||||
response = client.get("/cookies/")
|
||||
with TestClient(app) as test_client:
|
||||
test_client.cookies.set("session_id", "abc123")
|
||||
test_client.cookies.set("tracking_id", "1234567890abcdef")
|
||||
response = test_client.get("/cookies/")
|
||||
assert response.status_code == 200
|
||||
assert response.json() == {
|
||||
"session_id": "abc123",
|
||||
@@ -223,9 +220,9 @@ def test_cookie_params():
|
||||
|
||||
|
||||
def test_cookie_tracking_id_too_short():
|
||||
with TestClient(app) as client:
|
||||
client.cookies.set("tracking_id", "short")
|
||||
response = client.get("/cookies/")
|
||||
with TestClient(app) as test_client:
|
||||
test_client.cookies.set("tracking_id", "short")
|
||||
response = test_client.get("/cookies/")
|
||||
assert response.status_code == 422
|
||||
assert response.json() == snapshot(
|
||||
{
|
||||
@@ -588,23 +585,14 @@ def test_openapi_schema():
|
||||
"required": True,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"title": "Body",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Body_update_item_items__item_id__put"
|
||||
}
|
||||
),
|
||||
v2=snapshot(
|
||||
{
|
||||
"title": "Body",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Body_update_item_items__item_id__put"
|
||||
}
|
||||
],
|
||||
}
|
||||
),
|
||||
),
|
||||
],
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
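These hunks drop the pydantic_snapshot(v1=..., v2=...) wrappers in favor of a single expected schema. As a rough sketch of what that helper presumably does (the real implementation lives in tests/utils.py and may differ in detail):

# Hypothetical sketch of the pydantic_snapshot helper used by these tests.
from fastapi._compat import PYDANTIC_V2


def pydantic_snapshot(*, v2, v1):
    # Return whichever expected value matches the installed Pydantic major version.
    return v2 if PYDANTIC_V2 else v1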
@@ -809,23 +797,14 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Body_create_item_embed_items_embed__post"
|
||||
}
|
||||
),
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Body_create_item_embed_items_embed__post"
|
||||
}
|
||||
],
|
||||
"title": "Body",
|
||||
}
|
||||
),
|
||||
),
|
||||
],
|
||||
"title": "Body",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -855,23 +834,14 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/x-www-form-urlencoded": {
|
||||
"schema": pydantic_snapshot(
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Body_submit_form_form_data__post"
|
||||
}
|
||||
),
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Body_submit_form_form_data__post"
|
||||
}
|
||||
],
|
||||
"title": "Body",
|
||||
}
|
||||
),
|
||||
),
|
||||
],
|
||||
"title": "Body",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -901,23 +871,14 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"multipart/form-data": {
|
||||
"schema": pydantic_snapshot(
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Body_upload_file_upload__post"
|
||||
}
|
||||
),
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Body_upload_file_upload__post"
|
||||
}
|
||||
],
|
||||
"title": "Body",
|
||||
}
|
||||
),
|
||||
),
|
||||
],
|
||||
"title": "Body",
|
||||
},
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -947,23 +908,14 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"multipart/form-data": {
|
||||
"schema": pydantic_snapshot(
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Body_upload_multiple_files_upload_multiple__post"
|
||||
}
|
||||
),
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Body_upload_multiple_files_upload_multiple__post"
|
||||
}
|
||||
],
|
||||
"title": "Body",
|
||||
}
|
||||
),
|
||||
),
|
||||
],
|
||||
"title": "Body",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -990,21 +942,12 @@ def test_openapi_schema():
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Body_create_item_embed_items_embed__post": {
|
||||
"properties": pydantic_snapshot(
|
||||
v1=snapshot(
|
||||
{"item": {"$ref": "#/components/schemas/Item"}}
|
||||
),
|
||||
v2=snapshot(
|
||||
{
|
||||
"item": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/Item"}
|
||||
],
|
||||
"title": "Item",
|
||||
}
|
||||
}
|
||||
),
|
||||
),
|
||||
"properties": {
|
||||
"item": {
|
||||
"allOf": [{"$ref": "#/components/schemas/Item"}],
|
||||
"title": "Item",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["item"],
|
||||
"title": "Body_create_item_embed_items_embed__post",
|
||||
@@ -1030,17 +973,10 @@ def test_openapi_schema():
|
||||
},
|
||||
"Body_update_item_items__item_id__put": {
|
||||
"properties": {
|
||||
"item": pydantic_snapshot(
|
||||
v1=snapshot({"$ref": "#/components/schemas/Item"}),
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/Item"}
|
||||
],
|
||||
"title": "Item",
|
||||
}
|
||||
),
|
||||
),
|
||||
"item": {
|
||||
"allOf": [{"$ref": "#/components/schemas/Item"}],
|
||||
"title": "Item",
|
||||
},
|
||||
"importance": {
|
||||
"type": "integer",
|
||||
"maximum": 10.0,
|
||||
|
||||
@@ -2,8 +2,6 @@ import pytest
|
||||
from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from .utils import needs_pydanticv2
|
||||
|
||||
|
||||
@pytest.fixture(name="client")
|
||||
def get_client(request):
|
||||
@@ -35,7 +33,6 @@ def get_client(request):
|
||||
|
||||
@pytest.mark.parametrize("client", [True, False], indirect=True)
|
||||
@pytest.mark.parametrize("path", ["/", "/responses"])
|
||||
@needs_pydanticv2
|
||||
def test_get(client: TestClient, path: str):
|
||||
response = client.get(path)
|
||||
assert response.status_code == 200, response.text
|
||||
@@ -43,7 +40,6 @@ def test_get(client: TestClient, path: str):
|
||||
|
||||
|
||||
@pytest.mark.parametrize("client", [True, False], indirect=True)
|
||||
@needs_pydanticv2
|
||||
def test_openapi_schema(client: TestClient):
|
||||
response = client.get("/openapi.json")
|
||||
assert response.status_code == 200, response.text
|
||||
|
||||
@@ -1,12 +1,8 @@
|
||||
from typing import Annotated, Optional
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi._compat import PYDANTIC_V2
|
||||
from fastapi.testclient import TestClient
|
||||
from pydantic import BaseModel
|
||||
|
||||
if PYDANTIC_V2:
|
||||
from pydantic import WithJsonSchema
|
||||
from pydantic import BaseModel, WithJsonSchema
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
@@ -14,23 +10,15 @@ app = FastAPI()
|
||||
class Item(BaseModel):
|
||||
name: str
|
||||
|
||||
if PYDANTIC_V2:
|
||||
description: Annotated[
|
||||
Optional[str], WithJsonSchema({"type": ["string", "null"]})
|
||||
] = None
|
||||
description: Annotated[
|
||||
Optional[str], WithJsonSchema({"type": ["string", "null"]})
|
||||
] = None
|
||||
|
||||
model_config = {
|
||||
"json_schema_extra": {
|
||||
"x-something-internal": {"level": 4},
|
||||
}
|
||||
model_config = {
|
||||
"json_schema_extra": {
|
||||
"x-something-internal": {"level": 4},
|
||||
}
|
||||
else:
|
||||
description: Optional[str] = None # type: ignore[no-redef]
|
||||
|
||||
class Config:
|
||||
schema_extra = {
|
||||
"x-something-internal": {"level": 4},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@app.get("/foo", response_model=Item)
|
||||
@@ -55,7 +43,7 @@ item_schema = {
|
||||
},
|
||||
"description": {
|
||||
"title": "Description",
|
||||
"type": ["string", "null"] if PYDANTIC_V2 else "string",
|
||||
"type": ["string", "null"],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -7,12 +7,6 @@ from fastapi.datastructures import Default
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
|
||||
# TODO: remove when deprecating Pydantic v1
|
||||
def test_upload_file_invalid():
|
||||
with pytest.raises(ValueError):
|
||||
UploadFile.validate("not a Starlette UploadFile")
|
||||
|
||||
|
||||
def test_upload_file_invalid_pydantic_v2():
|
||||
with pytest.raises(ValueError):
|
||||
UploadFile._validate("not a Starlette UploadFile", {})
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import warnings
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
from pydantic import BaseModel
|
||||
|
||||
from .utils import needs_pydanticv1, needs_pydanticv2
|
||||
from .utils import needs_pydanticv1
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_pydanticv2():
|
||||
from pydantic import field_serializer
|
||||
|
||||
@@ -34,7 +34,9 @@ def test_pydanticv2():
|
||||
# TODO: remove when deprecating Pydantic v1
|
||||
@needs_pydanticv1
|
||||
def test_pydanticv1():
|
||||
class ModelWithDatetimeField(BaseModel):
|
||||
from pydantic import v1
|
||||
|
||||
class ModelWithDatetimeField(v1.BaseModel):
|
||||
dt_field: datetime
|
||||
|
||||
class Config:
|
||||
@@ -47,9 +49,12 @@ def test_pydanticv1():
|
||||
app = FastAPI()
|
||||
model = ModelWithDatetimeField(dt_field=datetime(2019, 1, 1, 8))
|
||||
|
||||
@app.get("/model", response_model=ModelWithDatetimeField)
|
||||
def get_model():
|
||||
return model
|
||||
with warnings.catch_warnings(record=True):
|
||||
warnings.simplefilter("always")
|
||||
|
||||
@app.get("/model", response_model=ModelWithDatetimeField)
|
||||
def get_model():
|
||||
return model
|
||||
|
||||
client = TestClient(app)
|
||||
with client:
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import warnings
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import Depends, FastAPI
|
||||
from pydantic import BaseModel, validator
|
||||
from pydantic.v1 import BaseModel, validator
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
@@ -18,6 +19,7 @@ class ModelA(BaseModel):
|
||||
name: str
|
||||
description: Optional[str] = None
|
||||
model_b: ModelB
|
||||
tags: dict[str, str] = {}
|
||||
|
||||
@validator("name")
|
||||
def lower_username(cls, name: str, values):
|
||||
@@ -30,6 +32,14 @@ async def get_model_c() -> ModelC:
|
||||
return ModelC(username="test-user", password="test-password")
|
||||
|
||||
|
||||
@app.get("/model/{name}", response_model=ModelA)
|
||||
async def get_model_a(name: str, model_c=Depends(get_model_c)):
|
||||
return {"name": name, "description": "model-a-desc", "model_b": model_c}
|
||||
with warnings.catch_warnings(record=True):
|
||||
warnings.simplefilter("always")
|
||||
|
||||
@app.get("/model/{name}", response_model=ModelA)
|
||||
async def get_model_a(name: str, model_c=Depends(get_model_c)):
|
||||
return {
|
||||
"name": name,
|
||||
"description": "model-a-desc",
|
||||
"model_b": model_c,
|
||||
"tags": {"key1": "value1", "key2": "value2"},
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import pytest
|
||||
from fastapi.exceptions import ResponseValidationError
|
||||
from fastapi.testclient import TestClient
|
||||
from inline_snapshot import snapshot
|
||||
|
||||
from ..utils import needs_pydanticv1
|
||||
|
||||
@@ -21,6 +22,7 @@ def test_filter_sub_model(client: TestClient):
|
||||
"name": "modelA",
|
||||
"description": "model-a-desc",
|
||||
"model_b": {"username": "test-user"},
|
||||
"tags": {"key1": "value1", "key2": "value2"},
|
||||
}
|
||||
|
||||
|
||||
@@ -41,90 +43,104 @@ def test_validator_is_cloned(client: TestClient):
|
||||
def test_openapi_schema(client: TestClient):
|
||||
response = client.get("/openapi.json")
|
||||
assert response.status_code == 200, response.text
|
||||
assert response.json() == {
|
||||
"openapi": "3.1.0",
|
||||
"info": {"title": "FastAPI", "version": "0.1.0"},
|
||||
"paths": {
|
||||
"/model/{name}": {
|
||||
"get": {
|
||||
"summary": "Get Model A",
|
||||
"operationId": "get_model_a_model__name__get",
|
||||
"parameters": [
|
||||
{
|
||||
"required": True,
|
||||
"schema": {"title": "Name", "type": "string"},
|
||||
"name": "name",
|
||||
"in": "path",
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {"$ref": "#/components/schemas/ModelA"}
|
||||
}
|
||||
assert response.json() == snapshot(
|
||||
{
|
||||
"openapi": "3.1.0",
|
||||
"info": {"title": "FastAPI", "version": "0.1.0"},
|
||||
"paths": {
|
||||
"/model/{name}": {
|
||||
"get": {
|
||||
"summary": "Get Model A",
|
||||
"operationId": "get_model_a_model__name__get",
|
||||
"parameters": [
|
||||
{
|
||||
"required": True,
|
||||
"schema": {"title": "Name", "type": "string"},
|
||||
"name": "name",
|
||||
"in": "path",
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/ModelA"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"schemas": {
|
||||
"HTTPValidationError": {
|
||||
"title": "HTTPValidationError",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"detail": {
|
||||
"title": "Detail",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ValidationError"
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
"ModelA": {
|
||||
"title": "ModelA",
|
||||
"required": ["name", "model_b"],
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"title": "Name", "type": "string"},
|
||||
"description": {"title": "Description", "type": "string"},
|
||||
"model_b": {"$ref": "#/components/schemas/ModelB"},
|
||||
"tags": {
|
||||
"additionalProperties": {"type": "string"},
|
||||
"type": "object",
|
||||
"title": "Tags",
|
||||
"default": {},
|
||||
},
|
||||
},
|
||||
},
|
||||
"ModelB": {
|
||||
"title": "ModelB",
|
||||
"required": ["username"],
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"username": {"title": "Username", "type": "string"}
|
||||
},
|
||||
},
|
||||
"ValidationError": {
|
||||
"title": "ValidationError",
|
||||
"required": ["loc", "msg", "type"],
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"loc": {
|
||||
"title": "Location",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"anyOf": [{"type": "string"}, {"type": "integer"}]
|
||||
},
|
||||
},
|
||||
"msg": {"title": "Message", "type": "string"},
|
||||
"type": {"title": "Error Type", "type": "string"},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"schemas": {
|
||||
"HTTPValidationError": {
|
||||
"title": "HTTPValidationError",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"detail": {
|
||||
"title": "Detail",
|
||||
"type": "array",
|
||||
"items": {"$ref": "#/components/schemas/ValidationError"},
|
||||
}
|
||||
},
|
||||
},
|
||||
"ModelA": {
|
||||
"title": "ModelA",
|
||||
"required": ["name", "model_b"],
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"title": "Name", "type": "string"},
|
||||
"description": {"title": "Description", "type": "string"},
|
||||
"model_b": {"$ref": "#/components/schemas/ModelB"},
|
||||
},
|
||||
},
|
||||
"ModelB": {
|
||||
"title": "ModelB",
|
||||
"required": ["username"],
|
||||
"type": "object",
|
||||
"properties": {"username": {"title": "Username", "type": "string"}},
|
||||
},
|
||||
"ValidationError": {
|
||||
"title": "ValidationError",
|
||||
"required": ["loc", "msg", "type"],
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"loc": {
|
||||
"title": "Location",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"anyOf": [{"type": "string"}, {"type": "integer"}]
|
||||
},
|
||||
},
|
||||
"msg": {"title": "Message", "type": "string"},
|
||||
"type": {"title": "Error Type", "type": "string"},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
@@ -5,8 +5,7 @@ from dirty_equals import HasRepr, IsDict, IsOneOf
|
||||
from fastapi import Depends, FastAPI
|
||||
from fastapi.exceptions import ResponseValidationError
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from .utils import needs_pydanticv2
|
||||
from inline_snapshot import snapshot
|
||||
|
||||
|
||||
@pytest.fixture(name="client")
|
||||
@@ -25,6 +24,7 @@ def get_client():
|
||||
name: str
|
||||
description: Optional[str] = None
|
||||
foo: ModelB
|
||||
tags: dict[str, str] = {}
|
||||
|
||||
@field_validator("name")
|
||||
def lower_username(cls, name: str, info: ValidationInfo):
|
||||
@@ -37,13 +37,17 @@ def get_client():
|
||||
|
||||
@app.get("/model/{name}", response_model=ModelA)
|
||||
async def get_model_a(name: str, model_c=Depends(get_model_c)):
|
||||
return {"name": name, "description": "model-a-desc", "foo": model_c}
|
||||
return {
|
||||
"name": name,
|
||||
"description": "model-a-desc",
|
||||
"foo": model_c,
|
||||
"tags": {"key1": "value1", "key2": "value2"},
|
||||
}
|
||||
|
||||
client = TestClient(app)
|
||||
return client
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_filter_sub_model(client: TestClient):
|
||||
response = client.get("/model/modelA")
|
||||
assert response.status_code == 200, response.text
|
||||
@@ -51,10 +55,10 @@ def test_filter_sub_model(client: TestClient):
|
||||
"name": "modelA",
|
||||
"description": "model-a-desc",
|
||||
"foo": {"username": "test-user"},
|
||||
"tags": {"key1": "value1", "key2": "value2"},
|
||||
}
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_validator_is_cloned(client: TestClient):
|
||||
with pytest.raises(ResponseValidationError) as err:
|
||||
client.get("/model/modelX")
|
||||
@@ -79,106 +83,119 @@ def test_validator_is_cloned(client: TestClient):
|
||||
]
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_openapi_schema(client: TestClient):
|
||||
response = client.get("/openapi.json")
|
||||
assert response.status_code == 200, response.text
|
||||
assert response.json() == {
|
||||
"openapi": "3.1.0",
|
||||
"info": {"title": "FastAPI", "version": "0.1.0"},
|
||||
"paths": {
|
||||
"/model/{name}": {
|
||||
"get": {
|
||||
"summary": "Get Model A",
|
||||
"operationId": "get_model_a_model__name__get",
|
||||
"parameters": [
|
||||
{
|
||||
"required": True,
|
||||
"schema": {"title": "Name", "type": "string"},
|
||||
"name": "name",
|
||||
"in": "path",
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {"$ref": "#/components/schemas/ModelA"}
|
||||
}
|
||||
assert response.json() == snapshot(
|
||||
{
|
||||
"openapi": "3.1.0",
|
||||
"info": {"title": "FastAPI", "version": "0.1.0"},
|
||||
"paths": {
|
||||
"/model/{name}": {
|
||||
"get": {
|
||||
"summary": "Get Model A",
|
||||
"operationId": "get_model_a_model__name__get",
|
||||
"parameters": [
|
||||
{
|
||||
"required": True,
|
||||
"schema": {"title": "Name", "type": "string"},
|
||||
"name": "name",
|
||||
"in": "path",
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/ModelA"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"schemas": {
|
||||
"HTTPValidationError": {
|
||||
"title": "HTTPValidationError",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"detail": {
|
||||
"title": "Detail",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ValidationError"
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
"ModelA": {
|
||||
"title": "ModelA",
|
||||
"required": IsOneOf(
|
||||
["name", "description", "foo"],
|
||||
# TODO remove when deprecating Pydantic v1
|
||||
["name", "foo"],
|
||||
),
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"title": "Name", "type": "string"},
|
||||
"description": IsDict(
|
||||
{
|
||||
"title": "Description",
|
||||
"anyOf": [{"type": "string"}, {"type": "null"}],
|
||||
}
|
||||
)
# TODO remove when deprecating Pydantic v1
| IsDict({"title": "Description", "type": "string"}),
|
||||
"foo": {"$ref": "#/components/schemas/ModelB"},
|
||||
"tags": {
|
||||
"additionalProperties": {"type": "string"},
|
||||
"type": "object",
|
||||
"title": "Tags",
|
||||
"default": {},
|
||||
},
|
||||
},
|
||||
},
|
||||
"ModelB": {
|
||||
"title": "ModelB",
|
||||
"required": ["username"],
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"username": {"title": "Username", "type": "string"}
|
||||
},
|
||||
},
|
||||
"ValidationError": {
|
||||
"title": "ValidationError",
|
||||
"required": ["loc", "msg", "type"],
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"loc": {
|
||||
"title": "Location",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"anyOf": [{"type": "string"}, {"type": "integer"}]
|
||||
},
|
||||
},
|
||||
"msg": {"title": "Message", "type": "string"},
|
||||
"type": {"title": "Error Type", "type": "string"},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"schemas": {
|
||||
"HTTPValidationError": {
|
||||
"title": "HTTPValidationError",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"detail": {
|
||||
"title": "Detail",
|
||||
"type": "array",
|
||||
"items": {"$ref": "#/components/schemas/ValidationError"},
|
||||
}
|
||||
},
|
||||
},
|
||||
"ModelA": {
|
||||
"title": "ModelA",
|
||||
"required": IsOneOf(
|
||||
["name", "description", "foo"],
|
||||
# TODO remove when deprecating Pydantic v1
|
||||
["name", "foo"],
|
||||
),
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"title": "Name", "type": "string"},
|
||||
"description": IsDict(
|
||||
{
|
||||
"title": "Description",
|
||||
"anyOf": [{"type": "string"}, {"type": "null"}],
|
||||
}
|
||||
)
# TODO remove when deprecating Pydantic v1
| IsDict({"title": "Description", "type": "string"}),
|
||||
"foo": {"$ref": "#/components/schemas/ModelB"},
|
||||
},
|
||||
},
|
||||
"ModelB": {
|
||||
"title": "ModelB",
|
||||
"required": ["username"],
|
||||
"type": "object",
|
||||
"properties": {"username": {"title": "Username", "type": "string"}},
|
||||
},
|
||||
"ValidationError": {
|
||||
"title": "ValidationError",
|
||||
"required": ["loc", "msg", "type"],
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"loc": {
|
||||
"title": "Location",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"anyOf": [{"type": "string"}, {"type": "integer"}]
|
||||
},
|
||||
},
|
||||
"msg": {"title": "Message", "type": "string"},
|
||||
"type": {"title": "Error Type", "type": "string"},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
@@ -2,7 +2,6 @@ from typing import Annotated, Optional
|
||||
|
||||
from dirty_equals import IsDict
|
||||
from fastapi import FastAPI, Form
|
||||
from fastapi._compat import PYDANTIC_V2
|
||||
from fastapi.testclient import TestClient
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
@@ -20,12 +19,7 @@ class FormModel(BaseModel):
|
||||
class FormModelExtraAllow(BaseModel):
|
||||
param: str
|
||||
|
||||
if PYDANTIC_V2:
|
||||
model_config = {"extra": "allow"}
|
||||
else:
|
||||
|
||||
class Config:
|
||||
extra = "allow"
|
||||
model_config = {"extra": "allow"}
|
||||
|
||||
|
||||
@app.post("/form/")
|
||||
|
||||
@@ -1,181 +1,187 @@
|
||||
from collections.abc import Iterator
|
||||
from typing import Any
|
||||
import warnings
|
||||
|
||||
import fastapi._compat
|
||||
import fastapi.openapi.utils
|
||||
import pydantic.schema
|
||||
import pytest
|
||||
from fastapi import FastAPI
|
||||
from pydantic import BaseModel
|
||||
from starlette.testclient import TestClient
|
||||
from fastapi.testclient import TestClient
|
||||
from inline_snapshot import snapshot
|
||||
|
||||
from .utils import needs_pydanticv1
|
||||
|
||||
|
||||
class Address(BaseModel):
|
||||
"""
|
||||
This is a public description of an Address
|
||||
\f
|
||||
You can't see this part of the docstring, it's private!
|
||||
"""
|
||||
@pytest.fixture(
|
||||
name="client",
|
||||
params=[
|
||||
pytest.param("pydantic-v1", marks=needs_pydanticv1),
|
||||
"pydantic-v2",
|
||||
],
|
||||
)
|
||||
def client_fixture(request: pytest.FixtureRequest) -> TestClient:
|
||||
if request.param == "pydantic-v1":
|
||||
from pydantic.v1 import BaseModel
|
||||
else:
|
||||
from pydantic import BaseModel
|
||||
|
||||
line_1: str
|
||||
city: str
|
||||
state_province: str
|
||||
class Address(BaseModel):
|
||||
"""
|
||||
This is a public description of an Address
|
||||
\f
|
||||
You can't see this part of the docstring, it's private!
|
||||
"""
|
||||
|
||||
line_1: str
|
||||
city: str
|
||||
state_province: str
|
||||
|
||||
class Facility(BaseModel):
|
||||
id: str
|
||||
address: Address
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
if request.param == "pydantic-v1":
|
||||
with warnings.catch_warnings(record=True):
|
||||
warnings.simplefilter("always")
|
||||
|
||||
@app.get("/facilities/{facility_id}")
|
||||
def get_facility(facility_id: str) -> Facility:
|
||||
return Facility(
|
||||
id=facility_id,
|
||||
address=Address(
|
||||
line_1="123 Main St", city="Anytown", state_province="CA"
|
||||
),
|
||||
)
|
||||
else:
|
||||
|
||||
@app.get("/facilities/{facility_id}")
|
||||
def get_facility(facility_id: str) -> Facility:
|
||||
return Facility(
|
||||
id=facility_id,
|
||||
address=Address(
|
||||
line_1="123 Main St", city="Anytown", state_province="CA"
|
||||
),
|
||||
)
|
||||
|
||||
client = TestClient(app)
|
||||
return client
|
||||
|
||||
|
||||
class Facility(BaseModel):
|
||||
id: str
|
||||
address: Address
|
||||
def test_get(client: TestClient):
|
||||
response = client.get("/facilities/42")
|
||||
assert response.status_code == 200, response.text
|
||||
assert response.json() == {
|
||||
"id": "42",
|
||||
"address": {
|
||||
"line_1": "123 Main St",
|
||||
"city": "Anytown",
|
||||
"state_province": "CA",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
@app.get("/facilities/{facility_id}")
|
||||
def get_facility(facility_id: str) -> Facility: ...
|
||||
|
||||
|
||||
openapi_schema = {
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Address": {
|
||||
# NOTE: the description of this model shows only the public-facing text, before the `\f` in docstring
|
||||
"description": "This is a public description of an Address\n",
|
||||
"properties": {
|
||||
"city": {"title": "City", "type": "string"},
|
||||
"line_1": {"title": "Line 1", "type": "string"},
|
||||
"state_province": {"title": "State Province", "type": "string"},
|
||||
},
|
||||
"required": ["line_1", "city", "state_province"],
|
||||
"title": "Address",
|
||||
"type": "object",
|
||||
},
|
||||
"Facility": {
|
||||
"properties": {
|
||||
"address": {"$ref": "#/components/schemas/Address"},
|
||||
"id": {"title": "Id", "type": "string"},
|
||||
},
|
||||
"required": ["id", "address"],
|
||||
"title": "Facility",
|
||||
"type": "object",
|
||||
},
|
||||
"HTTPValidationError": {
|
||||
"properties": {
|
||||
"detail": {
|
||||
"items": {"$ref": "#/components/schemas/ValidationError"},
|
||||
"title": "Detail",
|
||||
"type": "array",
|
||||
}
|
||||
},
|
||||
"title": "HTTPValidationError",
|
||||
"type": "object",
|
||||
},
|
||||
"ValidationError": {
|
||||
"properties": {
|
||||
"loc": {
|
||||
"items": {"anyOf": [{"type": "string"}, {"type": "integer"}]},
|
||||
"title": "Location",
|
||||
"type": "array",
|
||||
},
|
||||
"msg": {"title": "Message", "type": "string"},
|
||||
"type": {"title": "Error Type", "type": "string"},
|
||||
},
|
||||
"required": ["loc", "msg", "type"],
|
||||
"title": "ValidationError",
|
||||
"type": "object",
|
||||
},
|
||||
}
|
||||
},
|
||||
"info": {"title": "FastAPI", "version": "0.1.0"},
|
||||
"openapi": "3.1.0",
|
||||
"paths": {
|
||||
"/facilities/{facility_id}": {
|
||||
"get": {
|
||||
"operationId": "get_facility_facilities__facility_id__get",
|
||||
"parameters": [
|
||||
{
|
||||
"in": "path",
|
||||
"name": "facility_id",
|
||||
"required": True,
|
||||
"schema": {"title": "Facility Id", "type": "string"},
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {"$ref": "#/components/schemas/Facility"}
|
||||
}
|
||||
},
|
||||
"description": "Successful Response",
|
||||
},
|
||||
"422": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": "Validation Error",
|
||||
},
|
||||
},
|
||||
"summary": "Get Facility",
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def test_openapi_schema():
|
||||
def test_openapi_schema(client: TestClient):
|
||||
"""
|
||||
Sanity check to ensure our app's openapi schema renders as we expect
|
||||
"""
|
||||
response = client.get("/openapi.json")
|
||||
assert response.status_code == 200, response.text
|
||||
assert response.json() == openapi_schema
|
||||
|
||||
|
||||
class SortedTypeSet(set):
|
||||
"""
|
||||
Set of Types whose `__iter__()` method yields results sorted by the type names
|
||||
"""
|
||||
|
||||
def __init__(self, seq: set[type[Any]], *, sort_reversed: bool):
|
||||
super().__init__(seq)
|
||||
self.sort_reversed = sort_reversed
|
||||
|
||||
def __iter__(self) -> Iterator[type[Any]]:
|
||||
members_sorted = sorted(
|
||||
super().__iter__(),
|
||||
key=lambda type_: type_.__name__,
|
||||
reverse=self.sort_reversed,
|
||||
)
|
||||
yield from members_sorted
|
||||
|
||||
|
||||
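SortedTypeSet only changes iteration order, which is what the regression test below relies on. A minimal illustrative sketch of its behavior (the class names here are hypothetical):

class Apple: ...
class Banana: ...

ordered = SortedTypeSet({Apple, Banana}, sort_reversed=False)
assert [t.__name__ for t in ordered] == ["Apple", "Banana"]

reversed_order = SortedTypeSet({Apple, Banana}, sort_reversed=True)
assert [t.__name__ for t in reversed_order] == ["Banana", "Apple"]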
@needs_pydanticv1
|
||||
@pytest.mark.parametrize("sort_reversed", [True, False])
|
||||
def test_model_description_escaped_with_formfeed(sort_reversed: bool):
|
||||
"""
|
||||
Regression test for bug fixed by https://github.com/fastapi/fastapi/pull/6039.
|
||||
|
||||
Test `get_model_definitions` with models passed in different order.
|
||||
"""
|
||||
from fastapi._compat import v1
|
||||
|
||||
all_fields = fastapi.openapi.utils.get_fields_from_routes(app.routes)
|
||||
|
||||
flat_models = v1.get_flat_models_from_fields(all_fields, known_models=set())
|
||||
model_name_map = pydantic.schema.get_model_name_map(flat_models)
|
||||
|
||||
expected_address_description = "This is a public description of an Address\n"
|
||||
|
||||
models = v1.get_model_definitions(
|
||||
flat_models=SortedTypeSet(flat_models, sort_reversed=sort_reversed),
|
||||
model_name_map=model_name_map,
|
||||
assert response.json() == snapshot(
|
||||
{
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Address": {
|
||||
# NOTE: the description of this model shows only the public-facing text, before the `\f` in docstring
|
||||
"description": "This is a public description of an Address\n",
|
||||
"properties": {
|
||||
"city": {"title": "City", "type": "string"},
|
||||
"line_1": {"title": "Line 1", "type": "string"},
|
||||
"state_province": {
|
||||
"title": "State Province",
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
"required": ["line_1", "city", "state_province"],
|
||||
"title": "Address",
|
||||
"type": "object",
|
||||
},
|
||||
"Facility": {
|
||||
"properties": {
|
||||
"address": {"$ref": "#/components/schemas/Address"},
|
||||
"id": {"title": "Id", "type": "string"},
|
||||
},
|
||||
"required": ["id", "address"],
|
||||
"title": "Facility",
|
||||
"type": "object",
|
||||
},
|
||||
"HTTPValidationError": {
|
||||
"properties": {
|
||||
"detail": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ValidationError"
|
||||
},
|
||||
"title": "Detail",
|
||||
"type": "array",
|
||||
}
|
||||
},
|
||||
"title": "HTTPValidationError",
|
||||
"type": "object",
|
||||
},
|
||||
"ValidationError": {
|
||||
"properties": {
|
||||
"loc": {
|
||||
"items": {
|
||||
"anyOf": [{"type": "string"}, {"type": "integer"}]
|
||||
},
|
||||
"title": "Location",
|
||||
"type": "array",
|
||||
},
|
||||
"msg": {"title": "Message", "type": "string"},
|
||||
"type": {"title": "Error Type", "type": "string"},
|
||||
},
|
||||
"required": ["loc", "msg", "type"],
|
||||
"title": "ValidationError",
|
||||
"type": "object",
|
||||
},
|
||||
}
|
||||
},
|
||||
"info": {"title": "FastAPI", "version": "0.1.0"},
|
||||
"openapi": "3.1.0",
|
||||
"paths": {
|
||||
"/facilities/{facility_id}": {
|
||||
"get": {
|
||||
"operationId": "get_facility_facilities__facility_id__get",
|
||||
"parameters": [
|
||||
{
|
||||
"in": "path",
|
||||
"name": "facility_id",
|
||||
"required": True,
|
||||
"schema": {"title": "Facility Id", "type": "string"},
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Facility"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": "Successful Response",
|
||||
},
|
||||
"422": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": "Validation Error",
|
||||
},
|
||||
},
|
||||
"summary": "Get Facility",
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
)
|
||||
assert models["Address"]["description"] == expected_address_description
|
||||
|
||||
@@ -5,7 +5,7 @@ from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
from pydantic import BaseModel
|
||||
|
||||
from .utils import needs_pydanticv1, needs_pydanticv2
|
||||
from .utils import needs_pydanticv1
|
||||
|
||||
|
||||
class MyUuid:
|
||||
@@ -26,7 +26,6 @@ class MyUuid:
|
||||
raise TypeError("vars() argument must have __dict__ attribute")
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_pydanticv2():
|
||||
from pydantic import field_serializer
|
||||
|
||||
@@ -73,6 +72,8 @@ def test_pydanticv2():
|
||||
# TODO: remove when deprecating Pydantic v1
|
||||
@needs_pydanticv1
|
||||
def test_pydanticv1():
|
||||
from pydantic import v1
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
@app.get("/fast_uuid")
|
||||
@@ -84,7 +85,7 @@ def test_pydanticv1():
|
||||
vars(asyncpg_uuid)
|
||||
return {"fast_uuid": asyncpg_uuid}
|
||||
|
||||
class SomeCustomClass(BaseModel):
|
||||
class SomeCustomClass(v1.BaseModel):
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
json_encoders = {uuid.UUID: str}
|
||||
|
||||
@@ -8,11 +8,11 @@ from pathlib import PurePath, PurePosixPath, PureWindowsPath
|
||||
from typing import Optional
|
||||
|
||||
import pytest
|
||||
from fastapi._compat import PYDANTIC_V2, Undefined
|
||||
from fastapi._compat import Undefined
|
||||
from fastapi.encoders import jsonable_encoder
|
||||
from pydantic import BaseModel, Field, ValidationError
|
||||
|
||||
from .utils import needs_pydanticv1, needs_pydanticv2
|
||||
from .utils import needs_pydanticv1
|
||||
|
||||
|
||||
class Person:
|
||||
@@ -59,12 +59,7 @@ class RoleEnum(Enum):
|
||||
class ModelWithConfig(BaseModel):
|
||||
role: Optional[RoleEnum] = None
|
||||
|
||||
if PYDANTIC_V2:
|
||||
model_config = {"use_enum_values": True}
|
||||
else:
|
||||
|
||||
class Config:
|
||||
use_enum_values = True
|
||||
model_config = {"use_enum_values": True}
|
||||
|
||||
|
||||
class ModelWithAlias(BaseModel):
|
||||
@@ -89,6 +84,18 @@ def test_encode_dict():
|
||||
}
|
||||
|
||||
|
||||
def test_encode_dict_include_exclude_list():
|
||||
pet = {"name": "Firulais", "owner": {"name": "Foo"}}
|
||||
assert jsonable_encoder(pet) == {"name": "Firulais", "owner": {"name": "Foo"}}
|
||||
assert jsonable_encoder(pet, include=["name"]) == {"name": "Firulais"}
|
||||
assert jsonable_encoder(pet, exclude=["owner"]) == {"name": "Firulais"}
|
||||
assert jsonable_encoder(pet, include=[]) == {}
|
||||
assert jsonable_encoder(pet, exclude=[]) == {
|
||||
"name": "Firulais",
|
||||
"owner": {"name": "Foo"},
|
||||
}
|
||||
|
||||
|
||||
def test_encode_class():
|
||||
person = Person(name="Foo")
|
||||
pet = Pet(owner=person, name="Firulais")
|
||||
@@ -130,7 +137,6 @@ def test_encode_unsupported():
|
||||
jsonable_encoder(unserializable)
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_encode_custom_json_encoders_model_pydanticv2():
|
||||
from pydantic import field_serializer
|
||||
|
||||
@@ -153,7 +159,9 @@ def test_encode_custom_json_encoders_model_pydanticv2():
|
||||
# TODO: remove when deprecating Pydantic v1
|
||||
@needs_pydanticv1
|
||||
def test_encode_custom_json_encoders_model_pydanticv1():
|
||||
class ModelWithCustomEncoder(BaseModel):
|
||||
from pydantic import v1
|
||||
|
||||
class ModelWithCustomEncoder(v1.BaseModel):
|
||||
dt_field: datetime
|
||||
|
||||
class Config:
|
||||
@@ -208,10 +216,12 @@ def test_encode_model_with_default():
|
||||
|
||||
@needs_pydanticv1
|
||||
def test_custom_encoders():
|
||||
from pydantic import v1
|
||||
|
||||
class safe_datetime(datetime):
|
||||
pass
|
||||
|
||||
class MyModel(BaseModel):
|
||||
class MyModel(v1.BaseModel):
|
||||
dt_field: safe_datetime
|
||||
|
||||
instance = MyModel(dt_field=safe_datetime.now())
|
||||
@@ -244,12 +254,7 @@ def test_encode_model_with_pure_path():
|
||||
class ModelWithPath(BaseModel):
|
||||
path: PurePath
|
||||
|
||||
if PYDANTIC_V2:
|
||||
model_config = {"arbitrary_types_allowed": True}
|
||||
else:
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
model_config = {"arbitrary_types_allowed": True}
|
||||
|
||||
test_path = PurePath("/foo", "bar")
|
||||
obj = ModelWithPath(path=test_path)
|
||||
@@ -260,12 +265,7 @@ def test_encode_model_with_pure_posix_path():
|
||||
class ModelWithPath(BaseModel):
|
||||
path: PurePosixPath
|
||||
|
||||
if PYDANTIC_V2:
|
||||
model_config = {"arbitrary_types_allowed": True}
|
||||
else:
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
model_config = {"arbitrary_types_allowed": True}
|
||||
|
||||
obj = ModelWithPath(path=PurePosixPath("/foo", "bar"))
|
||||
assert jsonable_encoder(obj) == {"path": "/foo/bar"}
|
||||
@@ -275,45 +275,44 @@ def test_encode_model_with_pure_windows_path():
|
||||
class ModelWithPath(BaseModel):
|
||||
path: PureWindowsPath
|
||||
|
||||
if PYDANTIC_V2:
|
||||
model_config = {"arbitrary_types_allowed": True}
|
||||
else:
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
model_config = {"arbitrary_types_allowed": True}
|
||||
|
||||
obj = ModelWithPath(path=PureWindowsPath("/foo", "bar"))
|
||||
assert jsonable_encoder(obj) == {"path": "\\foo\\bar"}
|
||||
|
||||
|
||||
def test_encode_pure_path():
|
||||
test_path = PurePath("/foo", "bar")
|
||||
|
||||
assert jsonable_encoder({"path": test_path}) == {"path": str(test_path)}
|
||||
|
||||
|
||||
@needs_pydanticv1
|
||||
def test_encode_root():
|
||||
class ModelWithRoot(BaseModel):
|
||||
from pydantic import v1
|
||||
|
||||
class ModelWithRoot(v1.BaseModel):
|
||||
__root__: str
|
||||
|
||||
model = ModelWithRoot(__root__="Foo")
|
||||
assert jsonable_encoder(model) == "Foo"
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_decimal_encoder_float():
|
||||
data = {"value": Decimal(1.23)}
|
||||
assert jsonable_encoder(data) == {"value": 1.23}
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_decimal_encoder_int():
|
||||
data = {"value": Decimal(2)}
|
||||
assert jsonable_encoder(data) == {"value": 2}
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_decimal_encoder_nan():
|
||||
data = {"value": Decimal("NaN")}
|
||||
assert isnan(jsonable_encoder(data)["value"])
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_decimal_encoder_infinity():
|
||||
data = {"value": Decimal("Infinity")}
|
||||
assert isinf(jsonable_encoder(data)["value"])
|
||||
@@ -330,7 +329,6 @@ def test_encode_deque_encodes_child_models():
|
||||
assert jsonable_encoder(dq)[0]["test"] == "test"
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_encode_pydantic_undefined():
|
||||
data = {"value": Undefined}
|
||||
assert jsonable_encoder(data) == {"value": None}
|
||||
|
||||
@@ -9,8 +9,6 @@ from fastapi.testclient import TestClient
|
||||
from inline_snapshot import snapshot
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from tests.utils import pydantic_snapshot
|
||||
|
||||
|
||||
class MessageEventType(str, Enum):
|
||||
alpha = "alpha"
|
||||
@@ -126,47 +124,21 @@ def test_openapi_schema():
|
||||
},
|
||||
"MessageEvent": {
|
||||
"properties": {
|
||||
"event_type": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"$ref": "#/components/schemas/MessageEventType",
|
||||
"default": "alpha",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/MessageEventType"
|
||||
}
|
||||
],
|
||||
"default": "alpha",
|
||||
}
|
||||
),
|
||||
),
|
||||
"event_type": {
|
||||
"$ref": "#/components/schemas/MessageEventType",
|
||||
"default": "alpha",
|
||||
},
|
||||
"output": {"type": "string", "title": "Output"},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["output"],
|
||||
"title": "MessageEvent",
|
||||
},
|
||||
"MessageEventType": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"type": "string",
|
||||
"enum": ["alpha", "beta"],
|
||||
"title": "MessageEventType",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
{
|
||||
"type": "string",
|
||||
"enum": ["alpha", "beta"],
|
||||
"title": "MessageEventType",
|
||||
"description": "An enumeration.",
|
||||
}
|
||||
),
|
||||
),
|
||||
"MessageEventType": {
|
||||
"type": "string",
|
||||
"enum": ["alpha", "beta"],
|
||||
"title": "MessageEventType",
|
||||
},
|
||||
"MessageOutput": {
|
||||
"properties": {
|
||||
"body": {"type": "string", "title": "Body", "default": ""},
|
||||
|
||||
@@ -3,37 +3,30 @@ from typing import Optional
|
||||
from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
from inline_snapshot import snapshot
|
||||
from pydantic import BaseModel
|
||||
|
||||
from .utils import PYDANTIC_V2, needs_pydanticv2
|
||||
from pydantic import BaseModel, computed_field
|
||||
|
||||
|
||||
class SubItem(BaseModel):
|
||||
subname: str
|
||||
sub_description: Optional[str] = None
|
||||
tags: list[str] = []
|
||||
if PYDANTIC_V2:
|
||||
model_config = {"json_schema_serialization_defaults_required": True}
|
||||
model_config = {"json_schema_serialization_defaults_required": True}
|
||||
|
||||
|
||||
class Item(BaseModel):
|
||||
name: str
|
||||
description: Optional[str] = None
|
||||
sub: Optional[SubItem] = None
|
||||
if PYDANTIC_V2:
|
||||
model_config = {"json_schema_serialization_defaults_required": True}
|
||||
model_config = {"json_schema_serialization_defaults_required": True}
|
||||
|
||||
|
||||
if PYDANTIC_V2:
|
||||
from pydantic import computed_field
|
||||
class WithComputedField(BaseModel):
|
||||
name: str
|
||||
|
||||
class WithComputedField(BaseModel):
|
||||
name: str
|
||||
|
||||
@computed_field
|
||||
@property
|
||||
def computed_field(self) -> str:
|
||||
return f"computed {self.name}"
|
||||
@computed_field
|
||||
@property
|
||||
def computed_field(self) -> str:
|
||||
return f"computed {self.name}"
|
||||
|
||||
|
||||
def get_app_client(separate_input_output_schemas: bool = True) -> TestClient:
|
||||
@@ -58,13 +51,11 @@ def get_app_client(separate_input_output_schemas: bool = True) -> TestClient:
|
||||
Item(name="Plumbus"),
|
||||
]
|
||||
|
||||
if PYDANTIC_V2:
|
||||
|
||||
@app.post("/with-computed-field/")
|
||||
def create_with_computed_field(
|
||||
with_computed_field: WithComputedField,
|
||||
) -> WithComputedField:
|
||||
return with_computed_field
|
||||
@app.post("/with-computed-field/")
|
||||
def create_with_computed_field(
|
||||
with_computed_field: WithComputedField,
|
||||
) -> WithComputedField:
|
||||
return with_computed_field
|
||||
|
||||
client = TestClient(app)
|
||||
return client
|
||||
@@ -151,7 +142,6 @@ def test_read_items():
|
||||
)
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_with_computed_field():
|
||||
client = get_app_client()
|
||||
client_no = get_app_client(separate_input_output_schemas=False)
|
||||
@@ -168,7 +158,6 @@ def test_with_computed_field():
|
||||
)
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_openapi_schema():
|
||||
client = get_app_client()
|
||||
response = client.get("/openapi.json")
|
||||
@@ -449,7 +438,6 @@ def test_openapi_schema():
|
||||
)
|
||||
|
||||
|
||||
@needs_pydanticv2
|
||||
def test_openapi_schema_no_separate():
|
||||
client = get_app_client(separate_input_output_schemas=False)
|
||||
response = client.get("/openapi.json")
|
||||
|
||||
tests/test_pydantic_v1_deprecation_warnings.py (new file, +98 lines)
@@ -0,0 +1,98 @@
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
from tests.utils import skip_module_if_py_gte_314
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
skip_module_if_py_gte_314()
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi._compat.v1 import BaseModel
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
|
||||
def test_warns_pydantic_v1_model_in_endpoint_param() -> None:
|
||||
class ParamModelV1(BaseModel):
|
||||
name: str
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
with pytest.warns(
|
||||
DeprecationWarning,
|
||||
match=r"pydantic\.v1 is deprecated.*Please update the param data:",
|
||||
):
|
||||
|
||||
@app.post("/param")
|
||||
def endpoint(data: ParamModelV1):
|
||||
return data
|
||||
|
||||
client = TestClient(app)
|
||||
response = client.post("/param", json={"name": "test"})
|
||||
assert response.status_code == 200, response.text
|
||||
assert response.json() == {"name": "test"}
|
||||
|
||||
|
||||
def test_warns_pydantic_v1_model_in_return_type() -> None:
|
||||
class ReturnModelV1(BaseModel):
|
||||
name: str
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
with pytest.warns(
|
||||
DeprecationWarning,
|
||||
match=r"pydantic\.v1 is deprecated.*Please update the response model",
|
||||
):
|
||||
|
||||
@app.get("/return")
|
||||
def endpoint() -> ReturnModelV1:
|
||||
return ReturnModelV1(name="test")
|
||||
|
||||
client = TestClient(app)
|
||||
response = client.get("/return")
|
||||
assert response.status_code == 200, response.text
|
||||
assert response.json() == {"name": "test"}
|
||||
|
||||
|
||||
def test_warns_pydantic_v1_model_in_response_model() -> None:
|
||||
class ResponseModelV1(BaseModel):
|
||||
name: str
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
with pytest.warns(
|
||||
DeprecationWarning,
|
||||
match=r"pydantic\.v1 is deprecated.*Please update the response model",
|
||||
):
|
||||
|
||||
@app.get("/response-model", response_model=ResponseModelV1)
|
||||
def endpoint():
|
||||
return {"name": "test"}
|
||||
|
||||
client = TestClient(app)
|
||||
response = client.get("/response-model")
|
||||
assert response.status_code == 200, response.text
|
||||
assert response.json() == {"name": "test"}
|
||||
|
||||
|
||||
def test_warns_pydantic_v1_model_in_additional_responses_model() -> None:
|
||||
class ErrorModelV1(BaseModel):
|
||||
detail: str
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
with pytest.warns(
|
||||
DeprecationWarning,
|
||||
match=r"pydantic\.v1 is deprecated.*In responses=\{\}, please update",
|
||||
):
|
||||
|
||||
@app.get(
|
||||
"/responses", response_model=None, responses={400: {"model": ErrorModelV1}}
|
||||
)
|
||||
def endpoint():
|
||||
return {"ok": True}
|
||||
|
||||
client = TestClient(app)
|
||||
response = client.get("/responses")
|
||||
assert response.status_code == 200, response.text
|
||||
assert response.json() == {"ok": True}
|
||||
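The following hunks apply the same remedy in the existing test modules: route registration with pydantic.v1 models is wrapped in warnings.catch_warnings so the deprecation warnings asserted above don't leak into unrelated tests. A minimal sketch of that pattern (illustrative; the hunks below show the real call sites, and LegacyModelV1 is a hypothetical name):

import warnings

with warnings.catch_warnings(record=True):
    warnings.simplefilter("always")

    # Routes that use pydantic.v1 models are registered inside this block,
    # so the DeprecationWarning they emit is captured instead of propagating.
    @app.post("/legacy")
    def legacy_endpoint(data: LegacyModelV1):
        return data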
@@ -1,7 +1,8 @@
|
||||
import sys
|
||||
import warnings
|
||||
from typing import Any, Union
|
||||
|
||||
from tests.utils import pydantic_snapshot, skip_module_if_py_gte_314
|
||||
from tests.utils import skip_module_if_py_gte_314
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
skip_module_if_py_gte_314()
|
||||
@@ -26,30 +27,29 @@ class Item(BaseModel):
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
with warnings.catch_warnings(record=True):
|
||||
warnings.simplefilter("always")
|
||||
|
||||
@app.post("/simple-model")
|
||||
def handle_simple_model(data: SubItem) -> SubItem:
|
||||
return data
|
||||
@app.post("/simple-model")
|
||||
def handle_simple_model(data: SubItem) -> SubItem:
|
||||
return data
|
||||
|
||||
@app.post("/simple-model-filter", response_model=SubItem)
|
||||
def handle_simple_model_filter(data: SubItem) -> Any:
|
||||
extended_data = data.dict()
|
||||
extended_data.update({"secret_price": 42})
|
||||
return extended_data
|
||||
|
||||
@app.post("/simple-model-filter", response_model=SubItem)
|
||||
def handle_simple_model_filter(data: SubItem) -> Any:
|
||||
extended_data = data.dict()
|
||||
extended_data.update({"secret_price": 42})
|
||||
return extended_data
|
||||
@app.post("/item")
|
||||
def handle_item(data: Item) -> Item:
|
||||
return data
|
||||
|
||||
|
||||
@app.post("/item")
|
||||
def handle_item(data: Item) -> Item:
|
||||
return data
|
||||
|
||||
|
||||
@app.post("/item-filter", response_model=Item)
|
||||
def handle_item_filter(data: Item) -> Any:
|
||||
extended_data = data.dict()
|
||||
extended_data.update({"secret_data": "classified", "internal_id": 12345})
|
||||
extended_data["sub"].update({"internal_id": 67890})
|
||||
return extended_data
|
||||
@app.post("/item-filter", response_model=Item)
|
||||
def handle_item_filter(data: Item) -> Any:
|
||||
extended_data = data.dict()
|
||||
extended_data.update({"secret_data": "classified", "internal_id": 12345})
|
||||
extended_data["sub"].update({"internal_id": 67890})
|
||||
return extended_data
|
||||
|
||||
|
||||
client = TestClient(app)
|
||||
@@ -225,21 +225,12 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/SubItem"
|
||||
}
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/SubItem"}
|
||||
),
|
||||
)
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -275,21 +266,12 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/SubItem"
|
||||
}
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/SubItem"}
|
||||
),
|
||||
)
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -325,21 +307,12 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Item"
|
||||
}
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/Item"}
|
||||
),
|
||||
)
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -373,21 +346,12 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Item"
|
||||
}
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/Item"}
|
||||
),
|
||||
)
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import sys
|
||||
import warnings
|
||||
from typing import Any, Union
|
||||
|
||||
from tests.utils import pydantic_snapshot, skip_module_if_py_gte_314
|
||||
from tests.utils import skip_module_if_py_gte_314
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
skip_module_if_py_gte_314()
|
||||
@@ -27,49 +28,47 @@ class Item(BaseModel):
app = FastAPI()


@app.post("/item")
def handle_item(data: Item) -> list[Item]:
return [data, data]
with warnings.catch_warnings(record=True):
warnings.simplefilter("always")

@app.post("/item")
def handle_item(data: Item) -> list[Item]:
return [data, data]

@app.post("/item-filter", response_model=list[Item])
def handle_item_filter(data: Item) -> Any:
extended_data = data.dict()
extended_data.update({"secret_data": "classified", "internal_id": 12345})
extended_data["sub"].update({"internal_id": 67890})
return [extended_data, extended_data]


@app.post("/item-list")
def handle_item_list(data: list[Item]) -> Item:
if data:
return data[0]
return Item(title="", size=0, sub=SubItem(name=""))


@app.post("/item-list-filter", response_model=Item)
def handle_item_list_filter(data: list[Item]) -> Any:
if data:
extended_data = data[0].dict()
extended_data.update({"secret_data": "classified", "internal_id": 12345})
extended_data["sub"].update({"internal_id": 67890})
return extended_data
return Item(title="", size=0, sub=SubItem(name=""))


@app.post("/item-list-to-list")
def handle_item_list_to_list(data: list[Item]) -> list[Item]:
return data


@app.post("/item-list-to-list-filter", response_model=list[Item])
def handle_item_list_to_list_filter(data: list[Item]) -> Any:
if data:
extended_data = data[0].dict()
@app.post("/item-filter", response_model=list[Item])
def handle_item_filter(data: Item) -> Any:
extended_data = data.dict()
extended_data.update({"secret_data": "classified", "internal_id": 12345})
extended_data["sub"].update({"internal_id": 67890})
return [extended_data, extended_data]
return []

@app.post("/item-list")
def handle_item_list(data: list[Item]) -> Item:
if data:
return data[0]
return Item(title="", size=0, sub=SubItem(name=""))

@app.post("/item-list-filter", response_model=Item)
def handle_item_list_filter(data: list[Item]) -> Any:
if data:
extended_data = data[0].dict()
extended_data.update({"secret_data": "classified", "internal_id": 12345})
extended_data["sub"].update({"internal_id": 67890})
return extended_data
return Item(title="", size=0, sub=SubItem(name=""))

@app.post("/item-list-to-list")
def handle_item_list_to_list(data: list[Item]) -> list[Item]:
return data

@app.post("/item-list-to-list-filter", response_model=list[Item])
def handle_item_list_to_list_filter(data: list[Item]) -> Any:
if data:
extended_data = data[0].dict()
extended_data.update({"secret_data": "classified", "internal_id": 12345})
extended_data["sub"].update({"internal_id": 67890})
return [extended_data, extended_data]
return []


client = TestClient(app)
@@ -375,21 +374,12 @@ def test_openapi_schema():
"requestBody": {
"content": {
"application/json": {
"schema": pydantic_snapshot(
v2=snapshot(
{
"allOf": [
{
"$ref": "#/components/schemas/Item"
}
],
"title": "Data",
}
),
v1=snapshot(
"schema": {
"allOf": [
{"$ref": "#/components/schemas/Item"}
),
)
],
"title": "Data",
}
}
},
"required": True,
@@ -429,21 +419,12 @@ def test_openapi_schema():
"requestBody": {
"content": {
"application/json": {
"schema": pydantic_snapshot(
v2=snapshot(
{
"allOf": [
{
"$ref": "#/components/schemas/Item"
}
],
"title": "Data",
}
),
v1=snapshot(
"schema": {
"allOf": [
{"$ref": "#/components/schemas/Item"}
),
)
],
"title": "Data",
}
}
},
"required": True,

@@ -1,7 +1,8 @@
import sys
import warnings
from typing import Any, Union

from tests.utils import pydantic_snapshot, skip_module_if_py_gte_314
from tests.utils import skip_module_if_py_gte_314

if sys.version_info >= (3, 14):
    skip_module_if_py_gte_314()
@@ -39,179 +40,181 @@ class NewItem(NewBaseModel):
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
with warnings.catch_warnings(record=True):
|
||||
warnings.simplefilter("always")
|
||||
|
||||
@app.post("/v1-to-v2/item")
|
||||
def handle_v1_item_to_v2(data: Item) -> NewItem:
|
||||
return NewItem(
|
||||
new_title=data.title,
|
||||
new_size=data.size,
|
||||
new_description=data.description,
|
||||
new_sub=NewSubItem(new_sub_name=data.sub.name),
|
||||
new_multi=[NewSubItem(new_sub_name=s.name) for s in data.multi],
|
||||
)
|
||||
@app.post("/v1-to-v2/item")
|
||||
def handle_v1_item_to_v2(data: Item) -> NewItem:
|
||||
return NewItem(
|
||||
new_title=data.title,
|
||||
new_size=data.size,
|
||||
new_description=data.description,
|
||||
new_sub=NewSubItem(new_sub_name=data.sub.name),
|
||||
new_multi=[NewSubItem(new_sub_name=s.name) for s in data.multi],
|
||||
)
|
||||
|
||||
@app.post("/v1-to-v2/item-filter", response_model=NewItem)
|
||||
def handle_v1_item_to_v2_filter(data: Item) -> Any:
|
||||
result = {
|
||||
"new_title": data.title,
|
||||
"new_size": data.size,
|
||||
"new_description": data.description,
|
||||
"new_sub": {
|
||||
"new_sub_name": data.sub.name,
|
||||
"new_sub_secret": "sub_hidden",
|
||||
},
|
||||
"new_multi": [
|
||||
{"new_sub_name": s.name, "new_sub_secret": "sub_hidden"}
|
||||
for s in data.multi
|
||||
],
|
||||
"secret": "hidden_v1_to_v2",
|
||||
}
|
||||
return result
|
||||
|
||||
@app.post("/v1-to-v2/item-filter", response_model=NewItem)
|
||||
def handle_v1_item_to_v2_filter(data: Item) -> Any:
|
||||
result = {
|
||||
"new_title": data.title,
|
||||
"new_size": data.size,
|
||||
"new_description": data.description,
|
||||
"new_sub": {"new_sub_name": data.sub.name, "new_sub_secret": "sub_hidden"},
|
||||
"new_multi": [
|
||||
{"new_sub_name": s.name, "new_sub_secret": "sub_hidden"} for s in data.multi
|
||||
],
|
||||
"secret": "hidden_v1_to_v2",
|
||||
}
|
||||
return result
|
||||
@app.post("/v2-to-v1/item")
|
||||
def handle_v2_item_to_v1(data: NewItem) -> Item:
|
||||
return Item(
|
||||
title=data.new_title,
|
||||
size=data.new_size,
|
||||
description=data.new_description,
|
||||
sub=SubItem(name=data.new_sub.new_sub_name),
|
||||
multi=[SubItem(name=s.new_sub_name) for s in data.new_multi],
|
||||
)
|
||||
|
||||
@app.post("/v2-to-v1/item-filter", response_model=Item)
|
||||
def handle_v2_item_to_v1_filter(data: NewItem) -> Any:
|
||||
result = {
|
||||
"title": data.new_title,
|
||||
"size": data.new_size,
|
||||
"description": data.new_description,
|
||||
"sub": {"name": data.new_sub.new_sub_name, "sub_secret": "sub_hidden"},
|
||||
"multi": [
|
||||
{"name": s.new_sub_name, "sub_secret": "sub_hidden"}
|
||||
for s in data.new_multi
|
||||
],
|
||||
"secret": "hidden_v2_to_v1",
|
||||
}
|
||||
return result
|
||||
|
||||
@app.post("/v2-to-v1/item")
|
||||
def handle_v2_item_to_v1(data: NewItem) -> Item:
|
||||
return Item(
|
||||
title=data.new_title,
|
||||
size=data.new_size,
|
||||
description=data.new_description,
|
||||
sub=SubItem(name=data.new_sub.new_sub_name),
|
||||
multi=[SubItem(name=s.new_sub_name) for s in data.new_multi],
|
||||
)
|
||||
@app.post("/v1-to-v2/item-to-list")
|
||||
def handle_v1_item_to_v2_list(data: Item) -> list[NewItem]:
|
||||
converted = NewItem(
|
||||
new_title=data.title,
|
||||
new_size=data.size,
|
||||
new_description=data.description,
|
||||
new_sub=NewSubItem(new_sub_name=data.sub.name),
|
||||
new_multi=[NewSubItem(new_sub_name=s.name) for s in data.multi],
|
||||
)
|
||||
return [converted, converted]
|
||||
|
||||
@app.post("/v1-to-v2/list-to-list")
|
||||
def handle_v1_list_to_v2_list(data: list[Item]) -> list[NewItem]:
|
||||
result = []
|
||||
for item in data:
|
||||
result.append(
|
||||
NewItem(
|
||||
new_title=item.title,
|
||||
new_size=item.size,
|
||||
new_description=item.description,
|
||||
new_sub=NewSubItem(new_sub_name=item.sub.name),
|
||||
new_multi=[NewSubItem(new_sub_name=s.name) for s in item.multi],
|
||||
)
|
||||
)
|
||||
return result
|
||||
|
||||
@app.post("/v2-to-v1/item-filter", response_model=Item)
|
||||
def handle_v2_item_to_v1_filter(data: NewItem) -> Any:
|
||||
result = {
|
||||
"title": data.new_title,
|
||||
"size": data.new_size,
|
||||
"description": data.new_description,
|
||||
"sub": {"name": data.new_sub.new_sub_name, "sub_secret": "sub_hidden"},
|
||||
"multi": [
|
||||
{"name": s.new_sub_name, "sub_secret": "sub_hidden"} for s in data.new_multi
|
||||
],
|
||||
"secret": "hidden_v2_to_v1",
|
||||
}
|
||||
return result
|
||||
@app.post("/v1-to-v2/list-to-list-filter", response_model=list[NewItem])
|
||||
def handle_v1_list_to_v2_list_filter(data: list[Item]) -> Any:
|
||||
result = []
|
||||
for item in data:
|
||||
converted = {
|
||||
"new_title": item.title,
|
||||
"new_size": item.size,
|
||||
"new_description": item.description,
|
||||
"new_sub": {
|
||||
"new_sub_name": item.sub.name,
|
||||
"new_sub_secret": "sub_hidden",
|
||||
},
|
||||
"new_multi": [
|
||||
{"new_sub_name": s.name, "new_sub_secret": "sub_hidden"}
|
||||
for s in item.multi
|
||||
],
|
||||
"secret": "hidden_v2_to_v1",
|
||||
}
|
||||
result.append(converted)
|
||||
return result
|
||||
|
||||
|
||||
@app.post("/v1-to-v2/item-to-list")
|
||||
def handle_v1_item_to_v2_list(data: Item) -> list[NewItem]:
|
||||
converted = NewItem(
|
||||
new_title=data.title,
|
||||
new_size=data.size,
|
||||
new_description=data.description,
|
||||
new_sub=NewSubItem(new_sub_name=data.sub.name),
|
||||
new_multi=[NewSubItem(new_sub_name=s.name) for s in data.multi],
|
||||
)
|
||||
return [converted, converted]
|
||||
|
||||
|
||||
@app.post("/v1-to-v2/list-to-list")
|
||||
def handle_v1_list_to_v2_list(data: list[Item]) -> list[NewItem]:
|
||||
result = []
|
||||
for item in data:
|
||||
result.append(
|
||||
NewItem(
|
||||
@app.post("/v1-to-v2/list-to-item")
|
||||
def handle_v1_list_to_v2_item(data: list[Item]) -> NewItem:
|
||||
if data:
|
||||
item = data[0]
|
||||
return NewItem(
|
||||
new_title=item.title,
|
||||
new_size=item.size,
|
||||
new_description=item.description,
|
||||
new_sub=NewSubItem(new_sub_name=item.sub.name),
|
||||
new_multi=[NewSubItem(new_sub_name=s.name) for s in item.multi],
|
||||
)
|
||||
return NewItem(new_title="", new_size=0, new_sub=NewSubItem(new_sub_name=""))
|
||||
|
||||
@app.post("/v2-to-v1/item-to-list")
|
||||
def handle_v2_item_to_v1_list(data: NewItem) -> list[Item]:
|
||||
converted = Item(
|
||||
title=data.new_title,
|
||||
size=data.new_size,
|
||||
description=data.new_description,
|
||||
sub=SubItem(name=data.new_sub.new_sub_name),
|
||||
multi=[SubItem(name=s.new_sub_name) for s in data.new_multi],
|
||||
)
|
||||
return result
|
||||
return [converted, converted]
|
||||
|
||||
@app.post("/v2-to-v1/list-to-list")
|
||||
def handle_v2_list_to_v1_list(data: list[NewItem]) -> list[Item]:
|
||||
result = []
|
||||
for item in data:
|
||||
result.append(
|
||||
Item(
|
||||
title=item.new_title,
|
||||
size=item.new_size,
|
||||
description=item.new_description,
|
||||
sub=SubItem(name=item.new_sub.new_sub_name),
|
||||
multi=[SubItem(name=s.new_sub_name) for s in item.new_multi],
|
||||
)
|
||||
)
|
||||
return result
|
||||
|
||||
@app.post("/v1-to-v2/list-to-list-filter", response_model=list[NewItem])
|
||||
def handle_v1_list_to_v2_list_filter(data: list[Item]) -> Any:
|
||||
result = []
|
||||
for item in data:
|
||||
converted = {
|
||||
"new_title": item.title,
|
||||
"new_size": item.size,
|
||||
"new_description": item.description,
|
||||
"new_sub": {"new_sub_name": item.sub.name, "new_sub_secret": "sub_hidden"},
|
||||
"new_multi": [
|
||||
{"new_sub_name": s.name, "new_sub_secret": "sub_hidden"}
|
||||
for s in item.multi
|
||||
],
|
||||
"secret": "hidden_v2_to_v1",
|
||||
}
|
||||
result.append(converted)
|
||||
return result
|
||||
@app.post("/v2-to-v1/list-to-list-filter", response_model=list[Item])
|
||||
def handle_v2_list_to_v1_list_filter(data: list[NewItem]) -> Any:
|
||||
result = []
|
||||
for item in data:
|
||||
converted = {
|
||||
"title": item.new_title,
|
||||
"size": item.new_size,
|
||||
"description": item.new_description,
|
||||
"sub": {
|
||||
"name": item.new_sub.new_sub_name,
|
||||
"sub_secret": "sub_hidden",
|
||||
},
|
||||
"multi": [
|
||||
{"name": s.new_sub_name, "sub_secret": "sub_hidden"}
|
||||
for s in item.new_multi
|
||||
],
|
||||
"secret": "hidden_v2_to_v1",
|
||||
}
|
||||
result.append(converted)
|
||||
return result
|
||||
|
||||
|
||||
@app.post("/v1-to-v2/list-to-item")
|
||||
def handle_v1_list_to_v2_item(data: list[Item]) -> NewItem:
|
||||
if data:
|
||||
item = data[0]
|
||||
return NewItem(
|
||||
new_title=item.title,
|
||||
new_size=item.size,
|
||||
new_description=item.description,
|
||||
new_sub=NewSubItem(new_sub_name=item.sub.name),
|
||||
new_multi=[NewSubItem(new_sub_name=s.name) for s in item.multi],
|
||||
)
|
||||
return NewItem(new_title="", new_size=0, new_sub=NewSubItem(new_sub_name=""))
|
||||
|
||||
|
||||
@app.post("/v2-to-v1/item-to-list")
|
||||
def handle_v2_item_to_v1_list(data: NewItem) -> list[Item]:
|
||||
converted = Item(
|
||||
title=data.new_title,
|
||||
size=data.new_size,
|
||||
description=data.new_description,
|
||||
sub=SubItem(name=data.new_sub.new_sub_name),
|
||||
multi=[SubItem(name=s.new_sub_name) for s in data.new_multi],
|
||||
)
|
||||
return [converted, converted]
|
||||
|
||||
|
||||
@app.post("/v2-to-v1/list-to-list")
|
||||
def handle_v2_list_to_v1_list(data: list[NewItem]) -> list[Item]:
|
||||
result = []
|
||||
for item in data:
|
||||
result.append(
|
||||
Item(
|
||||
@app.post("/v2-to-v1/list-to-item")
|
||||
def handle_v2_list_to_v1_item(data: list[NewItem]) -> Item:
|
||||
if data:
|
||||
item = data[0]
|
||||
return Item(
|
||||
title=item.new_title,
|
||||
size=item.new_size,
|
||||
description=item.new_description,
|
||||
sub=SubItem(name=item.new_sub.new_sub_name),
|
||||
multi=[SubItem(name=s.new_sub_name) for s in item.new_multi],
|
||||
)
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
@app.post("/v2-to-v1/list-to-list-filter", response_model=list[Item])
|
||||
def handle_v2_list_to_v1_list_filter(data: list[NewItem]) -> Any:
|
||||
result = []
|
||||
for item in data:
|
||||
converted = {
|
||||
"title": item.new_title,
|
||||
"size": item.new_size,
|
||||
"description": item.new_description,
|
||||
"sub": {"name": item.new_sub.new_sub_name, "sub_secret": "sub_hidden"},
|
||||
"multi": [
|
||||
{"name": s.new_sub_name, "sub_secret": "sub_hidden"}
|
||||
for s in item.new_multi
|
||||
],
|
||||
"secret": "hidden_v2_to_v1",
|
||||
}
|
||||
result.append(converted)
|
||||
return result
|
||||
|
||||
|
||||
@app.post("/v2-to-v1/list-to-item")
|
||||
def handle_v2_list_to_v1_item(data: list[NewItem]) -> Item:
|
||||
if data:
|
||||
item = data[0]
|
||||
return Item(
|
||||
title=item.new_title,
|
||||
size=item.new_size,
|
||||
description=item.new_description,
|
||||
sub=SubItem(name=item.new_sub.new_sub_name),
|
||||
multi=[SubItem(name=s.new_sub_name) for s in item.new_multi],
|
||||
)
|
||||
return Item(title="", size=0, sub=SubItem(name=""))
|
||||
return Item(title="", size=0, sub=SubItem(name=""))
|
||||
|
||||
|
||||
client = TestClient(app)
|
||||
@@ -668,38 +671,20 @@ def test_v2_to_v1_validation_error():
|
||||
assert response.status_code == 422, response.text
|
||||
assert response.json() == snapshot(
|
||||
{
|
||||
"detail": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
[
|
||||
{
|
||||
"type": "missing",
|
||||
"loc": ["body", "new_size"],
|
||||
"msg": "Field required",
|
||||
"input": {"new_title": "Missing fields"},
|
||||
},
|
||||
{
|
||||
"type": "missing",
|
||||
"loc": ["body", "new_sub"],
|
||||
"msg": "Field required",
|
||||
"input": {"new_title": "Missing fields"},
|
||||
},
|
||||
]
|
||||
),
|
||||
v1=snapshot(
|
||||
[
|
||||
{
|
||||
"loc": ["body", "new_size"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing",
|
||||
},
|
||||
{
|
||||
"loc": ["body", "new_sub"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing",
|
||||
},
|
||||
]
|
||||
),
|
||||
)
|
||||
"detail": [
|
||||
{
|
||||
"type": "missing",
|
||||
"loc": ["body", "new_size"],
|
||||
"msg": "Field required",
|
||||
"input": {"new_title": "Missing fields"},
|
||||
},
|
||||
{
|
||||
"type": "missing",
|
||||
"loc": ["body", "new_sub"],
|
||||
"msg": "Field required",
|
||||
"input": {"new_title": "Missing fields"},
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
@@ -717,23 +702,12 @@ def test_v2_to_v1_nested_validation_error():
|
||||
assert response.json() == snapshot(
|
||||
{
|
||||
"detail": [
|
||||
pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"type": "missing",
|
||||
"loc": ["body", "new_sub", "new_sub_name"],
|
||||
"msg": "Field required",
|
||||
"input": {"wrong_field": "value"},
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
{
|
||||
"loc": ["body", "new_sub", "new_sub_name"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing",
|
||||
}
|
||||
),
|
||||
)
|
||||
{
|
||||
"type": "missing",
|
||||
"loc": ["body", "new_sub", "new_sub_name"],
|
||||
"msg": "Field required",
|
||||
"input": {"wrong_field": "value"},
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
@@ -777,38 +751,20 @@ def test_v2_list_validation_error():
|
||||
assert response.status_code == 422, response.text
|
||||
assert response.json() == snapshot(
|
||||
{
|
||||
"detail": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
[
|
||||
{
|
||||
"type": "missing",
|
||||
"loc": ["body", 1, "new_size"],
|
||||
"msg": "Field required",
|
||||
"input": {"new_title": "Invalid"},
|
||||
},
|
||||
{
|
||||
"type": "missing",
|
||||
"loc": ["body", 1, "new_sub"],
|
||||
"msg": "Field required",
|
||||
"input": {"new_title": "Invalid"},
|
||||
},
|
||||
]
|
||||
),
|
||||
v1=snapshot(
|
||||
[
|
||||
{
|
||||
"loc": ["body", 1, "new_size"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing",
|
||||
},
|
||||
{
|
||||
"loc": ["body", 1, "new_sub"],
|
||||
"msg": "field required",
|
||||
"type": "value_error.missing",
|
||||
},
|
||||
]
|
||||
),
|
||||
)
|
||||
"detail": [
|
||||
{
|
||||
"type": "missing",
|
||||
"loc": ["body", 1, "new_size"],
|
||||
"msg": "Field required",
|
||||
"input": {"new_title": "Invalid"},
|
||||
},
|
||||
{
|
||||
"type": "missing",
|
||||
"loc": ["body", 1, "new_sub"],
|
||||
"msg": "Field required",
|
||||
"input": {"new_title": "Invalid"},
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
@@ -844,31 +800,18 @@ def test_invalid_list_structure_v2():
|
||||
assert response.status_code == 422, response.text
|
||||
assert response.json() == snapshot(
|
||||
{
|
||||
"detail": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
[
|
||||
{
|
||||
"type": "list_type",
|
||||
"loc": ["body"],
|
||||
"msg": "Input should be a valid list",
|
||||
"input": {
|
||||
"new_title": "Not a list",
|
||||
"new_size": 100,
|
||||
"new_sub": {"new_sub_name": "Sub"},
|
||||
},
|
||||
}
|
||||
]
|
||||
),
|
||||
v1=snapshot(
|
||||
[
|
||||
{
|
||||
"loc": ["body"],
|
||||
"msg": "value is not a valid list",
|
||||
"type": "type_error.list",
|
||||
}
|
||||
]
|
||||
),
|
||||
)
|
||||
"detail": [
|
||||
{
|
||||
"type": "list_type",
|
||||
"loc": ["body"],
|
||||
"msg": "Input should be a valid list",
|
||||
"input": {
|
||||
"new_title": "Not a list",
|
||||
"new_size": 100,
|
||||
"new_sub": {"new_sub_name": "Sub"},
|
||||
},
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
@@ -888,21 +831,12 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Item"
|
||||
}
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/Item"}
|
||||
),
|
||||
)
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -938,21 +872,12 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Item"
|
||||
}
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/Item"}
|
||||
),
|
||||
)
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -1056,21 +981,12 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Item"
|
||||
}
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{"$ref": "#/components/schemas/Item"}
|
||||
),
|
||||
)
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -1440,17 +1356,10 @@ def test_openapi_schema():
|
||||
"properties": {
|
||||
"new_title": {"type": "string", "title": "New Title"},
|
||||
"new_size": {"type": "integer", "title": "New Size"},
|
||||
"new_description": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"anyOf": [{"type": "string"}, {"type": "null"}],
|
||||
"title": "New Description",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
{"type": "string", "title": "New Description"}
|
||||
),
|
||||
),
|
||||
"new_description": {
|
||||
"anyOf": [{"type": "string"}, {"type": "null"}],
|
||||
"title": "New Description",
|
||||
},
|
||||
"new_sub": {"$ref": "#/components/schemas/NewSubItem"},
|
||||
"new_multi": {
|
||||
"items": {"$ref": "#/components/schemas/NewSubItem"},
|
||||
|
||||
@@ -1,140 +1,137 @@
|
||||
import warnings
|
||||
|
||||
from fastapi import FastAPI
|
||||
|
||||
from . import modelsv1, modelsv2, modelsv2b
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
with warnings.catch_warnings(record=True):
|
||||
warnings.simplefilter("always")
|
||||
|
||||
@app.post("/v1-to-v2/item")
|
||||
def handle_v1_item_to_v2(data: modelsv1.Item) -> modelsv2.Item:
|
||||
return modelsv2.Item(
|
||||
new_title=data.title,
|
||||
new_size=data.size,
|
||||
new_description=data.description,
|
||||
new_sub=modelsv2.SubItem(new_sub_name=data.sub.name),
|
||||
new_multi=[modelsv2.SubItem(new_sub_name=s.name) for s in data.multi],
|
||||
)
|
||||
@app.post("/v1-to-v2/item")
|
||||
def handle_v1_item_to_v2(data: modelsv1.Item) -> modelsv2.Item:
|
||||
return modelsv2.Item(
|
||||
new_title=data.title,
|
||||
new_size=data.size,
|
||||
new_description=data.description,
|
||||
new_sub=modelsv2.SubItem(new_sub_name=data.sub.name),
|
||||
new_multi=[modelsv2.SubItem(new_sub_name=s.name) for s in data.multi],
|
||||
)
|
||||
|
||||
@app.post("/v2-to-v1/item")
|
||||
def handle_v2_item_to_v1(data: modelsv2.Item) -> modelsv1.Item:
|
||||
return modelsv1.Item(
|
||||
title=data.new_title,
|
||||
size=data.new_size,
|
||||
description=data.new_description,
|
||||
sub=modelsv1.SubItem(name=data.new_sub.new_sub_name),
|
||||
multi=[modelsv1.SubItem(name=s.new_sub_name) for s in data.new_multi],
|
||||
)
|
||||
|
||||
@app.post("/v2-to-v1/item")
|
||||
def handle_v2_item_to_v1(data: modelsv2.Item) -> modelsv1.Item:
|
||||
return modelsv1.Item(
|
||||
title=data.new_title,
|
||||
size=data.new_size,
|
||||
description=data.new_description,
|
||||
sub=modelsv1.SubItem(name=data.new_sub.new_sub_name),
|
||||
multi=[modelsv1.SubItem(name=s.new_sub_name) for s in data.new_multi],
|
||||
)
|
||||
@app.post("/v1-to-v2/item-to-list")
|
||||
def handle_v1_item_to_v2_list(data: modelsv1.Item) -> list[modelsv2.Item]:
|
||||
converted = modelsv2.Item(
|
||||
new_title=data.title,
|
||||
new_size=data.size,
|
||||
new_description=data.description,
|
||||
new_sub=modelsv2.SubItem(new_sub_name=data.sub.name),
|
||||
new_multi=[modelsv2.SubItem(new_sub_name=s.name) for s in data.multi],
|
||||
)
|
||||
return [converted, converted]
|
||||
|
||||
@app.post("/v1-to-v2/list-to-list")
|
||||
def handle_v1_list_to_v2_list(data: list[modelsv1.Item]) -> list[modelsv2.Item]:
|
||||
result = []
|
||||
for item in data:
|
||||
result.append(
|
||||
modelsv2.Item(
|
||||
new_title=item.title,
|
||||
new_size=item.size,
|
||||
new_description=item.description,
|
||||
new_sub=modelsv2.SubItem(new_sub_name=item.sub.name),
|
||||
new_multi=[
|
||||
modelsv2.SubItem(new_sub_name=s.name) for s in item.multi
|
||||
],
|
||||
)
|
||||
)
|
||||
return result
|
||||
|
||||
@app.post("/v1-to-v2/item-to-list")
|
||||
def handle_v1_item_to_v2_list(data: modelsv1.Item) -> list[modelsv2.Item]:
|
||||
converted = modelsv2.Item(
|
||||
new_title=data.title,
|
||||
new_size=data.size,
|
||||
new_description=data.description,
|
||||
new_sub=modelsv2.SubItem(new_sub_name=data.sub.name),
|
||||
new_multi=[modelsv2.SubItem(new_sub_name=s.name) for s in data.multi],
|
||||
)
|
||||
return [converted, converted]
|
||||
|
||||
|
||||
@app.post("/v1-to-v2/list-to-list")
|
||||
def handle_v1_list_to_v2_list(data: list[modelsv1.Item]) -> list[modelsv2.Item]:
|
||||
result = []
|
||||
for item in data:
|
||||
result.append(
|
||||
modelsv2.Item(
|
||||
@app.post("/v1-to-v2/list-to-item")
|
||||
def handle_v1_list_to_v2_item(data: list[modelsv1.Item]) -> modelsv2.Item:
|
||||
if data:
|
||||
item = data[0]
|
||||
return modelsv2.Item(
|
||||
new_title=item.title,
|
||||
new_size=item.size,
|
||||
new_description=item.description,
|
||||
new_sub=modelsv2.SubItem(new_sub_name=item.sub.name),
|
||||
new_multi=[modelsv2.SubItem(new_sub_name=s.name) for s in item.multi],
|
||||
)
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
@app.post("/v1-to-v2/list-to-item")
|
||||
def handle_v1_list_to_v2_item(data: list[modelsv1.Item]) -> modelsv2.Item:
|
||||
if data:
|
||||
item = data[0]
|
||||
return modelsv2.Item(
|
||||
new_title=item.title,
|
||||
new_size=item.size,
|
||||
new_description=item.description,
|
||||
new_sub=modelsv2.SubItem(new_sub_name=item.sub.name),
|
||||
new_multi=[modelsv2.SubItem(new_sub_name=s.name) for s in item.multi],
|
||||
new_title="", new_size=0, new_sub=modelsv2.SubItem(new_sub_name="")
|
||||
)
|
||||
return modelsv2.Item(
|
||||
new_title="", new_size=0, new_sub=modelsv2.SubItem(new_sub_name="")
|
||||
)
|
||||
|
||||
@app.post("/v2-to-v1/item-to-list")
|
||||
def handle_v2_item_to_v1_list(data: modelsv2.Item) -> list[modelsv1.Item]:
|
||||
converted = modelsv1.Item(
|
||||
title=data.new_title,
|
||||
size=data.new_size,
|
||||
description=data.new_description,
|
||||
sub=modelsv1.SubItem(name=data.new_sub.new_sub_name),
|
||||
multi=[modelsv1.SubItem(name=s.new_sub_name) for s in data.new_multi],
|
||||
)
|
||||
return [converted, converted]
|
||||
|
||||
@app.post("/v2-to-v1/item-to-list")
|
||||
def handle_v2_item_to_v1_list(data: modelsv2.Item) -> list[modelsv1.Item]:
|
||||
converted = modelsv1.Item(
|
||||
title=data.new_title,
|
||||
size=data.new_size,
|
||||
description=data.new_description,
|
||||
sub=modelsv1.SubItem(name=data.new_sub.new_sub_name),
|
||||
multi=[modelsv1.SubItem(name=s.new_sub_name) for s in data.new_multi],
|
||||
)
|
||||
return [converted, converted]
|
||||
@app.post("/v2-to-v1/list-to-list")
|
||||
def handle_v2_list_to_v1_list(data: list[modelsv2.Item]) -> list[modelsv1.Item]:
|
||||
result = []
|
||||
for item in data:
|
||||
result.append(
|
||||
modelsv1.Item(
|
||||
title=item.new_title,
|
||||
size=item.new_size,
|
||||
description=item.new_description,
|
||||
sub=modelsv1.SubItem(name=item.new_sub.new_sub_name),
|
||||
multi=[
|
||||
modelsv1.SubItem(name=s.new_sub_name) for s in item.new_multi
|
||||
],
|
||||
)
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
@app.post("/v2-to-v1/list-to-list")
|
||||
def handle_v2_list_to_v1_list(data: list[modelsv2.Item]) -> list[modelsv1.Item]:
|
||||
result = []
|
||||
for item in data:
|
||||
result.append(
|
||||
modelsv1.Item(
|
||||
@app.post("/v2-to-v1/list-to-item")
|
||||
def handle_v2_list_to_v1_item(data: list[modelsv2.Item]) -> modelsv1.Item:
|
||||
if data:
|
||||
item = data[0]
|
||||
return modelsv1.Item(
|
||||
title=item.new_title,
|
||||
size=item.new_size,
|
||||
description=item.new_description,
|
||||
sub=modelsv1.SubItem(name=item.new_sub.new_sub_name),
|
||||
multi=[modelsv1.SubItem(name=s.new_sub_name) for s in item.new_multi],
|
||||
)
|
||||
)
|
||||
return result
|
||||
return modelsv1.Item(title="", size=0, sub=modelsv1.SubItem(name=""))
|
||||
|
||||
|
||||
@app.post("/v2-to-v1/list-to-item")
|
||||
def handle_v2_list_to_v1_item(data: list[modelsv2.Item]) -> modelsv1.Item:
|
||||
if data:
|
||||
item = data[0]
|
||||
@app.post("/v2-to-v1/same-name")
|
||||
def handle_v2_same_name_to_v1(
|
||||
item1: modelsv2.Item, item2: modelsv2b.Item
|
||||
) -> modelsv1.Item:
|
||||
return modelsv1.Item(
|
||||
title=item.new_title,
|
||||
size=item.new_size,
|
||||
description=item.new_description,
|
||||
sub=modelsv1.SubItem(name=item.new_sub.new_sub_name),
|
||||
multi=[modelsv1.SubItem(name=s.new_sub_name) for s in item.new_multi],
|
||||
title=item1.new_title,
|
||||
size=item2.dup_size,
|
||||
description=item1.new_description,
|
||||
sub=modelsv1.SubItem(name=item1.new_sub.new_sub_name),
|
||||
multi=[modelsv1.SubItem(name=s.dup_sub_name) for s in item2.dup_multi],
|
||||
)
|
||||
return modelsv1.Item(title="", size=0, sub=modelsv1.SubItem(name=""))
|
||||
|
||||
|
||||
@app.post("/v2-to-v1/same-name")
|
||||
def handle_v2_same_name_to_v1(
|
||||
item1: modelsv2.Item, item2: modelsv2b.Item
|
||||
) -> modelsv1.Item:
|
||||
return modelsv1.Item(
|
||||
title=item1.new_title,
|
||||
size=item2.dup_size,
|
||||
description=item1.new_description,
|
||||
sub=modelsv1.SubItem(name=item1.new_sub.new_sub_name),
|
||||
multi=[modelsv1.SubItem(name=s.dup_sub_name) for s in item2.dup_multi],
|
||||
)
|
||||
|
||||
|
||||
@app.post("/v2-to-v1/list-of-items-to-list-of-items")
|
||||
def handle_v2_items_in_list_to_v1_item_in_list(
|
||||
data1: list[modelsv2.ItemInList], data2: list[modelsv2b.ItemInList]
|
||||
) -> list[modelsv1.ItemInList]:
|
||||
result = []
|
||||
item1 = data1[0]
|
||||
item2 = data2[0]
|
||||
result = [
|
||||
modelsv1.ItemInList(name1=item1.name2),
|
||||
modelsv1.ItemInList(name1=item2.dup_name2),
|
||||
]
|
||||
return result
|
||||
@app.post("/v2-to-v1/list-of-items-to-list-of-items")
|
||||
def handle_v2_items_in_list_to_v1_item_in_list(
|
||||
data1: list[modelsv2.ItemInList], data2: list[modelsv2b.ItemInList]
|
||||
) -> list[modelsv1.ItemInList]:
|
||||
item1 = data1[0]
|
||||
item2 = data2[0]
|
||||
return [
|
||||
modelsv1.ItemInList(name1=item1.name2),
|
||||
modelsv1.ItemInList(name1=item2.dup_name2),
|
||||
]
|
||||

@@ -1,6 +1,6 @@
import sys

from tests.utils import pydantic_snapshot, skip_module_if_py_gte_314
from tests.utils import skip_module_if_py_gte_314

if sys.version_info >= (3, 14):
    skip_module_if_py_gte_314()
@@ -292,23 +292,14 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv1__Item"
|
||||
}
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv1__Item"
|
||||
}
|
||||
),
|
||||
)
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -344,18 +335,9 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input"
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item"
|
||||
}
|
||||
),
|
||||
),
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input"
|
||||
},
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -391,23 +373,14 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv1__Item"
|
||||
}
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
"schema": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv1__Item"
|
||||
}
|
||||
),
|
||||
)
|
||||
],
|
||||
"title": "Data",
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -535,18 +508,9 @@ def test_openapi_schema():
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input"
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item"
|
||||
}
|
||||
),
|
||||
),
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input"
|
||||
},
|
||||
}
|
||||
},
|
||||
"required": True,
|
||||
@@ -587,18 +551,9 @@ def test_openapi_schema():
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"items": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input"
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item"
|
||||
}
|
||||
),
|
||||
),
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Data",
|
||||
}
|
||||
@@ -642,18 +597,9 @@ def test_openapi_schema():
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"items": pydantic_snapshot(
|
||||
v2=snapshot(
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input"
|
||||
}
|
||||
),
|
||||
v1=snapshot(
|
||||
{
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item"
|
||||
}
|
||||
),
|
||||
),
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Data",
|
||||
}
|
||||
@@ -767,460 +713,239 @@ def test_openapi_schema():
|
||||
},
|
||||
},
|
||||
"components": {
|
||||
"schemas": pydantic_snapshot(
|
||||
v1=snapshot(
|
||||
{
|
||||
"Body_handle_v2_items_in_list_to_v1_item_in_list_v2_to_v1_list_of_items_to_list_of_items_post": {
|
||||
"properties": {
|
||||
"data1": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__ItemInList"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Data1",
|
||||
},
|
||||
"data2": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__ItemInList"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Data2",
|
||||
},
|
||||
"schemas": {
|
||||
"Body_handle_v2_items_in_list_to_v1_item_in_list_v2_to_v1_list_of_items_to_list_of_items_post": {
|
||||
"properties": {
|
||||
"data1": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__ItemInList"
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["data1", "data2"],
|
||||
"title": "Body_handle_v2_items_in_list_to_v1_item_in_list_v2_to_v1_list_of_items_to_list_of_items_post",
|
||||
"type": "array",
|
||||
"title": "Data1",
|
||||
},
|
||||
"Body_handle_v2_same_name_to_v1_v2_to_v1_same_name_post": {
|
||||
"properties": {
|
||||
"item1": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item"
|
||||
},
|
||||
"item2": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__Item"
|
||||
},
|
||||
"data2": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__ItemInList"
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["item1", "item2"],
|
||||
"title": "Body_handle_v2_same_name_to_v1_v2_to_v1_same_name_post",
|
||||
"type": "array",
|
||||
"title": "Data2",
|
||||
},
|
||||
"HTTPValidationError": {
|
||||
"properties": {
|
||||
"detail": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ValidationError"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Detail",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["data1", "data2"],
|
||||
"title": "Body_handle_v2_items_in_list_to_v1_item_in_list_v2_to_v1_list_of_items_to_list_of_items_post",
|
||||
},
|
||||
"Body_handle_v2_same_name_to_v1_v2_to_v1_same_name_post": {
|
||||
"properties": {
|
||||
"item1": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input"
|
||||
},
|
||||
"item2": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__Item"
|
||||
},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["item1", "item2"],
|
||||
"title": "Body_handle_v2_same_name_to_v1_v2_to_v1_same_name_post",
|
||||
},
|
||||
"HTTPValidationError": {
|
||||
"properties": {
|
||||
"detail": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ValidationError"
|
||||
},
|
||||
"type": "object",
|
||||
"title": "HTTPValidationError",
|
||||
},
|
||||
"ValidationError": {
|
||||
"properties": {
|
||||
"loc": {
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{"type": "string"},
|
||||
{"type": "integer"},
|
||||
]
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Location",
|
||||
},
|
||||
"msg": {"type": "string", "title": "Message"},
|
||||
"type": {"type": "string", "title": "Error Type"},
|
||||
"type": "array",
|
||||
"title": "Detail",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"title": "HTTPValidationError",
|
||||
},
|
||||
"ValidationError": {
|
||||
"properties": {
|
||||
"loc": {
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{"type": "string"},
|
||||
{"type": "integer"},
|
||||
]
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["loc", "msg", "type"],
|
||||
"title": "ValidationError",
|
||||
"type": "array",
|
||||
"title": "Location",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv1__Item": {
|
||||
"properties": {
|
||||
"title": {"type": "string", "title": "Title"},
|
||||
"size": {"type": "integer", "title": "Size"},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"title": "Description",
|
||||
},
|
||||
"sub": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv1__SubItem"
|
||||
},
|
||||
"multi": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv1__SubItem"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Multi",
|
||||
"default": [],
|
||||
},
|
||||
"msg": {"type": "string", "title": "Message"},
|
||||
"type": {"type": "string", "title": "Error Type"},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["loc", "msg", "type"],
|
||||
"title": "ValidationError",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv1__Item": {
|
||||
"properties": {
|
||||
"title": {"type": "string", "title": "Title"},
|
||||
"size": {"type": "integer", "title": "Size"},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"title": "Description",
|
||||
},
|
||||
"sub": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv1__SubItem"
|
||||
},
|
||||
"multi": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv1__SubItem"
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["title", "size", "sub"],
|
||||
"title": "Item",
|
||||
"type": "array",
|
||||
"title": "Multi",
|
||||
"default": [],
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv1__ItemInList": {
|
||||
"properties": {
|
||||
"name1": {"type": "string", "title": "Name1"}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["title", "size", "sub"],
|
||||
"title": "Item",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv1__ItemInList": {
|
||||
"properties": {"name1": {"type": "string", "title": "Name1"}},
|
||||
"type": "object",
|
||||
"required": ["name1"],
|
||||
"title": "ItemInList",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv1__SubItem": {
|
||||
"properties": {"name": {"type": "string", "title": "Name"}},
|
||||
"type": "object",
|
||||
"required": ["name"],
|
||||
"title": "SubItem",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2__Item": {
|
||||
"properties": {
|
||||
"new_title": {
|
||||
"type": "string",
|
||||
"title": "New Title",
|
||||
},
|
||||
"new_size": {
|
||||
"type": "integer",
|
||||
"title": "New Size",
|
||||
},
|
||||
"new_description": {
|
||||
"anyOf": [{"type": "string"}, {"type": "null"}],
|
||||
"title": "New Description",
|
||||
},
|
||||
"new_sub": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem"
|
||||
},
|
||||
"new_multi": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem"
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["name1"],
|
||||
"title": "ItemInList",
|
||||
"type": "array",
|
||||
"title": "New Multi",
|
||||
"default": [],
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv1__SubItem": {
|
||||
"properties": {
|
||||
"name": {"type": "string", "title": "Name"}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["new_title", "new_size", "new_sub"],
|
||||
"title": "Item",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input": {
|
||||
"properties": {
|
||||
"new_title": {
|
||||
"type": "string",
|
||||
"title": "New Title",
|
||||
},
|
||||
"new_size": {
|
||||
"type": "integer",
|
||||
"title": "New Size",
|
||||
},
|
||||
"new_description": {
|
||||
"anyOf": [{"type": "string"}, {"type": "null"}],
|
||||
"title": "New Description",
|
||||
},
|
||||
"new_sub": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem"
|
||||
},
|
||||
"new_multi": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem"
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["name"],
|
||||
"title": "SubItem",
|
||||
"type": "array",
|
||||
"title": "New Multi",
|
||||
"default": [],
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2__Item": {
|
||||
"properties": {
|
||||
"new_title": {
|
||||
"type": "string",
|
||||
"title": "New Title",
|
||||
},
|
||||
"new_size": {
|
||||
"type": "integer",
|
||||
"title": "New Size",
|
||||
},
|
||||
"new_description": {
|
||||
"type": "string",
|
||||
"title": "New Description",
|
||||
},
|
||||
"new_sub": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem"
|
||||
},
|
||||
"new_multi": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "New Multi",
|
||||
"default": [],
|
||||
},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["new_title", "new_size", "new_sub"],
|
||||
"title": "Item",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2__ItemInList": {
|
||||
"properties": {"name2": {"type": "string", "title": "Name2"}},
|
||||
"type": "object",
|
||||
"required": ["name2"],
|
||||
"title": "ItemInList",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem": {
|
||||
"properties": {
|
||||
"new_sub_name": {
|
||||
"type": "string",
|
||||
"title": "New Sub Name",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["new_sub_name"],
|
||||
"title": "SubItem",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2b__Item": {
|
||||
"properties": {
|
||||
"dup_title": {
|
||||
"type": "string",
|
||||
"title": "Dup Title",
|
||||
},
|
||||
"dup_size": {
|
||||
"type": "integer",
|
||||
"title": "Dup Size",
|
||||
},
|
||||
"dup_description": {
|
||||
"anyOf": [{"type": "string"}, {"type": "null"}],
|
||||
"title": "Dup Description",
|
||||
},
|
||||
"dup_sub": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__SubItem"
|
||||
},
|
||||
"dup_multi": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__SubItem"
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["new_title", "new_size", "new_sub"],
|
||||
"title": "Item",
|
||||
"type": "array",
|
||||
"title": "Dup Multi",
|
||||
"default": [],
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2__ItemInList": {
|
||||
"properties": {
|
||||
"name2": {"type": "string", "title": "Name2"}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["name2"],
|
||||
"title": "ItemInList",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem": {
|
||||
"properties": {
|
||||
"new_sub_name": {
|
||||
"type": "string",
|
||||
"title": "New Sub Name",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["new_sub_name"],
|
||||
"title": "SubItem",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2b__Item": {
|
||||
"properties": {
|
||||
"dup_title": {
|
||||
"type": "string",
|
||||
"title": "Dup Title",
|
||||
},
|
||||
"dup_size": {
|
||||
"type": "integer",
|
||||
"title": "Dup Size",
|
||||
},
|
||||
"dup_description": {
|
||||
"type": "string",
|
||||
"title": "Dup Description",
|
||||
},
|
||||
"dup_sub": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__SubItem"
|
||||
},
|
||||
"dup_multi": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__SubItem"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Dup Multi",
|
||||
"default": [],
|
||||
},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["dup_title", "dup_size", "dup_sub"],
|
||||
"title": "Item",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2b__ItemInList": {
|
||||
"properties": {
|
||||
"dup_name2": {
|
||||
"type": "string",
|
||||
"title": "Dup Name2",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["dup_name2"],
|
||||
"title": "ItemInList",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2b__SubItem": {
|
||||
"properties": {
|
||||
"dup_sub_name": {
|
||||
"type": "string",
|
||||
"title": "Dup Sub Name",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["dup_sub_name"],
|
||||
"title": "SubItem",
|
||||
},
|
||||
}
|
||||
),
|
||||
v2=snapshot(
|
||||
{
|
||||
"Body_handle_v2_items_in_list_to_v1_item_in_list_v2_to_v1_list_of_items_to_list_of_items_post": {
|
||||
"properties": {
|
||||
"data1": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__ItemInList"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Data1",
|
||||
},
|
||||
"data2": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__ItemInList"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Data2",
|
||||
},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["data1", "data2"],
|
||||
"title": "Body_handle_v2_items_in_list_to_v1_item_in_list_v2_to_v1_list_of_items_to_list_of_items_post",
|
||||
},
|
||||
"Body_handle_v2_same_name_to_v1_v2_to_v1_same_name_post": {
|
||||
"properties": {
|
||||
"item1": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input"
|
||||
},
|
||||
"item2": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__Item"
|
||||
},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["item1", "item2"],
|
||||
"title": "Body_handle_v2_same_name_to_v1_v2_to_v1_same_name_post",
|
||||
},
|
||||
"HTTPValidationError": {
|
||||
"properties": {
|
||||
"detail": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ValidationError"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Detail",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"title": "HTTPValidationError",
|
||||
},
|
||||
"ValidationError": {
|
||||
"properties": {
|
||||
"loc": {
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{"type": "string"},
|
||||
{"type": "integer"},
|
||||
]
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Location",
|
||||
},
|
||||
"msg": {"type": "string", "title": "Message"},
|
||||
"type": {"type": "string", "title": "Error Type"},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["loc", "msg", "type"],
|
||||
"title": "ValidationError",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv1__Item": {
|
||||
"properties": {
|
||||
"title": {"type": "string", "title": "Title"},
|
||||
"size": {"type": "integer", "title": "Size"},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"title": "Description",
|
||||
},
|
||||
"sub": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv1__SubItem"
|
||||
},
|
||||
"multi": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv1__SubItem"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Multi",
|
||||
"default": [],
|
||||
},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["title", "size", "sub"],
|
||||
"title": "Item",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv1__ItemInList": {
|
||||
"properties": {
|
||||
"name1": {"type": "string", "title": "Name1"}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["name1"],
|
||||
"title": "ItemInList",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv1__SubItem": {
|
||||
"properties": {
|
||||
"name": {"type": "string", "title": "Name"}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["name"],
|
||||
"title": "SubItem",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2__Item": {
|
||||
"properties": {
|
||||
"new_title": {
|
||||
"type": "string",
|
||||
"title": "New Title",
|
||||
},
|
||||
"new_size": {
|
||||
"type": "integer",
|
||||
"title": "New Size",
|
||||
},
|
||||
"new_description": {
|
||||
"anyOf": [{"type": "string"}, {"type": "null"}],
|
||||
"title": "New Description",
|
||||
},
|
||||
"new_sub": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem"
|
||||
},
|
||||
"new_multi": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "New Multi",
|
||||
"default": [],
|
||||
},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["new_title", "new_size", "new_sub"],
|
||||
"title": "Item",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2__Item-Input": {
|
||||
"properties": {
|
||||
"new_title": {
|
||||
"type": "string",
|
||||
"title": "New Title",
|
||||
},
|
||||
"new_size": {
|
||||
"type": "integer",
|
||||
"title": "New Size",
|
||||
},
|
||||
"new_description": {
|
||||
"anyOf": [{"type": "string"}, {"type": "null"}],
|
||||
"title": "New Description",
|
||||
},
|
||||
"new_sub": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem"
|
||||
},
|
||||
"new_multi": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "New Multi",
|
||||
"default": [],
|
||||
},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["new_title", "new_size", "new_sub"],
|
||||
"title": "Item",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2__ItemInList": {
|
||||
"properties": {
|
||||
"name2": {"type": "string", "title": "Name2"}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["name2"],
|
||||
"title": "ItemInList",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2__SubItem": {
|
||||
"properties": {
|
||||
"new_sub_name": {
|
||||
"type": "string",
|
||||
"title": "New Sub Name",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["new_sub_name"],
|
||||
"title": "SubItem",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2b__Item": {
|
||||
"properties": {
|
||||
"dup_title": {
|
||||
"type": "string",
|
||||
"title": "Dup Title",
|
||||
},
|
||||
"dup_size": {
|
||||
"type": "integer",
|
||||
"title": "Dup Size",
|
||||
},
|
||||
"dup_description": {
|
||||
"anyOf": [{"type": "string"}, {"type": "null"}],
|
||||
"title": "Dup Description",
|
||||
},
|
||||
"dup_sub": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__SubItem"
|
||||
},
|
||||
"dup_multi": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/tests__test_pydantic_v1_v2_multifile__modelsv2b__SubItem"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Dup Multi",
|
||||
"default": [],
|
||||
},
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["dup_title", "dup_size", "dup_sub"],
|
||||
"title": "Item",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2b__ItemInList": {
|
||||
"properties": {
|
||||
"dup_name2": {
|
||||
"type": "string",
|
||||
"title": "Dup Name2",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["dup_name2"],
|
||||
"title": "ItemInList",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2b__SubItem": {
|
||||
"properties": {
|
||||
"dup_sub_name": {
|
||||
"type": "string",
|
||||
"title": "Dup Sub Name",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["dup_sub_name"],
|
||||
"title": "SubItem",
|
||||
},
|
||||
}
|
||||
),
|
||||
),
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["dup_title", "dup_size", "dup_sub"],
|
||||
"title": "Item",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2b__ItemInList": {
|
||||
"properties": {
|
||||
"dup_name2": {
|
||||
"type": "string",
|
||||
"title": "Dup Name2",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["dup_name2"],
|
||||
"title": "ItemInList",
|
||||
},
|
||||
"tests__test_pydantic_v1_v2_multifile__modelsv2b__SubItem": {
|
||||
"properties": {
|
||||
"dup_sub_name": {
|
||||
"type": "string",
|
||||
"title": "Dup Sub Name",
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": ["dup_sub_name"],
|
||||
"title": "SubItem",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
@@ -1,7 +1,8 @@
import sys
import warnings
from typing import Any, Union

from tests.utils import pydantic_snapshot, skip_module_if_py_gte_314
from tests.utils import skip_module_if_py_gte_314

if sys.version_info >= (3, 14):
    skip_module_if_py_gte_314()
@@ -39,65 +40,69 @@ class NewItem(NewBaseModel):
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
with warnings.catch_warnings(record=True):
|
||||
warnings.simplefilter("always")

@app.post("/v1-to-v2/")
def handle_v1_item_to_v2(data: Item) -> Union[NewItem, None]:
    if data.size < 0:
        return None
    return NewItem(
        new_title=data.title,
        new_size=data.size,
        new_description=data.description,
        new_sub=NewSubItem(new_sub_name=data.sub.name),
        new_multi=[NewSubItem(new_sub_name=s.name) for s in data.multi],
    )
@app.post("/v1-to-v2/")
def handle_v1_item_to_v2(data: Item) -> Union[NewItem, None]:
    if data.size < 0:
        return None
    return NewItem(
        new_title=data.title,
        new_size=data.size,
        new_description=data.description,
        new_sub=NewSubItem(new_sub_name=data.sub.name),
        new_multi=[NewSubItem(new_sub_name=s.name) for s in data.multi],
    )

@app.post("/v1-to-v2/item-filter", response_model=Union[NewItem, None])
def handle_v1_item_to_v2_filter(data: Item) -> Any:
    if data.size < 0:
        return None
    result = {
        "new_title": data.title,
        "new_size": data.size,
        "new_description": data.description,
        "new_sub": {
            "new_sub_name": data.sub.name,
            "new_sub_secret": "sub_hidden",
        },
        "new_multi": [
            {"new_sub_name": s.name, "new_sub_secret": "sub_hidden"}
            for s in data.multi
        ],
        "secret": "hidden_v1_to_v2",
    }
    return result

@app.post("/v1-to-v2/item-filter", response_model=Union[NewItem, None])
def handle_v1_item_to_v2_filter(data: Item) -> Any:
    if data.size < 0:
        return None
    result = {
        "new_title": data.title,
        "new_size": data.size,
        "new_description": data.description,
        "new_sub": {"new_sub_name": data.sub.name, "new_sub_secret": "sub_hidden"},
        "new_multi": [
            {"new_sub_name": s.name, "new_sub_secret": "sub_hidden"} for s in data.multi
        ],
        "secret": "hidden_v1_to_v2",
    }
    return result
@app.post("/v2-to-v1/item")
def handle_v2_item_to_v1(data: NewItem) -> Union[Item, None]:
    if data.new_size < 0:
        return None
    return Item(
        title=data.new_title,
        size=data.new_size,
        description=data.new_description,
        sub=SubItem(name=data.new_sub.new_sub_name),
        multi=[SubItem(name=s.new_sub_name) for s in data.new_multi],
    )


@app.post("/v2-to-v1/item")
def handle_v2_item_to_v1(data: NewItem) -> Union[Item, None]:
    if data.new_size < 0:
        return None
    return Item(
        title=data.new_title,
        size=data.new_size,
        description=data.new_description,
        sub=SubItem(name=data.new_sub.new_sub_name),
        multi=[SubItem(name=s.new_sub_name) for s in data.new_multi],
    )


@app.post("/v2-to-v1/item-filter", response_model=Union[Item, None])
def handle_v2_item_to_v1_filter(data: NewItem) -> Any:
    if data.new_size < 0:
        return None
    result = {
        "title": data.new_title,
        "size": data.new_size,
        "description": data.new_description,
        "sub": {"name": data.new_sub.new_sub_name, "sub_secret": "sub_hidden"},
        "multi": [
            {"name": s.new_sub_name, "sub_secret": "sub_hidden"} for s in data.new_multi
        ],
        "secret": "hidden_v2_to_v1",
    }
    return result
@app.post("/v2-to-v1/item-filter", response_model=Union[Item, None])
def handle_v2_item_to_v1_filter(data: NewItem) -> Any:
    if data.new_size < 0:
        return None
    result = {
        "title": data.new_title,
        "size": data.new_size,
        "description": data.new_description,
        "sub": {"name": data.new_sub.new_sub_name, "sub_secret": "sub_hidden"},
        "multi": [
            {"name": s.new_sub_name, "sub_secret": "sub_hidden"}
            for s in data.new_multi
        ],
        "secret": "hidden_v2_to_v1",
    }
    return result


client = TestClient(app)
@@ -312,38 +317,20 @@ def test_v2_to_v1_validation_error():
    assert response.status_code == 422, response.text
    assert response.json() == snapshot(
        {
            "detail": pydantic_snapshot(
                v2=snapshot(
                    [
                        {
                            "type": "missing",
                            "loc": ["body", "new_size"],
                            "msg": "Field required",
                            "input": {"new_title": "Missing fields"},
                        },
                        {
                            "type": "missing",
                            "loc": ["body", "new_sub"],
                            "msg": "Field required",
                            "input": {"new_title": "Missing fields"},
                        },
                    ]
                ),
                v1=snapshot(
                    [
                        {
                            "loc": ["body", "new_size"],
                            "msg": "field required",
                            "type": "value_error.missing",
                        },
                        {
                            "loc": ["body", "new_sub"],
                            "msg": "field required",
                            "type": "value_error.missing",
                        },
                    ]
                ),
            )
            "detail": [
                {
                    "type": "missing",
                    "loc": ["body", "new_size"],
                    "msg": "Field required",
                    "input": {"new_title": "Missing fields"},
                },
                {
                    "type": "missing",
                    "loc": ["body", "new_sub"],
                    "msg": "Field required",
                    "input": {"new_title": "Missing fields"},
                },
            ]
        }
    )

@@ -361,23 +348,12 @@ def test_v2_to_v1_nested_validation_error():
    assert response.json() == snapshot(
        {
            "detail": [
                pydantic_snapshot(
                    v2=snapshot(
                        {
                            "type": "missing",
                            "loc": ["body", "new_sub", "new_sub_name"],
                            "msg": "Field required",
                            "input": {"wrong_field": "value"},
                        }
                    ),
                    v1=snapshot(
                        {
                            "loc": ["body", "new_sub", "new_sub_name"],
                            "msg": "field required",
                            "type": "value_error.missing",
                        }
                    ),
                )
                {
                    "type": "missing",
                    "loc": ["body", "new_sub", "new_sub_name"],
                    "msg": "Field required",
                    "input": {"wrong_field": "value"},
                }
            ]
        }
    )
@@ -396,23 +372,12 @@ def test_v2_to_v1_type_validation_error():
    assert response.json() == snapshot(
        {
            "detail": [
                pydantic_snapshot(
                    v2=snapshot(
                        {
                            "type": "int_parsing",
                            "loc": ["body", "new_size"],
                            "msg": "Input should be a valid integer, unable to parse string as an integer",
                            "input": "not_a_number",
                        }
                    ),
                    v1=snapshot(
                        {
                            "loc": ["body", "new_size"],
                            "msg": "value is not a valid integer",
                            "type": "type_error.integer",
                        }
                    ),
                )
                {
                    "type": "int_parsing",
                    "loc": ["body", "new_size"],
                    "msg": "Input should be a valid integer, unable to parse string as an integer",
                    "input": "not_a_number",
                }
            ]
        }
    )
@@ -483,21 +448,12 @@ def test_openapi_schema():
"requestBody": {
"content": {
"application/json": {
"schema": pydantic_snapshot(
v2=snapshot(
{
"allOf": [
{
"$ref": "#/components/schemas/Item"
}
],
"title": "Data",
}
),
v1=snapshot(
"schema": {
"allOf": [
{"$ref": "#/components/schemas/Item"}
),
)
],
"title": "Data",
}
}
},
"required": True,
@@ -507,22 +463,15 @@ def test_openapi_schema():
"description": "Successful Response",
"content": {
"application/json": {
"schema": pydantic_snapshot(
v2=snapshot(
"schema": {
"anyOf": [
{
"anyOf": [
{
"$ref": "#/components/schemas/NewItem"
},
{"type": "null"},
],
"title": "Response Handle V1 Item To V2 V1 To V2 Post",
}
),
v1=snapshot(
{"$ref": "#/components/schemas/NewItem"}
),
)
"$ref": "#/components/schemas/NewItem"
},
{"type": "null"},
],
"title": "Response Handle V1 Item To V2 V1 To V2 Post",
}
}
},
},
@@ -546,21 +495,12 @@ def test_openapi_schema():
"requestBody": {
"content": {
"application/json": {
"schema": pydantic_snapshot(
v2=snapshot(
{
"allOf": [
{
"$ref": "#/components/schemas/Item"
}
],
"title": "Data",
}
),
v1=snapshot(
"schema": {
"allOf": [
{"$ref": "#/components/schemas/Item"}
),
)
],
"title": "Data",
}
}
},
"required": True,
@@ -570,22 +510,15 @@ def test_openapi_schema():
"description": "Successful Response",
"content": {
"application/json": {
"schema": pydantic_snapshot(
v2=snapshot(
"schema": {
"anyOf": [
{
"anyOf": [
{
"$ref": "#/components/schemas/NewItem"
},
{"type": "null"},
],
"title": "Response Handle V1 Item To V2 Filter V1 To V2 Item Filter Post",
}
),
v1=snapshot(
{"$ref": "#/components/schemas/NewItem"}
),
)
"$ref": "#/components/schemas/NewItem"
},
{"type": "null"},
],
"title": "Response Handle V1 Item To V2 Filter V1 To V2 Item Filter Post",
}
}
},
},
@@ -707,17 +640,10 @@ def test_openapi_schema():
"properties": {
"new_title": {"type": "string", "title": "New Title"},
"new_size": {"type": "integer", "title": "New Size"},
"new_description": pydantic_snapshot(
v2=snapshot(
{
"anyOf": [{"type": "string"}, {"type": "null"}],
"title": "New Description",
}
),
v1=snapshot(
{"type": "string", "title": "New Description"}
),
),
"new_description": {
"anyOf": [{"type": "string"}, {"type": "null"}],
"title": "New Description",
},
"new_sub": {"$ref": "#/components/schemas/NewSubItem"},
"new_multi": {
"items": {"$ref": "#/components/schemas/NewSubItem"},

@@ -1,5 +1,4 @@
from fastapi import Cookie, FastAPI, Header, Query
from fastapi._compat import PYDANTIC_V2
from fastapi.testclient import TestClient
from pydantic import BaseModel

@@ -9,12 +8,7 @@ app = FastAPI()
class Model(BaseModel):
    param: str

    if PYDANTIC_V2:
        model_config = {"extra": "allow"}
    else:

        class Config:
            extra = "allow"
    model_config = {"extra": "allow"}


@app.get("/query")

@@ -1,13 +1,13 @@
import warnings
from typing import Any

from fastapi import FastAPI
from fastapi.testclient import TestClient
from pydantic import BaseModel, ConfigDict

from .utils import needs_pydanticv1, needs_pydanticv2
from .utils import needs_pydanticv1


@needs_pydanticv2
def test_read_with_orm_mode() -> None:
    class PersonBase(BaseModel):
        name: str
@@ -48,7 +48,9 @@ def test_read_with_orm_mode() -> None:

@needs_pydanticv1
def test_read_with_orm_mode_pv1() -> None:
    class PersonBase(BaseModel):
    from pydantic import v1

    class PersonBase(v1.BaseModel):
        name: str
        lastname: str

@@ -72,10 +74,13 @@ def test_read_with_orm_mode_pv1() -> None:

    app = FastAPI()

    @app.post("/people/", response_model=PersonRead)
    def create_person(person: PersonCreate) -> Any:
        db_person = Person.from_orm(person)
        return db_person
    with warnings.catch_warnings(record=True):
        warnings.simplefilter("always")

        @app.post("/people/", response_model=PersonRead)
        def create_person(person: PersonCreate) -> Any:
            db_person = Person.from_orm(person)
            return db_person

    client = TestClient(app)


@@ -1,6 +1,5 @@
from dirty_equals import IsPartialDict
from fastapi import Cookie, FastAPI, Header, Query
from fastapi._compat import PYDANTIC_V2
from fastapi.testclient import TestClient
from pydantic import BaseModel, Field

@@ -53,8 +52,7 @@ def test_query_model_with_alias_by_name():
    response = client.get("/query", params={"param": "value"})
    assert response.status_code == 422, response.text
    details = response.json()
    if PYDANTIC_V2:
        assert details["detail"][0]["input"] == {"param": "value"}
    assert details["detail"][0]["input"] == {"param": "value"}


def test_header_model_with_alias_by_name():
@@ -62,8 +60,7 @@ def test_header_model_with_alias_by_name():
    response = client.get("/header", headers={"param": "value"})
    assert response.status_code == 422, response.text
    details = response.json()
    if PYDANTIC_V2:
        assert details["detail"][0]["input"] == IsPartialDict({"param": "value"})
    assert details["detail"][0]["input"] == IsPartialDict({"param": "value"})


def test_cookie_model_with_alias_by_name():
@@ -72,5 +69,4 @@ def test_cookie_model_with_alias_by_name():
    response = client.get("/cookie")
    assert response.status_code == 422, response.text
    details = response.json()
    if PYDANTIC_V2:
        assert details["detail"][0]["input"] == {"param": "value"}
    assert details["detail"][0]["input"] == {"param": "value"}

@@ -6,8 +6,6 @@ from fastapi import Body, FastAPI
from fastapi.testclient import TestClient
from pydantic import BaseModel, Field

from tests.utils import needs_pydanticv2

from .utils import get_body_model_name

app = FastAPI()
@@ -246,7 +244,6 @@ async def read_model_required_list_validation_alias(
    return {"p": p.p}


@needs_pydanticv2
@pytest.mark.parametrize(
    "path",
    ["/required-list-validation-alias", "/model-required-list-validation-alias"],
@@ -269,7 +266,6 @@ def test_required_list_validation_alias_schema(path: str):
    }


@needs_pydanticv2
@pytest.mark.parametrize("json", [None, {}])
@pytest.mark.parametrize(
    "path",
@@ -294,7 +290,6 @@ def test_required_list_validation_alias_missing(path: str, json: Union[dict, Non
    }


@needs_pydanticv2
@pytest.mark.parametrize(
    "path",
    [
@@ -319,7 +314,6 @@ def test_required_list_validation_alias_by_name(path: str):
    }


@needs_pydanticv2
@pytest.mark.parametrize(
    "path",
    [
@@ -364,7 +358,6 @@ def read_model_required_list_alias_and_validation_alias(
    return {"p": p.p}


@needs_pydanticv2
@pytest.mark.parametrize(
    "path",
    [
@@ -390,7 +383,6 @@ def test_required_list_alias_and_validation_alias_schema(path: str):
    }


@needs_pydanticv2
@pytest.mark.parametrize("json", [None, {}])
@pytest.mark.parametrize(
    "path",
@@ -415,7 +407,6 @@ def test_required_list_alias_and_validation_alias_missing(path: str, json):
    }


@needs_pydanticv2
@pytest.mark.parametrize(
    "path",
    [
@@ -442,7 +433,6 @@ def test_required_list_alias_and_validation_alias_by_name(path: str):
    }


@needs_pydanticv2
@pytest.mark.parametrize(
    "path",
    [
@@ -467,7 +457,6 @@ def test_required_list_alias_and_validation_alias_by_alias(path: str):
    }


@needs_pydanticv2
@pytest.mark.parametrize(
    "path",
    [

Some files were not shown because too many files have changed in this diff.