API refactor

2025-10-07 16:25:52 +09:00
parent 76d0d86211
commit 91c7e04474
1171 changed files with 81940 additions and 44117 deletions


@@ -1,23 +1,33 @@
"""Configuration for Pydantic models."""
from __future__ import annotations as _annotations
from typing import TYPE_CHECKING, Any, Callable, Dict, Type, Union
import warnings
from re import Pattern
from typing import TYPE_CHECKING, Any, Callable, Literal, TypeVar, Union, cast, overload
from typing_extensions import Literal, TypeAlias, TypedDict
from typing_extensions import TypeAlias, TypedDict, Unpack, deprecated
from ._migration import getattr_migration
from .aliases import AliasGenerator
from .errors import PydanticUserError
from .warnings import PydanticDeprecatedSince211
if TYPE_CHECKING:
from ._internal._generate_schema import GenerateSchema as _GenerateSchema
from .fields import ComputedFieldInfo, FieldInfo
__all__ = ('ConfigDict',)
__all__ = ('ConfigDict', 'with_config')
JsonValue: TypeAlias = Union[int, float, str, bool, None, list['JsonValue'], 'JsonDict']
JsonDict: TypeAlias = dict[str, JsonValue]
JsonEncoder = Callable[[Any], Any]
JsonSchemaExtraCallable: TypeAlias = Union[
Callable[[Dict[str, Any]], None],
Callable[[Dict[str, Any], Type[Any]], None],
Callable[[JsonDict], None],
Callable[[JsonDict, type[Any]], None],
]
ExtraValues = Literal['allow', 'ignore', 'forbid']
@@ -29,11 +39,18 @@ class ConfigDict(TypedDict, total=False):
title: str | None
"""The title for the generated JSON schema, defaults to the model's name"""
model_title_generator: Callable[[type], str] | None
"""A callable that takes a model class and returns the title for it. Defaults to `None`."""
field_title_generator: Callable[[str, FieldInfo | ComputedFieldInfo], str] | None
"""A callable that takes a field's name and info and returns title for it. Defaults to `None`."""
str_to_lower: bool
"""Whether to convert all characters to lowercase for str types. Defaults to `False`."""
str_to_upper: bool
"""Whether to convert all characters to uppercase for str types. Defaults to `False`."""
str_strip_whitespace: bool
"""Whether to strip leading and trailing whitespace for str types."""
@@ -44,84 +61,108 @@ class ConfigDict(TypedDict, total=False):
"""The maximum length for str types. Defaults to `None`."""
extra: ExtraValues | None
"""
Whether to ignore, allow, or forbid extra attributes during model initialization. Defaults to `'ignore'`.
'''
Whether to ignore, allow, or forbid extra data during model initialization. Defaults to `'ignore'`.
You can configure how pydantic handles the attributes that are not defined in the model:
Three configuration values are available:
* `allow` - Allow any extra attributes.
* `forbid` - Forbid any extra attributes.
* `ignore` - Ignore any extra attributes.
- `'ignore'`: Providing extra data is ignored (the default):
```python
from pydantic import BaseModel, ConfigDict
```py
from pydantic import BaseModel, ConfigDict
class User(BaseModel):
model_config = ConfigDict(extra='ignore') # (1)!
name: str
user = User(name='John Doe', age=20) # (2)!
print(user)
#> name='John Doe'
```
1. This is the default behaviour.
2. The `age` argument is ignored.
- `'forbid'`: Providing extra data is not permitted, and a [`ValidationError`][pydantic_core.ValidationError]
will be raised if this is the case:
```python
from pydantic import BaseModel, ConfigDict, ValidationError
class User(BaseModel):
model_config = ConfigDict(extra='ignore') # (1)!
class Model(BaseModel):
x: int
name: str
model_config = ConfigDict(extra='forbid')
user = User(name='John Doe', age=20) # (2)!
print(user)
#> name='John Doe'
```
try:
Model(x=1, y='a')
except ValidationError as exc:
print(exc)
"""
1 validation error for Model
y
Extra inputs are not permitted [type=extra_forbidden, input_value='a', input_type=str]
"""
```
1. This is the default behaviour.
2. The `age` argument is ignored.
Instead, with `extra='allow'`, the `age` argument is included:
```py
from pydantic import BaseModel, ConfigDict
- `'allow'`: Providing extra data is allowed and stored in the `__pydantic_extra__` dictionary attribute:
```python
from pydantic import BaseModel, ConfigDict
class User(BaseModel):
model_config = ConfigDict(extra='allow')
class Model(BaseModel):
x: int
name: str
model_config = ConfigDict(extra='allow')
user = User(name='John Doe', age=20) # (1)!
print(user)
#> name='John Doe' age=20
```
1. The `age` argument is included.
With `extra='forbid'`, an error is raised:
```py
from pydantic import BaseModel, ConfigDict, ValidationError
m = Model(x=1, y='a')
assert m.__pydantic_extra__ == {'y': 'a'}
```
By default, no validation will be applied to these extra items, but you can set a type for the values by overriding
the type annotation for `__pydantic_extra__`:
```python
from pydantic import BaseModel, ConfigDict, Field, ValidationError
class User(BaseModel):
model_config = ConfigDict(extra='forbid')
class Model(BaseModel):
__pydantic_extra__: dict[str, int] = Field(init=False) # (1)!
name: str
x: int
model_config = ConfigDict(extra='allow')
try:
User(name='John Doe', age=20)
except ValidationError as e:
print(e)
'''
1 validation error for User
age
Extra inputs are not permitted [type=extra_forbidden, input_value=20, input_type=int]
'''
```
"""
try:
Model(x=1, y='a')
except ValidationError as exc:
print(exc)
"""
1 validation error for Model
y
Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='a', input_type=str]
"""
m = Model(x=1, y='2')
assert m.x == 1
assert m.y == 2
assert m.model_dump() == {'x': 1, 'y': 2}
assert m.__pydantic_extra__ == {'y': 2}
```
1. The `= Field(init=False)` does not have any effect at runtime, but prevents the `__pydantic_extra__` field from
being included as a parameter to the model's `__init__` method by type checkers.
'''
frozen: bool
"""
Whether or not models are faux-immutable, i.e. whether `__setattr__` is allowed, and also generates
Whether models are faux-immutable, i.e. whether `__setattr__` is allowed, and also generates
a `__hash__()` method for the model. This makes instances of the model potentially hashable if all the
attributes are hashable. Defaults to `False`.
Note:
On V1, this setting was called `allow_mutation`, and was `True` by default.
On V1, the inverse of this setting was called `allow_mutation`, and was `True` by default.
"""
populate_by_name: bool
@@ -129,38 +170,77 @@ class ConfigDict(TypedDict, total=False):
Whether an aliased field may be populated by its name as given by the model
attribute, as well as the alias. Defaults to `False`.
Note:
The name of this configuration setting was changed in **v2.0** from
`allow_population_by_alias` to `populate_by_name`.
!!! warning
`populate_by_name` usage is not recommended in v2.11+ and will be deprecated in v3.
Instead, you should use the [`validate_by_name`][pydantic.config.ConfigDict.validate_by_name] configuration setting.
```py
from pydantic import BaseModel, ConfigDict, Field
When `validate_by_name=True` and `validate_by_alias=True`, this is strictly equivalent to the
previous behavior of `populate_by_name=True`.
In v2.11, we also introduced a [`validate_by_alias`][pydantic.config.ConfigDict.validate_by_alias] setting that introduces more fine grained
control for validation behavior.
class User(BaseModel):
model_config = ConfigDict(populate_by_name=True)
Here's how you might go about using the new settings to achieve the same behavior:
name: str = Field(alias='full_name') # (1)!
age: int
```python
from pydantic import BaseModel, ConfigDict, Field
class Model(BaseModel):
model_config = ConfigDict(validate_by_name=True, validate_by_alias=True)
user = User(full_name='John Doe', age=20) # (2)!
print(user)
#> name='John Doe' age=20
user = User(name='John Doe', age=20) # (3)!
print(user)
#> name='John Doe' age=20
```
my_field: str = Field(alias='my_alias') # (1)!
1. The field `'name'` has an alias `'full_name'`.
2. The model is populated by the alias `'full_name'`.
3. The model is populated by the field name `'name'`.
m = Model(my_alias='foo') # (2)!
print(m)
#> my_field='foo'
m = Model(my_alias='foo') # (3)!
print(m)
#> my_field='foo'
```
1. The field `'my_field'` has an alias `'my_alias'`.
2. The model is populated by the alias `'my_alias'`.
3. The model is populated by the attribute name `'my_field'`.
"""
use_enum_values: bool
"""
Whether to populate models with the `value` property of enums, rather than the raw enum.
This may be useful if you want to serialize the result of `model.model_dump()` later. Defaults to `False`.
!!! note
If you have an `Optional[Enum]` value that you set a default for, you need to use `validate_default=True`
for said Field to ensure that the `use_enum_values` flag takes effect on the default, as extracting an
enum's value occurs during validation, not serialization.
```python
from enum import Enum
from typing import Optional
from pydantic import BaseModel, ConfigDict, Field
class SomeEnum(Enum):
FOO = 'foo'
BAR = 'bar'
BAZ = 'baz'
class SomeModel(BaseModel):
model_config = ConfigDict(use_enum_values=True)
some_enum: SomeEnum
another_enum: Optional[SomeEnum] = Field(
default=SomeEnum.FOO, validate_default=True
)
model1 = SomeModel(some_enum=SomeEnum.BAR)
print(model1.model_dump())
#> {'some_enum': 'bar', 'another_enum': 'foo'}
model2 = SomeModel(some_enum=SomeEnum.BAR, another_enum=SomeEnum.BAZ)
print(model2.model_dump())
#> {'some_enum': 'bar', 'another_enum': 'baz'}
```
"""
validate_assignment: bool
@@ -171,7 +251,7 @@ class ConfigDict(TypedDict, total=False):
In case the user changes the data after the model is created, the model is _not_ revalidated.
```py
```python
from pydantic import BaseModel
class User(BaseModel):
@@ -190,7 +270,7 @@ class ConfigDict(TypedDict, total=False):
In case you want to revalidate the model when the data is changed, you can use `validate_assignment=True`:
```py
```python
from pydantic import BaseModel, ValidationError
class User(BaseModel, validate_assignment=True): # (1)!
@@ -219,7 +299,7 @@ class ConfigDict(TypedDict, total=False):
"""
Whether arbitrary types are allowed for field types. Defaults to `False`.
```py
```python
from pydantic import BaseModel, ConfigDict, ValidationError
# This is not a pydantic model, it's an arbitrary class
@@ -278,14 +358,20 @@ class ConfigDict(TypedDict, total=False):
loc_by_alias: bool
"""Whether to use the actual key provided in the data (e.g. alias) for error `loc`s rather than the field's name. Defaults to `True`."""
alias_generator: Callable[[str], str] | None
alias_generator: Callable[[str], str] | AliasGenerator | None
"""
A callable that takes a field name and returns an alias for it.
A callable that takes a field name and returns an alias for it
or an instance of [`AliasGenerator`][pydantic.aliases.AliasGenerator]. Defaults to `None`.
When using a callable, the alias generator is used for both validation and serialization.
If you want to use different alias generators for validation and serialization, you can use
[`AliasGenerator`][pydantic.aliases.AliasGenerator] instead.
If data source field names do not match your code style (e.g. CamelCase fields),
you can automatically generate aliases using `alias_generator`:
you can automatically generate aliases using `alias_generator`. Here's an example with
a basic callable:
```py
```python
from pydantic import BaseModel, ConfigDict
from pydantic.alias_generators import to_pascal
@@ -302,6 +388,30 @@ class ConfigDict(TypedDict, total=False):
#> {'Name': 'Filiz', 'LanguageCode': 'tr-TR'}
```
If you want to use different alias generators for validation and serialization, you can use
[`AliasGenerator`][pydantic.aliases.AliasGenerator].
```python
from pydantic import AliasGenerator, BaseModel, ConfigDict
from pydantic.alias_generators import to_camel, to_pascal
class Athlete(BaseModel):
first_name: str
last_name: str
sport: str
model_config = ConfigDict(
alias_generator=AliasGenerator(
validation_alias=to_camel,
serialization_alias=to_pascal,
)
)
athlete = Athlete(firstName='John', lastName='Doe', sport='track')
print(athlete.model_dump(by_alias=True))
#> {'FirstName': 'John', 'LastName': 'Doe', 'Sport': 'track'}
```
Note:
Pydantic offers three built-in alias generators: [`to_pascal`][pydantic.alias_generators.to_pascal],
[`to_camel`][pydantic.alias_generators.to_camel], and [`to_snake`][pydantic.alias_generators.to_snake].
@@ -315,9 +425,9 @@ class ConfigDict(TypedDict, total=False):
"""
allow_inf_nan: bool
"""Whether to allow infinity (`+inf` an `-inf`) and NaN values to float fields. Defaults to `True`."""
"""Whether to allow infinity (`+inf` an `-inf`) and NaN values to float and decimal fields. Defaults to `True`."""
json_schema_extra: dict[str, object] | JsonSchemaExtraCallable | None
json_schema_extra: JsonDict | JsonSchemaExtraCallable | None
"""A dict or callable to provide extra JSON schema properties. Defaults to `None`."""
json_encoders: dict[type[object], JsonEncoder] | None
@@ -342,7 +452,7 @@ class ConfigDict(TypedDict, total=False):
To configure strict mode for all fields on a model, you can set `strict=True` on the model.
```py
```python
from pydantic import BaseModel, ConfigDict
class Model(BaseModel):
@@ -370,16 +480,14 @@ class ConfigDict(TypedDict, total=False):
By default, model and dataclass instances are not revalidated during validation.
```py
from typing import List
```python
from pydantic import BaseModel
class User(BaseModel, revalidate_instances='never'): # (1)!
hobbies: List[str]
hobbies: list[str]
class SubUser(User):
sins: List[str]
sins: list[str]
class Transaction(BaseModel):
user: User
@@ -407,16 +515,14 @@ class ConfigDict(TypedDict, total=False):
If you want to revalidate instances during validation, you can set `revalidate_instances` to `'always'`
in the model's config.
```py
from typing import List
```python
from pydantic import BaseModel, ValidationError
class User(BaseModel, revalidate_instances='always'): # (1)!
hobbies: List[str]
hobbies: list[str]
class SubUser(User):
sins: List[str]
sins: list[str]
class Transaction(BaseModel):
user: User
@@ -450,16 +556,14 @@ class ConfigDict(TypedDict, total=False):
It's also possible to set `revalidate_instances` to `'subclass-instances'` to only revalidate instances
of subclasses of the model.
```py
from typing import List
```python
from pydantic import BaseModel
class User(BaseModel, revalidate_instances='subclass-instances'): # (1)!
hobbies: List[str]
hobbies: list[str]
class SubUser(User):
sins: List[str]
sins: list[str]
class Transaction(BaseModel):
user: User
@@ -494,13 +598,33 @@ class ConfigDict(TypedDict, total=False):
- `'float'` will serialize timedeltas to the total number of seconds.
"""
ser_json_bytes: Literal['utf8', 'base64']
ser_json_bytes: Literal['utf8', 'base64', 'hex']
"""
The encoding of JSON serialized bytes. Accepts the string values of `'utf8'` and `'base64'`.
Defaults to `'utf8'`.
The encoding of JSON serialized bytes. Defaults to `'utf8'`.
Set equal to `val_json_bytes` to get back an equal value after serialization round trip.
- `'utf8'` will serialize bytes to UTF-8 strings.
- `'base64'` will serialize bytes to URL safe base64 strings.
- `'hex'` will serialize bytes to hexadecimal strings.
"""
val_json_bytes: Literal['utf8', 'base64', 'hex']
"""
The encoding of JSON serialized bytes to decode. Defaults to `'utf8'`.
Set equal to `ser_json_bytes` to get back an equal value after serialization round trip.
- `'utf8'` will deserialize UTF-8 strings to bytes.
- `'base64'` will deserialize URL safe base64 strings to bytes.
- `'hex'` will deserialize hexadecimal strings to bytes.
"""
ser_json_inf_nan: Literal['null', 'constants', 'strings']
"""
The encoding of JSON serialized infinity and NaN float values. Defaults to `'null'`.
- `'null'` will serialize infinity and NaN values as `null`.
- `'constants'` will serialize infinity and NaN values as `Infinity` and `NaN`.
- `'strings'` will serialize infinity as string `"Infinity"` and NaN as string `"NaN"`.
"""
# whether to validate default values during validation, default False
@@ -508,17 +632,26 @@ class ConfigDict(TypedDict, total=False):
"""Whether to validate default values during validation. Defaults to `False`."""
validate_return: bool
"""whether to validate the return value from call validators. Defaults to `False`."""
"""Whether to validate the return value from call validators. Defaults to `False`."""
protected_namespaces: tuple[str, ...]
protected_namespaces: tuple[str | Pattern[str], ...]
"""
A `tuple` of strings that prevent model to have field which conflict with them.
Defaults to `('model_', )`).
A `tuple` of strings and/or patterns that prevent models from having fields with names that conflict with them.
For strings, we match on a prefix basis. For example, if 'dog' is in the protected namespace, 'dog_name' will be protected.
For patterns, we match on the entire field name. For example, if `re.compile(r'^dog$')` is in the protected namespace, 'dog' will be protected, but 'dog_name' will not be.
Defaults to `('model_validate', 'model_dump',)`.
Pydantic prevents collisions between model attributes and `BaseModel`'s own methods by
namespacing them with the prefix `model_`.
The reason we've selected these is to prevent collisions with other validation / dumping formats
in the future - ex, `model_validate_{some_newly_supported_format}`.
```py
Before v2.10, Pydantic used `('model_',)` as the default value for this setting to
prevent collisions between model attributes and `BaseModel`'s own methods. This was changed
in v2.10 given feedback that this restriction was limiting in AI and data science contexts,
where it is common to have fields with names like `model_id`, `model_input`, `model_output`, etc.
For more details, see https://github.com/pydantic/pydantic/issues/10315.
```python
import warnings
from pydantic import BaseModel
@@ -528,56 +661,65 @@ class ConfigDict(TypedDict, total=False):
try:
class Model(BaseModel):
model_prefixed_field: str
model_dump_something: str
except UserWarning as e:
print(e)
'''
Field "model_prefixed_field" has conflict with protected namespace "model_".
Field "model_dump_something" in Model has conflict with protected namespace "model_dump".
You may be able to resolve this warning by setting `model_config['protected_namespaces'] = ()`.
You may be able to resolve this warning by setting `model_config['protected_namespaces'] = ('model_validate',)`.
'''
```
You can customize this behavior using the `protected_namespaces` setting:
```py
```python {test="skip"}
import re
import warnings
from pydantic import BaseModel, ConfigDict
warnings.filterwarnings('error') # Raise warnings as errors
try:
with warnings.catch_warnings(record=True) as caught_warnings:
warnings.simplefilter('always') # Catch all warnings
class Model(BaseModel):
model_prefixed_field: str
safe_field: str
also_protect_field: str
protect_this: str
model_config = ConfigDict(
protected_namespaces=('protect_me_', 'also_protect_')
protected_namespaces=(
'protect_me_',
'also_protect_',
re.compile('^protect_this$'),
)
)
except UserWarning as e:
print(e)
for warning in caught_warnings:
print(f'{warning.message}')
'''
Field "also_protect_field" has conflict with protected namespace "also_protect_".
Field "also_protect_field" in Model has conflict with protected namespace "also_protect_".
You may be able to resolve this warning by setting `model_config['protected_namespaces'] = ('protect_me_', re.compile('^protect_this$'))`.
You may be able to resolve this warning by setting `model_config['protected_namespaces'] = ('protect_me_',)`.
Field "protect_this" in Model has conflict with protected namespace "re.compile('^protect_this$')".
You may be able to resolve this warning by setting `model_config['protected_namespaces'] = ('protect_me_', 'also_protect_')`.
'''
```
While Pydantic will only emit a warning when an item is in a protected namespace but does not actually have a collision,
an error _is_ raised if there is an actual collision with an existing attribute:
```py
from pydantic import BaseModel
```python
from pydantic import BaseModel, ConfigDict
try:
class Model(BaseModel):
model_validate: str
model_config = ConfigDict(protected_namespaces=('model_',))
except NameError as e:
print(e)
'''
@@ -592,7 +734,7 @@ class ConfigDict(TypedDict, total=False):
Pydantic shows the input value and type when it raises `ValidationError` during validation.
```py
```python
from pydantic import BaseModel, ValidationError
class Model(BaseModel):
@@ -611,7 +753,7 @@ class ConfigDict(TypedDict, total=False):
You can hide the input value and type by setting the `hide_input_in_errors` config to `True`.
```py
```python
from pydantic import BaseModel, ConfigDict, ValidationError
class Model(BaseModel):
@@ -632,27 +774,26 @@ class ConfigDict(TypedDict, total=False):
defer_build: bool
"""
Whether to defer model validator and serializer construction until the first model validation.
Whether to defer model validator and serializer construction until the first model validation. Defaults to `False`.
This can be useful to avoid the overhead of building models which are only
used nested within other models, or when you want to manually define type namespace via
[`Model.model_rebuild(_types_namespace=...)`][pydantic.BaseModel.model_rebuild]. Defaults to False.
[`Model.model_rebuild(_types_namespace=...)`][pydantic.BaseModel.model_rebuild].
Since v2.10, this setting also applies to pydantic dataclasses and TypeAdapter instances.
"""
plugin_settings: dict[str, object] | None
"""A `dict` of settings for plugins. Defaults to `None`.
See [Pydantic Plugins](../concepts/plugins.md) for details.
"""
"""A `dict` of settings for plugins. Defaults to `None`."""
schema_generator: type[_GenerateSchema] | None
"""
A custom core schema generator class to use when generating JSON schemas.
Useful if you want to change the way types are validated across an entire model/schema. Defaults to `None`.
!!! warning
`schema_generator` is deprecated in v2.10.
The `GenerateSchema` interface is subject to change, currently only the `string_schema` method is public.
See [#6737](https://github.com/pydantic/pydantic/pull/6737) for details.
Prior to v2.10, this setting was advertised as highly subject to change.
It's possible that this interface may once again become public once the internal core schema generation
API is more stable, but that will likely come after significant performance improvements have been made.
"""
json_schema_serialization_defaults_required: bool
@@ -666,7 +807,7 @@ class ConfigDict(TypedDict, total=False):
between validation and serialization, and don't mind fields with defaults being marked as not required during
serialization. See [#7209](https://github.com/pydantic/pydantic/issues/7209) for more details.
```py
```python
from pydantic import BaseModel, ConfigDict
class Model(BaseModel):
@@ -709,7 +850,7 @@ class ConfigDict(TypedDict, total=False):
the validation and serialization schemas (since both will use the specified schema), and so prevents the suffixes
from being added to the definition references.
```py
```python
from pydantic import BaseModel, ConfigDict, Json
class Model(BaseModel):
@@ -755,7 +896,7 @@ class ConfigDict(TypedDict, total=False):
Pydantic doesn't allow number types (`int`, `float`, `Decimal`) to be coerced as type `str` by default.
```py
```python
from decimal import Decimal
from pydantic import BaseModel, ConfigDict, ValidationError
@@ -787,5 +928,286 @@ class ConfigDict(TypedDict, total=False):
```
"""
regex_engine: Literal['rust-regex', 'python-re']
"""
The regex engine to be used for pattern validation.
Defaults to `'rust-regex'`.
- `rust-regex` uses the [`regex`](https://docs.rs/regex) Rust crate,
which is non-backtracking and therefore more DDoS resistant, but does not support all regex features.
- `python-re` uses the [`re`](https://docs.python.org/3/library/re.html) module,
which supports all regex features, but may be slower.
!!! note
If you use a compiled regex pattern, the python-re engine will be used regardless of this setting.
This is so that flags such as `re.IGNORECASE` are respected.
```python
from pydantic import BaseModel, ConfigDict, Field, ValidationError
class Model(BaseModel):
model_config = ConfigDict(regex_engine='python-re')
value: str = Field(pattern=r'^abc(?=def)')
print(Model(value='abcdef').value)
#> abcdef
try:
print(Model(value='abxyzcdef'))
except ValidationError as e:
print(e)
'''
1 validation error for Model
value
String should match pattern '^abc(?=def)' [type=string_pattern_mismatch, input_value='abxyzcdef', input_type=str]
'''
```
"""
validation_error_cause: bool
"""
If `True`, Python exceptions that were part of a validation failure will be shown as an exception group as a cause. Can be useful for debugging. Defaults to `False`.
Note:
Python 3.10 and older don't support exception groups natively; on those versions the backport must be installed: `pip install exceptiongroup`.
Note:
The structure of validation errors is likely to change in future Pydantic versions. Pydantic offers no guarantees about their structure. This should be used for visual traceback debugging only.
"""
use_attribute_docstrings: bool
'''
Whether docstrings of attributes (bare string literals immediately following the attribute declaration)
should be used for field descriptions. Defaults to `False`.
Available in Pydantic v2.7+.
```python
from pydantic import BaseModel, ConfigDict, Field
class Model(BaseModel):
model_config = ConfigDict(use_attribute_docstrings=True)
x: str
"""
Example of an attribute docstring
"""
y: int = Field(description="Description in Field")
"""
Description in Field overrides attribute docstring
"""
print(Model.model_fields["x"].description)
#> Example of an attribute docstring
print(Model.model_fields["y"].description)
#> Description in Field
```
This requires the source code of the class to be available at runtime.
!!! warning "Usage with `TypedDict` and stdlib dataclasses"
Due to current limitations, attribute docstrings detection may not work as expected when using
[`TypedDict`][typing.TypedDict] and stdlib dataclasses, in particular when:
- inheritance is being used.
- multiple classes have the same name in the same source file.
'''
cache_strings: bool | Literal['all', 'keys', 'none']
"""
Whether to cache strings to avoid constructing new Python objects. Defaults to `True`.
Enabling this setting should significantly improve validation performance while increasing memory usage slightly.
- `True` or `'all'` (the default): cache all strings
- `'keys'`: cache only dictionary keys
- `False` or `'none'`: no caching
!!! note
`True` or `'all'` is required to cache strings during general validation because
validators don't know if they're in a key or a value.
!!! tip
If repeated strings are rare, it's recommended to use `'keys'` or `'none'` to reduce memory usage,
as the performance benefit of caching is minimal in that case.
"""
validate_by_alias: bool
"""
Whether an aliased field may be populated by its alias. Defaults to `True`.
!!! note
In v2.11, `validate_by_alias` was introduced in conjunction with [`validate_by_name`][pydantic.ConfigDict.validate_by_name]
to empower users with more fine grained validation control. In <v2.11, disabling validation by alias was not possible.
Here's an example of disabling validation by alias:
```python
from pydantic import BaseModel, ConfigDict, Field
class Model(BaseModel):
model_config = ConfigDict(validate_by_name=True, validate_by_alias=False)
my_field: str = Field(validation_alias='my_alias') # (1)!
m = Model(my_field='foo') # (2)!
print(m)
#> my_field='foo'
```
1. The field `'my_field'` has an alias `'my_alias'`.
2. The model can only be populated by the attribute name `'my_field'`.
!!! warning
You cannot set both `validate_by_alias` and `validate_by_name` to `False`.
This would make it impossible to populate an attribute.
See [usage errors](../errors/usage_errors.md#validate-by-alias-and-name-false) for an example.
If you set `validate_by_alias` to `False`, under the hood, Pydantic dynamically sets
`validate_by_name` to `True` to ensure that validation can still occur.
"""
validate_by_name: bool
"""
Whether an aliased field may be populated by its name as given by the model
attribute. Defaults to `False`.
!!! note
In v2.0-v2.10, the `populate_by_name` configuration setting was used to specify
whether or not a field could be populated by its name **and** alias.
In v2.11, `validate_by_name` was introduced in conjunction with [`validate_by_alias`][pydantic.ConfigDict.validate_by_alias]
to empower users with more fine grained validation behavior control.
```python
from pydantic import BaseModel, ConfigDict, Field
class Model(BaseModel):
model_config = ConfigDict(validate_by_name=True, validate_by_alias=True)
my_field: str = Field(validation_alias='my_alias') # (1)!
m = Model(my_alias='foo') # (2)!
print(m)
#> my_field='foo'
m = Model(my_field='foo') # (3)!
print(m)
#> my_field='foo'
```
1. The field `'my_field'` has an alias `'my_alias'`.
2. The model is populated by the alias `'my_alias'`.
3. The model is populated by the attribute name `'my_field'`.
!!! warning
You cannot set both `validate_by_alias` and `validate_by_name` to `False`.
This would make it impossible to populate an attribute.
See [usage errors](../errors/usage_errors.md#validate-by-alias-and-name-false) for an example.
"""
serialize_by_alias: bool
"""
Whether an aliased field should be serialized by its alias. Defaults to `False`.
Note: In v2.11, `serialize_by_alias` was introduced to address the
[popular request](https://github.com/pydantic/pydantic/issues/8379)
for consistency with alias behavior for validation and serialization settings.
In v3, the default value is expected to change to `True` for consistency with the validation default.
```python
from pydantic import BaseModel, ConfigDict, Field
class Model(BaseModel):
model_config = ConfigDict(serialize_by_alias=True)
my_field: str = Field(serialization_alias='my_alias') # (1)!
m = Model(my_field='foo')
print(m.model_dump()) # (2)!
#> {'my_alias': 'foo'}
```
1. The field `'my_field'` has an alias `'my_alias'`.
2. The model is serialized using the alias `'my_alias'` for the `'my_field'` attribute.
"""
_TypeT = TypeVar('_TypeT', bound=type)
@overload
@deprecated('Passing `config` as a keyword argument is deprecated. Pass `config` as a positional argument instead.')
def with_config(*, config: ConfigDict) -> Callable[[_TypeT], _TypeT]: ...
@overload
def with_config(config: ConfigDict, /) -> Callable[[_TypeT], _TypeT]: ...
@overload
def with_config(**config: Unpack[ConfigDict]) -> Callable[[_TypeT], _TypeT]: ...
def with_config(config: ConfigDict | None = None, /, **kwargs: Any) -> Callable[[_TypeT], _TypeT]:
"""!!! abstract "Usage Documentation"
[Configuration with other types](../concepts/config.md#configuration-on-other-supported-types)
A convenience decorator to set a [Pydantic configuration](config.md) on a `TypedDict` or a `dataclass` from the standard library.
Although the configuration can be set using the `__pydantic_config__` attribute, it does not play well with type checkers,
especially with `TypedDict`.
!!! example "Usage"
```python
from typing_extensions import TypedDict
from pydantic import ConfigDict, TypeAdapter, with_config
@with_config(ConfigDict(str_to_lower=True))
class TD(TypedDict):
x: str
ta = TypeAdapter(TD)
print(ta.validate_python({'x': 'ABC'}))
#> {'x': 'abc'}
```
"""
if config is not None and kwargs:
raise ValueError('Cannot specify both `config` and keyword arguments')
if len(kwargs) == 1 and (kwargs_conf := kwargs.get('config')) is not None:
warnings.warn(
'Passing `config` as a keyword argument is deprecated. Pass `config` as a positional argument instead',
category=PydanticDeprecatedSince211,
stacklevel=2,
)
final_config = cast(ConfigDict, kwargs_conf)
else:
final_config = config if config is not None else cast(ConfigDict, kwargs)
def inner(class_: _TypeT, /) -> _TypeT:
# Ideally, we would check for `class_` to either be a `TypedDict` or a stdlib dataclass.
# However, the `@with_config` decorator can be applied *after* `@dataclass`. To avoid
# common mistakes, we at least check for `class_` to not be a Pydantic model.
from ._internal._utils import is_model_class
if is_model_class(class_):
raise PydanticUserError(
f'Cannot use `with_config` on {class_.__name__} as it is a Pydantic model',
code='with-config-on-model',
)
class_.__pydantic_config__ = final_config
return class_
return inner
__getattr__ = getattr_migration(__name__)