120 changes: 65 additions & 55 deletions benches/main.rs

Large diffs are not rendered by default.

26 changes: 22 additions & 4 deletions python/pydantic_core/_pydantic_core.pyi
@@ -97,6 +97,8 @@ class SchemaValidator:
context: Any | None = None,
self_instance: Any | None = None,
allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
by_alias: bool | None = None,
by_name: bool | None = None,
Comment on lines +100 to +101 (Contributor Author):
It's important that these have the `| None` specification because we want to be able to detect that a value is unset and enforce a default.
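A minimal sketch of the sentinel pattern this enables (hypothetical helper, not the actual pydantic-core internals): a `None` default lets an explicit `False` from the caller be distinguished from "argument not provided", so a config-level default applies only in the latter case.

```python
# Illustration only: why `bool | None = None` rather than `bool = False`.
def resolve_by_alias(call_value: bool | None, config_default: bool = True) -> bool:
    if call_value is None:   # caller did not pass the argument -> use the config default
        return config_default
    return call_value        # an explicit True/False from the caller always wins


assert resolve_by_alias(None) is True     # unset falls back to the config default
assert resolve_by_alias(False) is False   # explicit False is not mistaken for "unset"
```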

) -> Any:
"""
Validate a Python object against the schema and return the validated object.
@@ -114,6 +116,8 @@ class SchemaValidator:
allow_partial: Whether to allow partial validation; if `True` errors in the last element of sequences
and mappings are ignored.
`'trailing-strings'` means any final unfinished JSON string is included in the result.
by_alias: Whether to use the field's alias when validating against the provided input data.
by_name: Whether to use the field's name when validating against the provided input data.

Raises:
ValidationError: If validation fails.
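A short usage sketch of the new per-call flags (schema shape assumed for illustration; the defaults come from `validate_by_alias`/`validate_by_name` in the config):

```python
# Sketch: per-call control over alias vs. name lookup (assumed schema for illustration).
from pydantic_core import SchemaValidator, core_schema

schema = core_schema.model_fields_schema(
    {'name': core_schema.model_field(core_schema.str_schema(), validation_alias='Name')}
)
v = SchemaValidator(schema)

v.validate_python({'Name': 'x'})                                 # aliases accepted by default
v.validate_python({'name': 'x'}, by_name=True)                   # additionally accept the field name
v.validate_python({'name': 'x'}, by_alias=False, by_name=True)   # names only
```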
@@ -130,6 +134,8 @@ class SchemaValidator:
from_attributes: bool | None = None,
context: Any | None = None,
self_instance: Any | None = None,
by_alias: bool | None = None,
by_name: bool | None = None,
) -> bool:
"""
Similar to [`validate_python()`][pydantic_core.SchemaValidator.validate_python] but returns a boolean.
@@ -148,6 +154,8 @@ class SchemaValidator:
context: Any | None = None,
self_instance: Any | None = None,
allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
by_alias: bool | None = None,
by_name: bool | None = None,
) -> Any:
"""
Validate JSON data directly against the schema and return the validated Python object.
@@ -168,6 +176,8 @@ class SchemaValidator:
allow_partial: Whether to allow partial validation; if `True` incomplete JSON will be parsed successfully
and errors in the last element of sequences and mappings are ignored.
`'trailing-strings'` means any final unfinished JSON string is included in the result.
by_alias: Whether to use the field's alias when validating against the provided input data.
by_name: Whether to use the field's name when validating against the provided input data.

Raises:
ValidationError: If validation fails or if the JSON data is invalid.
@@ -183,6 +193,8 @@ class SchemaValidator:
strict: bool | None = None,
context: Any | None = None,
allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
by_alias: bool | None = None,
by_name: bool | None = None,
) -> Any:
"""
Validate a string against the schema and return the validated Python object.
@@ -199,6 +211,8 @@ class SchemaValidator:
allow_partial: Whether to allow partial validation; if `True` errors in the last element of sequences
and mappings are ignored.
`'trailing-strings'` means any final unfinished JSON string is included in the result.
by_alias: Whether to use the field's alias when validating against the provided input data.
by_name: Whether to use the field's name when validating against the provided input data.

Raises:
ValidationError: If validation fails or if the JSON data is invalid.
@@ -216,6 +230,8 @@ class SchemaValidator:
strict: bool | None = None,
from_attributes: bool | None = None,
context: Any | None = None,
by_alias: bool | None = None,
by_name: bool | None = None,
) -> dict[str, Any] | tuple[dict[str, Any], dict[str, Any] | None, set[str]]:
"""
Validate an assignment to a field on a model.
Expand All @@ -230,6 +246,8 @@ class SchemaValidator:
If `None`, the value of [`CoreConfig.from_attributes`][pydantic_core.core_schema.CoreConfig] is used.
context: The context to use for validation, this is passed to functional validators as
[`info.context`][pydantic_core.core_schema.ValidationInfo.context].
by_alias: Whether to use the field's alias when validating against the provided input data.
by_name: Whether to use the field's name when validating against the provided input data.

Raises:
ValidationError: If validation fails.
@@ -283,7 +301,7 @@ class SchemaSerializer:
mode: str | None = None,
include: _IncEx | None = None,
exclude: _IncEx | None = None,
by_alias: bool = True,
by_alias: bool | None = None,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
@@ -329,7 +347,7 @@ class SchemaSerializer:
indent: int | None = None,
include: _IncEx | None = None,
exclude: _IncEx | None = None,
by_alias: bool = True,
by_alias: bool | None = None,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
@@ -374,7 +392,7 @@ def to_json(
indent: int | None = None,
include: _IncEx | None = None,
exclude: _IncEx | None = None,
by_alias: bool = True,
by_alias: bool | None = None,
exclude_none: bool = False,
round_trip: bool = False,
timedelta_mode: Literal['iso8601', 'float'] = 'iso8601',
@@ -450,7 +468,7 @@ def to_jsonable_python(
*,
include: _IncEx | None = None,
exclude: _IncEx | None = None,
by_alias: bool = True,
by_alias: bool | None = None,
exclude_none: bool = False,
round_trip: bool = False,
timedelta_mode: Literal['iso8601', 'float'] = 'iso8601',
34 changes: 14 additions & 20 deletions python/pydantic_core/core_schema.py
@@ -54,8 +54,6 @@ class CoreConfig(TypedDict, total=False):
`field_names` to construct error `loc`s. Default is `True`.
revalidate_instances: Whether instances of models and dataclasses should re-validate. Default is 'never'.
validate_default: Whether to validate default values during validation. Default is `False`.
populate_by_name: Whether an aliased field may be populated by its name as given by the model attribute,
as well as the alias. (Replaces 'allow_population_by_field_name' in Pydantic v1.) Default is `False`.
str_max_length: The maximum length for string fields.
str_min_length: The minimum length for string fields.
str_strip_whitespace: Whether to strip whitespace from string fields.
@@ -74,6 +72,9 @@ class CoreConfig(TypedDict, total=False):
regex_engine: The regex engine to use for regex pattern validation. Default is 'rust-regex'. See `StringSchema`.
cache_strings: Whether to cache strings. Default is `True`, `True` or `'all'` is required to cache strings
during general validation since validators don't know if they're in a key or a value.
validate_by_alias: Whether to use the field's alias when validating against the provided input data. Default is `True`.
validate_by_name: Whether to use the field's name when validating against the provided input data. Default is `False`. Replacement for `populate_by_name`.
serialize_by_alias: Whether to serialize by alias. Default is `False`, expected to change to `True` in V3.
"""

title: str
@@ -91,7 +92,6 @@ class CoreConfig(TypedDict, total=False):
# whether to validate default values during validation, default False
validate_default: bool
# used on typed-dicts and arguments
populate_by_name: bool # replaces `allow_population_by_field_name` in pydantic v1
# fields related to string fields only
str_max_length: int
str_min_length: int
@@ -111,6 +111,9 @@ class CoreConfig(TypedDict, total=False):
coerce_numbers_to_str: bool # default: False
regex_engine: Literal['rust-regex', 'python-re'] # default: 'rust-regex'
cache_strings: Union[bool, Literal['all', 'keys', 'none']] # default: 'True'
validate_by_alias: bool # default: True
validate_by_name: bool # default: False
serialize_by_alias: bool # default: False


IncExCall: TypeAlias = 'set[int | str] | dict[int | str, IncExCall] | None'
@@ -2888,7 +2891,6 @@ class TypedDictSchema(TypedDict, total=False):
# all these values can be set via config, equivalent fields have `typed_dict_` prefix
extra_behavior: ExtraBehavior
total: bool # default: True
populate_by_name: bool # replaces `allow_population_by_field_name` in pydantic v1
Comment (Contributor Author):
We remove this specification from core schemas (other than arguments) because it can be specified through configuration, and that's how it's practically done in pydantic during schema builds.
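A sketch of the intended replacement (field names assumed): instead of `populate_by_name=True` on the schema, the behaviour now comes from config, or from the per-call `by_name=` argument.

```python
# Sketch: config-driven replacement for the removed per-schema `populate_by_name`.
from pydantic_core import SchemaValidator, core_schema

schema = core_schema.typed_dict_schema(
    {'field_a': core_schema.typed_dict_field(core_schema.str_schema(), validation_alias='FieldA')}
)
v = SchemaValidator(schema, core_schema.CoreConfig(validate_by_name=True))
v.validate_python({'field_a': 'hello'})  # populated by name via config
v.validate_python({'FieldA': 'hello'})   # aliases still accepted by default
```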

ref: str
metadata: dict[str, Any]
serialization: SerSchema
@@ -2904,7 +2906,6 @@ def typed_dict_schema(
extras_schema: CoreSchema | None = None,
extra_behavior: ExtraBehavior | None = None,
total: bool | None = None,
populate_by_name: bool | None = None,
ref: str | None = None,
metadata: dict[str, Any] | None = None,
serialization: SerSchema | None = None,
@@ -2938,7 +2939,6 @@ class MyTypedDict(TypedDict):
metadata: Any other information you want to include with the schema, not used by pydantic-core
extra_behavior: The extra behavior to use for the typed dict
total: Whether the typed dict is total, otherwise uses `typed_dict_total` from config
populate_by_name: Whether the typed dict should populate by name
serialization: Custom serialization schema
"""
return _dict_not_none(
@@ -2950,7 +2950,6 @@ class MyTypedDict(TypedDict):
extras_schema=extras_schema,
extra_behavior=extra_behavior,
total=total,
populate_by_name=populate_by_name,
ref=ref,
metadata=metadata,
serialization=serialization,
@@ -3012,9 +3011,7 @@ class ModelFieldsSchema(TypedDict, total=False):
computed_fields: list[ComputedField]
strict: bool
extras_schema: CoreSchema
# all these values can be set via config, equivalent fields have `typed_dict_` prefix
extra_behavior: ExtraBehavior
populate_by_name: bool # replaces `allow_population_by_field_name` in pydantic v1
from_attributes: bool
ref: str
metadata: dict[str, Any]
@@ -3029,7 +3026,6 @@ def model_fields_schema(
strict: bool | None = None,
extras_schema: CoreSchema | None = None,
extra_behavior: ExtraBehavior | None = None,
populate_by_name: bool | None = None,
from_attributes: bool | None = None,
ref: str | None = None,
metadata: dict[str, Any] | None = None,
@@ -3058,7 +3054,6 @@ def model_fields_schema(
ref: optional unique identifier of the schema, used to reference the schema in other places
metadata: Any other information you want to include with the schema, not used by pydantic-core
extra_behavior: The extra behavior to use for the typed dict
populate_by_name: Whether the typed dict should populate by name
from_attributes: Whether the typed dict should be populated from attributes
serialization: Custom serialization schema
"""
@@ -3070,7 +3065,6 @@ def model_fields_schema(
strict=strict,
extras_schema=extras_schema,
extra_behavior=extra_behavior,
populate_by_name=populate_by_name,
from_attributes=from_attributes,
ref=ref,
metadata=metadata,
@@ -3254,7 +3248,6 @@ class DataclassArgsSchema(TypedDict, total=False):
dataclass_name: Required[str]
fields: Required[list[DataclassField]]
computed_fields: list[ComputedField]
populate_by_name: bool # default: False
collect_init_only: bool # default: False
ref: str
metadata: dict[str, Any]
@@ -3267,7 +3260,6 @@ def dataclass_args_schema(
fields: list[DataclassField],
*,
computed_fields: list[ComputedField] | None = None,
populate_by_name: bool | None = None,
collect_init_only: bool | None = None,
ref: str | None = None,
metadata: dict[str, Any] | None = None,
@@ -3295,7 +3287,6 @@ def dataclass_args_schema(
dataclass_name: The name of the dataclass being validated
fields: The fields to use for the dataclass
computed_fields: Computed fields to use when serializing the dataclass
populate_by_name: Whether to populate by name
collect_init_only: Whether to collect init only fields into a dict to pass to `__post_init__`
ref: optional unique identifier of the schema, used to reference the schema in other places
metadata: Any other information you want to include with the schema, not used by pydantic-core
@@ -3307,7 +3298,6 @@ def dataclass_args_schema(
dataclass_name=dataclass_name,
fields=fields,
computed_fields=computed_fields,
populate_by_name=populate_by_name,
collect_init_only=collect_init_only,
ref=ref,
metadata=metadata,
@@ -3436,7 +3426,8 @@ def arguments_parameter(
class ArgumentsSchema(TypedDict, total=False):
type: Required[Literal['arguments']]
arguments_schema: Required[list[ArgumentsParameter]]
populate_by_name: bool
validate_by_name: bool
validate_by_alias: bool
var_args_schema: CoreSchema
var_kwargs_mode: VarKwargsMode
var_kwargs_schema: CoreSchema
@@ -3448,7 +3439,8 @@ class ArgumentsSchema(TypedDict, total=False):
def arguments_schema(
arguments: list[ArgumentsParameter],
*,
populate_by_name: bool | None = None,
validate_by_name: bool | None = None,
validate_by_alias: bool | None = None,
var_args_schema: CoreSchema | None = None,
var_kwargs_mode: VarKwargsMode | None = None,
var_kwargs_schema: CoreSchema | None = None,
@@ -3475,7 +3467,8 @@ def arguments_schema(

Args:
arguments: The arguments to use for the arguments schema
populate_by_name: Whether to populate by name
validate_by_name: Whether to populate by the parameter names, defaults to `False`.
validate_by_alias: Whether to populate by the parameter aliases, defaults to `True`.
var_args_schema: The variable args schema to use for the arguments schema
var_kwargs_mode: The validation mode to use for variadic keyword arguments. If `'uniform'`, every value of the
keyword arguments will be validated against the `var_kwargs_schema` schema. If `'unpacked-typed-dict'`,
@@ -3488,7 +3481,8 @@ def arguments_schema(
return _dict_not_none(
type='arguments',
arguments_schema=arguments,
populate_by_name=populate_by_name,
validate_by_name=validate_by_name,
validate_by_alias=validate_by_alias,
var_args_schema=var_args_schema,
var_kwargs_mode=var_kwargs_mode,
var_kwargs_schema=var_kwargs_schema,
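For `arguments_schema`, the flags stay on the schema but are renamed; a usage sketch (parameter and alias names assumed, dict input treated as keyword arguments):

```python
# Sketch: the renamed per-schema flags on `arguments_schema`.
from pydantic_core import SchemaValidator, core_schema

args = core_schema.arguments_schema(
    [core_schema.arguments_parameter('count', core_schema.int_schema(), alias='Count')],
    validate_by_name=True,
    validate_by_alias=False,
)
v = SchemaValidator(args)
v.validate_python({'count': 3})  # the parameter name is accepted
```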
2 changes: 1 addition & 1 deletion src/errors/validation_exception.rs
@@ -344,7 +344,7 @@ impl ValidationError {
let extra = state.extra(
py,
&SerMode::Json,
true,
None,
false,
false,
true,
44 changes: 44 additions & 0 deletions src/lookup_key.rs
@@ -577,3 +577,47 @@ fn py_get_attrs<'py>(obj: &Bound<'py, PyAny>, attr_name: &Py<PyString>) -> PyRes
}
}
}

#[derive(Debug)]
#[allow(clippy::struct_field_names)]
pub struct LookupKeyCollection {
by_name: LookupKey,
by_alias: Option<LookupKey>,
by_alias_then_name: Option<LookupKey>,
}

impl LookupKeyCollection {
pub fn new(py: Python, validation_alias: Option<Bound<'_, PyAny>>, field_name: &str) -> PyResult<Self> {
let by_name = LookupKey::from_string(py, field_name);

if let Some(va) = validation_alias {
let by_alias = Some(LookupKey::from_py(py, &va, None)?);
let by_alias_then_name = Some(LookupKey::from_py(py, &va, Some(field_name))?);
Ok(Self {
by_name,
by_alias,
by_alias_then_name,
})
} else {
Ok(Self {
by_name,
by_alias: None,
by_alias_then_name: None,
})
}
}

pub fn select(&self, validate_by_alias: bool, validate_by_name: bool) -> PyResult<&LookupKey> {
let lookup_key_selection = match (validate_by_alias, validate_by_name) {
(true, true) => self.by_alias_then_name.as_ref().unwrap_or(&self.by_name),
(true, false) => self.by_alias.as_ref().unwrap_or(&self.by_name),
(false, true) => &self.by_name,
(false, false) => {
// Note: we shouldn't hit this branch much, as this is enforced in `pydantic` with a `PydanticUserError`
// at config creation time / validation function call time.
return py_schema_err!("`validate_by_name` and `validate_by_alias` cannot both be set to `False`.");
}
};
Ok(lookup_key_selection)
}
}
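The decision table implemented by `select` above, restated as a small Python sketch (behavioural summary only, not the actual implementation):

```python
# Sketch of LookupKeyCollection.select: which lookup key is used for each flag combination.
def select(validate_by_alias: bool, validate_by_name: bool, has_alias: bool) -> str:
    if not validate_by_alias and not validate_by_name:
        raise ValueError('`validate_by_name` and `validate_by_alias` cannot both be `False`')
    if validate_by_alias and validate_by_name:
        return 'alias, then name' if has_alias else 'name'  # by_alias_then_name, else by_name
    if validate_by_alias:
        return 'alias' if has_alias else 'name'             # by_alias, else by_name
    return 'name'                                           # by_name only
```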
6 changes: 4 additions & 2 deletions src/serializers/computed_fields.rs
@@ -98,7 +98,7 @@ impl ComputedFields {
exclude: next_exclude.as_ref(),
extra: &field_extra,
};
let key = match extra.by_alias {
let key = match extra.serialize_by_alias_or(computed_field.serialize_by_alias) {
true => computed_field.alias.as_str(),
false => computed_field.property_name.as_str(),
};
@@ -116,6 +116,7 @@ struct ComputedField {
serializer: CombinedSerializer,
alias: String,
alias_py: Py<PyString>,
serialize_by_alias: Option<bool>,
}

impl ComputedField {
@@ -139,6 +140,7 @@ impl ComputedField {
serializer,
alias: alias_py.extract()?,
alias_py: alias_py.into(),
serialize_by_alias: config.get_as(intern!(py, "serialize_by_alias"))?,
})
}

@@ -163,7 +165,7 @@ impl ComputedField {
if extra.exclude_none && value.is_none(py) {
return Ok(());
}
let key = match extra.by_alias {
let key = match extra.serialize_by_alias_or(self.serialize_by_alias) {
true => self.alias_py.bind(py),
false => property_name_py,
};
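The observable effect of the `extra.serialize_by_alias_or(...)` fallback, sketched from Python (schema and field names assumed): the per-call `by_alias` now defaults to `None` and defers to the `serialize_by_alias` config value.

```python
# Sketch: per-call `by_alias=None` defers to the `serialize_by_alias` config value.
from pydantic_core import SchemaSerializer, core_schema

schema = core_schema.typed_dict_schema(
    {'user_id': core_schema.typed_dict_field(core_schema.int_schema(), serialization_alias='userId')}
)
s = SchemaSerializer(schema, core_schema.CoreConfig(serialize_by_alias=True))

s.to_python({'user_id': 1})                  # expected: {'userId': 1} (config default applies)
s.to_python({'user_id': 1}, by_alias=False)  # expected: {'user_id': 1} (explicit call value wins)
```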