Reference (Gold): pydantic

Pytest Summary for tests

status count
passed 5091
skipped 38
xfailed 12
total 5141
collected 5141

Failed pytests (all 12 are expected failures marked xfail):

test_computed_fields.py::test_multiple_references_to_schema[make_typed_dict]

model_factory = 

    @pytest.mark.parametrize(
        'model_factory',
        [
            make_base_model,
            pytest.param(
                make_typed_dict,
                marks=pytest.mark.xfail(
                    reason='computed fields do not work with TypedDict yet. See https://github.com/pydantic/pydantic-core/issues/657'
                ),
            ),
            make_dataclass,
        ],
    )
    def test_multiple_references_to_schema(model_factory: Callable[[], Any]) -> None:
        """
        https://github.com/pydantic/pydantic/issues/5980
        """

        model = model_factory()

        ta = TypeAdapter(model)

>       assert ta.dump_python(model()) == {'comp_1': {}, 'comp_2': {}}

tests/test_computed_fields.py:726: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
pydantic/type_adapter.py:142: in wrapped
    return func(self, *args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
instance = {}

    @_frame_depth(1)
    def dump_python(
        self,
        instance: T,
        /,
        *,
        mode: Literal['json', 'python'] = 'python',
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        round_trip: bool = False,
        warnings: bool | Literal['none', 'warn', 'error'] = True,
        serialize_as_any: bool = False,
        context: dict[str, Any] | None = None,
    ) -> Any:
        """Dump an instance of the adapted type to a Python object.

        Args:
            instance: The Python object to serialize.
            mode: The output format.
            include: Fields to include in the output.
            exclude: Fields to exclude from the output.
            by_alias: Whether to use alias names for field names.
            exclude_unset: Whether to exclude unset fields.
            exclude_defaults: Whether to exclude fields with default values.
            exclude_none: Whether to exclude fields with None values.
            round_trip: Whether to output the serialized data in a way that is compatible with deserialization.
            warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
                "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
            serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
            context: Additional context to pass to the serializer.

        Returns:
            The serialized object.
        """
>       return self.serializer.to_python(
            instance,
            mode=mode,
            by_alias=by_alias,
            include=include,
            exclude=exclude,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
            serialize_as_any=serialize_as_any,
            context=context,
        )
E       AttributeError: 'dict' object has no attribute 'comp_1'

pydantic/type_adapter.py:458: AttributeError
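
For comparison, the `make_base_model` parametrization of this test passes. A minimal sketch of what such a factory builds, assuming illustrative names `Inner`/`Outer` (not taken from the test source): two computed fields whose return type references the same schema, dumped through a `TypeAdapter`.

    # Hedged sketch of the passing BaseModel case; Inner/Outer are illustrative names.
    from pydantic import BaseModel, TypeAdapter, computed_field

    class Inner(BaseModel):
        pass

    class Outer(BaseModel):
        @computed_field
        @property
        def comp_1(self) -> Inner:
            return Inner()

        @computed_field
        @property
        def comp_2(self) -> Inner:
            return Inner()

    ta = TypeAdapter(Outer)
    assert ta.dump_python(Outer()) == {'comp_1': {}, 'comp_2': {}}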

test_discriminated_union.py::test_presence_of_discriminator_when_generating_type_adaptor_json_schema_definitions

@pytest.mark.xfail(reason='Issue not yet fixed, see: https://github.com/pydantic/pydantic/issues/8271.')
    def test_presence_of_discriminator_when_generating_type_adaptor_json_schema_definitions() -> None:
        class ItemType(str, Enum):
            ITEM1 = 'item1'
            ITEM2 = 'item2'

        class CreateItem1(BaseModel):
            item_type: Annotated[Literal[ItemType.ITEM1], Field(alias='type')]
            id: int

        class CreateItem2(BaseModel):
            item_type: Annotated[Literal[ItemType.ITEM2], Field(alias='type')]
            id: int

        class CreateObjectDto(BaseModel):
            id: int
            items: List[
                Annotated[
                    Union[
                        CreateItem1,
                        CreateItem2,
                    ],
                    Field(discriminator='item_type'),
                ]
            ]

        adaptor = TypeAdapter(
            Annotated[CreateObjectDto, FieldInfo(examples=[{'id': 1, 'items': [{'id': 3, 'type': 'ITEM1'}]}])]
        )

        schema_map, definitions = GenerateJsonSchema().generate_definitions([(adaptor, 'validation', adaptor.core_schema)])
>       assert definitions == {
            'CreateItem1': {
                'properties': {'id': {'title': 'Id', 'type': 'integer'}, 'type': {'const': 'item1', 'title': 'Type'}},
                'required': ['type', 'id'],
                'title': 'CreateItem1',
                'type': 'object',
            },
            'CreateItem2': {
                'properties': {'id': {'title': 'Id', 'type': 'integer'}, 'type': {'const': 'item2', 'title': 'Type'}},
                'required': ['type', 'id'],
                'title': 'CreateItem2',
                'type': 'object',
            },
            'CreateObjectDto': {
                'properties': {
                    'id': {'title': 'Id', 'type': 'integer'},
                    'items': {
                        'items': {
                            'discriminator': {
                                'mapping': {'item1': '#/$defs/CreateItem1', 'item2': '#/$defs/CreateItem2'},
                                'propertyName': 'type',
                            },
                            'oneOf': [{'$ref': '#/$defs/CreateItem1'}, {'$ref': '#/$defs/CreateItem2'}],
                        },
                        'title': 'Items',
                        'type': 'array',
                    },
                },
                'required': ['id', 'items'],
                'title': 'CreateObjectDto',
                'type': 'object',
            },
        }
E       AssertionError: assert {'CreateItem1...e': 'object'}} == {'CreateItem1...e': 'object'}}
E         
E         Differing items:
E         {'CreateItem2': {'properties': {'id': {'title': 'Id', 'type': 'integer'}, 'type': {'const': 'item2', 'enum': ['item2'], 'title': 'Type', 'type': 'string'}}, 'required': ['type', 'id'], 'title': 'CreateItem2', 'type': 'object'}} != {'CreateItem2': {'properties': {'id': {'title': 'Id', 'type': 'integer'}, 'type': {'const': 'item2', 'title': 'Type'}}, 'required': ['type', 'id'], 'title': 'CreateItem2', 'type': 'object'}}
E         {'CreateItem1': {'properties': {'id': {'title': 'Id', 'type': 'integer'}, 'type': {'const': 'item1', 'enum': ['item1'], 'title': 'Type', 't...
E         
E         ...Full output truncated (3 lines hidden), use '-vv' to show

tests/test_discriminated_union.py:1742: AssertionError

test_discriminated_union.py::test_discriminated_union_model_dump_with_nested_class

@pytest.mark.xfail(
        reason='model_dump does not properly serialize the discriminator field to string if it is using an Enum. Issue: https://github.com/pydantic/pydantic/issues/9235'
    )
    def test_discriminated_union_model_dump_with_nested_class():
        class SomeEnum(str, Enum):
            CAT = 'cat'
            DOG = 'dog'

        class Dog(BaseModel):
            type: Literal[SomeEnum.DOG] = SomeEnum.DOG
            name: str

        class Cat(BaseModel):
            type: Literal[SomeEnum.CAT] = SomeEnum.CAT
            name: str

        class Yard(BaseModel):
            pet: Union[Dog, Cat] = Field(discriminator='type')

        yard = Yard(pet=Dog(name='Rex'))
        yard_dict = yard.model_dump(mode='json')
        assert isinstance(yard_dict['pet']['type'], str)
>       assert not isinstance(yard_dict['pet']['type'], SomeEnum)
E       AssertionError: assert not True
E        +  where True = isinstance(, )

tests/test_discriminated_union.py:2058: AssertionError
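
Until the issue above is fixed, one way to obtain a plain-string discriminator is to round-trip through JSON text, since the JSON encoder has to emit the enum as a string. A minimal sketch under that assumption, reusing the same class layout as the test:

    import json
    from enum import Enum
    from typing import Literal, Union

    from pydantic import BaseModel, Field

    class SomeEnum(str, Enum):
        CAT = 'cat'
        DOG = 'dog'

    class Dog(BaseModel):
        type: Literal[SomeEnum.DOG] = SomeEnum.DOG
        name: str

    class Cat(BaseModel):
        type: Literal[SomeEnum.CAT] = SomeEnum.CAT
        name: str

    class Yard(BaseModel):
        pet: Union[Dog, Cat] = Field(discriminator='type')

    yard = Yard(pet=Dog(name='Rex'))
    # model_dump_json() serializes the enum as a JSON string, so after json.loads
    # the discriminator is a plain str, unlike model_dump(mode='json') in the xfail above.
    pet = json.loads(yard.model_dump_json())['pet']
    assert isinstance(pet['type'], str) and not isinstance(pet['type'], SomeEnum)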

test_generics.py::test_generic_model_as_parameter_to_generic_type_alias

@pytest.mark.xfail(reason='Generic models are not type aliases', raises=TypeError)
    def test_generic_model_as_parameter_to_generic_type_alias() -> None:
        T = TypeVar('T')

        class GenericPydanticModel(BaseModel, Generic[T]):
            x: T

        GenericPydanticModelList = List[GenericPydanticModel[T]]
>       GenericPydanticModelList[int]

tests/test_generics.py:2236: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/root/.local/share/uv/python/cpython-3.12.6-linux-x86_64-gnu/lib/python3.12/typing.py:398: in inner
    return func(*args, **kwds)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = typing.List[tests.test_generics.test_generic_model_as_parameter_to_generic_type_alias..GenericPydanticModel]
args = 

    @_tp_cache
    def __getitem__(self, args):
        # Parameterizes an already-parameterized object.
        #
        # For example, we arrive here doing something like:
        #   T1 = TypeVar('T1')
        #   T2 = TypeVar('T2')
        #   T3 = TypeVar('T3')
        #   class A(Generic[T1]): pass
        #   B = A[T2]  # B is a _GenericAlias
        #   C = B[T3]  # Invokes _GenericAlias.__getitem__
        #
        # We also arrive here when parameterizing a generic `Callable` alias:
        #   T = TypeVar('T')
        #   C = Callable[[T], None]
        #   C[int]  # Invokes _GenericAlias.__getitem__

        if self.__origin__ in (Generic, Protocol):
            # Can't subscript Generic[...] or Protocol[...].
            raise TypeError(f"Cannot subscript already-subscripted {self}")
        if not self.__parameters__:
>           raise TypeError(f"{self} is not a generic class")
E           TypeError: typing.List[tests.test_generics.test_generic_model_as_parameter_to_generic_type_alias..GenericPydanticModel] is not a generic class

/root/.local/share/uv/python/cpython-3.12.6-linux-x86_64-gnu/lib/python3.12/typing.py:1315: TypeError

test_generics.py::test_variadic_generic_init

@pytest.mark.skipif(sys.version_info < (3, 11), reason='requires python 3.11 or higher')
    @pytest.mark.xfail(
        reason='TODO: Variadic generic parametrization is not supported yet;'
        ' Issue: https://github.com/pydantic/pydantic/issues/5804'
    )
    def test_variadic_generic_init():
        class ComponentModel(BaseModel):
            pass

        class Wrench(ComponentModel):
            pass

        class Screwdriver(ComponentModel):
            pass

        ComponentVar = TypeVar('ComponentVar', bound=ComponentModel)
        NumberOfComponents = TypeVarTuple('NumberOfComponents')

>       class VariadicToolbox(BaseModel, Generic[ComponentVar, Unpack[NumberOfComponents]]):

tests/test_generics.py:2497: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
pydantic/_internal/_model_construction.py:205: in __new__
    complete_model_class(
pydantic/_internal/_model_construction.py:534: in complete_model_class
    schema = cls.__get_pydantic_core_schema__(cls, handler)
pydantic/main.py:643: in __get_pydantic_core_schema__
    return handler(source)
pydantic/_internal/_schema_generation_shared.py:83: in __call__
    schema = self._handler(source_type)
pydantic/_internal/_generate_schema.py:512: in generate_schema
    schema = self._generate_schema_inner(obj)
pydantic/_internal/_generate_schema.py:784: in _generate_schema_inner
    return self._model_schema(obj)
pydantic/_internal/_generate_schema.py:593: in _model_schema
    self._computed_field_schema(d, decorators.field_serializers)
pydantic/_internal/_generate_schema.py:1768: in _computed_field_schema
    return_type_schema = self.generate_schema(return_type)
pydantic/_internal/_generate_schema.py:512: in generate_schema
    schema = self._generate_schema_inner(obj)
pydantic/_internal/_generate_schema.py:789: in _generate_schema_inner
    return self.match_type(obj)
pydantic/_internal/_generate_schema.py:871: in match_type
    return self._match_generic_type(obj, origin)
pydantic/_internal/_generate_schema.py:897: in _match_generic_type
    return self._tuple_schema(obj)
pydantic/_internal/_generate_schema.py:1460: in _tuple_schema
    return core_schema.tuple_schema([self.generate_schema(param) for param in params])
pydantic/_internal/_generate_schema.py:512: in generate_schema
    schema = self._generate_schema_inner(obj)
pydantic/_internal/_generate_schema.py:789: in _generate_schema_inner
    return self.match_type(obj)
pydantic/_internal/_generate_schema.py:871: in match_type
    return self._match_generic_type(obj, origin)
pydantic/_internal/_generate_schema.py:919: in _match_generic_type
    return self._unknown_type_schema(obj)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
obj = typing.Unpack[NumberOfComponents]

    def _unknown_type_schema(self, obj: Any) -> CoreSchema:
>       raise PydanticSchemaGenerationError(
            f'Unable to generate pydantic-core schema for {obj!r}. '
            'Set `arbitrary_types_allowed=True` in the model_config to ignore this error'
            ' or implement `__get_pydantic_core_schema__` on your type to fully support it.'
            '\n\nIf you got this error by calling handler() within'
            ' `__get_pydantic_core_schema__` then you likely need to call'
            ' `handler.generate_schema()` since we do not call'
            ' `__get_pydantic_core_schema__` on `` otherwise to avoid infinite recursion.'
        )
E       pydantic.errors.PydanticSchemaGenerationError: Unable to generate pydantic-core schema for typing.Unpack[NumberOfComponents]. Set `arbitrary_types_allowed=True` in the model_config to ignore this error or implement `__get_pydantic_core_schema__` on your type to fully support it.
E       
E       If you got this error by calling handler() within `__get_pydantic_core_schema__` then you likely need to call `handler.generate_schema()` since we do not call `__get_pydantic_core_schema__` on `` otherwise to avoid infinite recursion.
E       
E       For further information visit https://errors.pydantic.dev/2.8/u/schema-for-unknown-type

pydantic/_internal/_generate_schema.py:415: PydanticSchemaGenerationError

test_generics.py::test_variadic_generic_with_variadic_fields

@pytest.mark.skipif(sys.version_info < (3, 11), reason='requires python 3.11 or higher')
    @pytest.mark.xfail(
        reason='TODO: Variadic fields are not supported yet; Issue: https://github.com/pydantic/pydantic/issues/5804'
    )
    def test_variadic_generic_with_variadic_fields():
        class ComponentModel(BaseModel):
            pass

        class Wrench(ComponentModel):
            pass

        class Screwdriver(ComponentModel):
            pass

        ComponentVar = TypeVar('ComponentVar', bound=ComponentModel)
        NumberOfComponents = TypeVarTuple('NumberOfComponents')

>       class VariadicToolbox(BaseModel, Generic[ComponentVar, Unpack[NumberOfComponents]]):

tests/test_generics.py:2532: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
pydantic/_internal/_model_construction.py:205: in __new__
    complete_model_class(
pydantic/_internal/_model_construction.py:534: in complete_model_class
    schema = cls.__get_pydantic_core_schema__(cls, handler)
pydantic/main.py:643: in __get_pydantic_core_schema__
    return handler(source)
pydantic/_internal/_schema_generation_shared.py:83: in __call__
    schema = self._handler(source_type)
pydantic/_internal/_generate_schema.py:512: in generate_schema
    schema = self._generate_schema_inner(obj)
pydantic/_internal/_generate_schema.py:784: in _generate_schema_inner
    return self._model_schema(obj)
pydantic/_internal/_generate_schema.py:591: in _model_schema
    {k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()},
pydantic/_internal/_generate_schema.py:947: in _generate_md_field_schema
    common_field = self._common_field_schema(name, field_info, decorators)
pydantic/_internal/_generate_schema.py:1134: in _common_field_schema
    schema = self._apply_annotations(
pydantic/_internal/_generate_schema.py:1890: in _apply_annotations
    schema = get_inner_schema(source_type)
pydantic/_internal/_schema_generation_shared.py:83: in __call__
    schema = self._handler(source_type)
pydantic/_internal/_generate_schema.py:1871: in inner_handler
    schema = self._generate_schema_inner(obj)
pydantic/_internal/_generate_schema.py:789: in _generate_schema_inner
    return self.match_type(obj)
pydantic/_internal/_generate_schema.py:871: in match_type
    return self._match_generic_type(obj, origin)
pydantic/_internal/_generate_schema.py:895: in _match_generic_type
    return self._union_schema(obj)
pydantic/_internal/_generate_schema.py:1207: in _union_schema
    choices.append(self.generate_schema(arg))
pydantic/_internal/_generate_schema.py:512: in generate_schema
    schema = self._generate_schema_inner(obj)
pydantic/_internal/_generate_schema.py:789: in _generate_schema_inner
    return self.match_type(obj)
pydantic/_internal/_generate_schema.py:871: in match_type
    return self._match_generic_type(obj, origin)
pydantic/_internal/_generate_schema.py:897: in _match_generic_type
    return self._tuple_schema(obj)
pydantic/_internal/_generate_schema.py:1460: in _tuple_schema
    return core_schema.tuple_schema([self.generate_schema(param) for param in params])
pydantic/_internal/_generate_schema.py:512: in generate_schema
    schema = self._generate_schema_inner(obj)
pydantic/_internal/_generate_schema.py:789: in _generate_schema_inner
    return self.match_type(obj)
pydantic/_internal/_generate_schema.py:871: in match_type
    return self._match_generic_type(obj, origin)
pydantic/_internal/_generate_schema.py:919: in _match_generic_type
    return self._unknown_type_schema(obj)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
obj = typing.Unpack[NumberOfComponents]

    def _unknown_type_schema(self, obj: Any) -> CoreSchema:
>       raise PydanticSchemaGenerationError(
            f'Unable to generate pydantic-core schema for {obj!r}. '
            'Set `arbitrary_types_allowed=True` in the model_config to ignore this error'
            ' or implement `__get_pydantic_core_schema__` on your type to fully support it.'
            '\n\nIf you got this error by calling handler() within'
            ' `__get_pydantic_core_schema__` then you likely need to call'
            ' `handler.generate_schema()` since we do not call'
            ' `__get_pydantic_core_schema__` on `` otherwise to avoid infinite recursion.'
        )
E       pydantic.errors.PydanticSchemaGenerationError: Unable to generate pydantic-core schema for typing.Unpack[NumberOfComponents]. Set `arbitrary_types_allowed=True` in the model_config to ignore this error or implement `__get_pydantic_core_schema__` on your type to fully support it.
E       
E       If you got this error by calling handler() within `__get_pydantic_core_schema__` then you likely need to call `handler.generate_schema()` since we do not call `__get_pydantic_core_schema__` on `` otherwise to avoid infinite recursion.
E       
E       For further information visit https://errors.pydantic.dev/2.8/u/schema-for-unknown-type

pydantic/_internal/_generate_schema.py:415: PydanticSchemaGenerationError
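
Both variadic-generic xfails above hinge on `TypeVarTuple`/`Unpack` support (issue 5804). A hedged sketch of what does work today: a non-variadic generic model with a homogeneous variable-length tuple field (the `Toolbox` name and field layout are illustrative, not from the test).

    from typing import Generic, Tuple, TypeVar

    from pydantic import BaseModel

    class ComponentModel(BaseModel):
        pass

    class Wrench(ComponentModel):
        pass

    ComponentVar = TypeVar('ComponentVar', bound=ComponentModel)

    class Toolbox(BaseModel, Generic[ComponentVar]):
        main_component: ComponentVar
        # A homogeneous Tuple[...] field stands in for the unsupported Unpack[TypeVarTuple]
        components: Tuple[ComponentVar, ...] = ()

    box = Toolbox[Wrench](main_component=Wrench(), components=(Wrench(), Wrench()))
    assert len(box.components) == 2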

test_json_schema.py::test_get_pydantic_core_schema_calls

@pytest.mark.xfail(
        reason=(
            'We are calling __get_pydantic_json_schema__ too many times.'
            ' The second time we analyze a model we get the CoreSchema from __pydantic_core_schema__.'
            ' But then we proceed to append to the metadata json schema functions.'
        )
    )
    def test_get_pydantic_core_schema_calls() -> None:
        """Verify when/how many times `__get_pydantic_core_schema__` gets called"""

        calls: List[str] = []

        class Model(BaseModel):
            @classmethod
            def __get_pydantic_json_schema__(cls, schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
                calls.append('Model::before')
                json_schema = handler(schema)
                calls.append('Model::after')
                return json_schema

        schema = Model.model_json_schema()
        expected: JsonSchemaValue = {'type': 'object', 'properties': {}, 'title': 'Model'}

        assert schema == expected
        assert calls == ['Model::before', 'Model::after']

        calls.clear()

        class CustomAnnotation(NamedTuple):
            name: str

            def __get_pydantic_json_schema__(self, schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
                calls.append(f'CustomAnnotation({self.name})::before')
                json_schema = handler(schema)
                calls.append(f'CustomAnnotation({self.name})::after')
                return json_schema

        AnnotatedType = Annotated[str, CustomAnnotation('foo'), CustomAnnotation('bar')]

        schema = TypeAdapter(AnnotatedType).json_schema()
        expected: JsonSchemaValue = {'type': 'string'}

        assert schema == expected
        assert calls == [
            'CustomAnnotation(bar)::before',
            'CustomAnnotation(foo)::before',
            'CustomAnnotation(foo)::after',
            'CustomAnnotation(bar)::after',
        ]

        calls.clear()

        class OuterModel(BaseModel):
            x: Model

            @classmethod
            def __get_pydantic_json_schema__(cls, schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
                calls.append('OuterModel::before')
                json_schema = handler(schema)
                calls.append('OuterModel::after')
                return json_schema

        schema = OuterModel.model_json_schema()
        expected: JsonSchemaValue = {
            'type': 'object',
            'properties': {'x': {'$ref': '#/$defs/Model'}},
            'required': ['x'],
            'title': 'OuterModel',
            '$defs': {'Model': {'type': 'object', 'properties': {}, 'title': 'Model'}},
        }

        assert schema == expected
        assert calls == [
            'OuterModel::before',
            'Model::before',
            'Model::after',
            'OuterModel::after',
        ]

        calls.clear()

        AnnotatedModel = Annotated[Model, CustomAnnotation('foo')]

        schema = TypeAdapter(AnnotatedModel).json_schema()
        expected: JsonSchemaValue = {}

>       assert schema == expected
E       AssertionError: assert {'properties'...pe': 'object'} == {}
E         
E         Left contains 3 more items:
E         {'properties': {}, 'title': 'Model', 'type': 'object'}
E         Use -v to get more diff

tests/test_json_schema.py:4585: AssertionError

test_main.py::test_model_validate_list_strict

@pytest.mark.xfail(
        reason='strict=True in model_validate_json does not overwrite strict=False given in ConfigDict'
        'See issue: https://github.com/pydantic/pydantic/issues/8930'
    )
    def test_model_validate_list_strict() -> None:
        # FIXME: This change must be implemented in pydantic-core. The argument strict=True
        # in model_validate_json method is not overwriting the one set with ConfigDict(strict=False)
        # for sequence like types. See: https://github.com/pydantic/pydantic/issues/8930

        class LaxModel(BaseModel):
            x: List[str]
            model_config = ConfigDict(strict=False)

        assert LaxModel.model_validate_json(json.dumps({'x': ('a', 'b', 'c')}), strict=None) == LaxModel(x=('a', 'b', 'c'))
        assert LaxModel.model_validate_json(json.dumps({'x': ('a', 'b', 'c')}), strict=False) == LaxModel(x=('a', 'b', 'c'))
>       with pytest.raises(ValidationError) as exc_info:
E       Failed: DID NOT RAISE 

tests/test_main.py:2415: Failed

test_networks.py::test_mongodsn_default_ports[...app]

dsn = 'mongodb+srv://user:pass@localhost/app'
expected = 'mongodb+srv://user:pass@localhost/app'

    @pytest.mark.parametrize(
        ('dsn', 'expected'),
        [
            ('mongodb://user:pass@localhost/app', 'mongodb://user:pass@localhost:27017/app'),
            pytest.param(
                'mongodb+srv://user:pass@localhost/app',
                'mongodb+srv://user:pass@localhost/app',
                marks=pytest.mark.xfail(
                    reason=(
                        'This case is not supported. '
                        'Check https://github.com/pydantic/pydantic/pull/7116 for more details.'
                    )
                ),
            ),
        ],
    )
    def test_mongodsn_default_ports(dsn: str, expected: str):
        class Model(BaseModel):
            dsn: MongoDsn

        m = Model(dsn=dsn)
>       assert str(m.dsn) == expected
E       AssertionError: assert 'mongodb+srv:...ost:27017/app' == 'mongodb+srv:...localhost/app'
E         
E         - mongodb+srv://user:pass@localhost/app
E         + mongodb+srv://user:pass@localhost:27017/app
E         ?                                  ++++++

tests/test_networks.py:756: AssertionError
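
For reference, the non-srv parametrization of the same test passes: the plain mongodb scheme gets the default port filled in. A minimal sketch of that passing case:

    from pydantic import BaseModel, MongoDsn

    class Model(BaseModel):
        dsn: MongoDsn

    # Default port 27017 is appended for the plain mongodb scheme;
    # the mongodb+srv case above is the xfail (see PR 7116).
    m = Model(dsn='mongodb://user:pass@localhost/app')
    assert str(m.dsn) == 'mongodb://user:pass@localhost:27017/app'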

test_type_adapter.py::test_validate_json_strict

@pytest.mark.xfail(reason='Need to fix this in https://github.com/pydantic/pydantic/pull/5944')
    def test_validate_json_strict() -> None:
        class Model(TypedDict):
            x: int

        class ModelStrict(Model):
            __pydantic_config__ = ConfigDict(strict=True)  # type: ignore

>       lax_validator = TypeAdapter(Model, config=ConfigDict(strict=False))

tests/test_type_adapter.py:240: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
type = .Model'>

    def __init__(
        self,
        type: Any,
        *,
        config: ConfigDict | None = None,
        _parent_depth: int = 2,
        module: str | None = None,
    ) -> None:
        """Initializes the TypeAdapter object.

        Args:
            type: The type associated with the `TypeAdapter`.
            config: Configuration for the `TypeAdapter`, should be a dictionary conforming to [`ConfigDict`][pydantic.config.ConfigDict].
            _parent_depth: depth at which to search the parent namespace to construct the local namespace.
            module: The module that passes to plugin if provided.

        !!! note
            You cannot use the `config` argument when instantiating a `TypeAdapter` if the type you're using has its own
            config that cannot be overridden (ex: `BaseModel`, `TypedDict`, and `dataclass`). A
            [`type-adapter-config-unused`](../errors/usage_errors.md#type-adapter-config-unused) error will be raised in this case.

        !!! note
            The `_parent_depth` argument is named with an underscore to suggest its private nature and discourage use.
            It may be deprecated in a minor version, so we only recommend using it if you're
            comfortable with potential change in behavior / support.

        ??? tip "Compatibility with `mypy`"
            Depending on the type used, `mypy` might raise an error when instantiating a `TypeAdapter`. As a workaround, you can explicitly
            annotate your variable:

            
            from typing import Union

            from pydantic import TypeAdapter

            ta: TypeAdapter[Union[str, int]] = TypeAdapter(Union[str, int])  # type: ignore[arg-type]

        Returns:
            A type adapter configured for the specified `type`.
        """
        if _type_has_config(type) and config is not None:
>           raise PydanticUserError(
                'Cannot use `config` when the type is a BaseModel, dataclass or TypedDict.'
                ' These types can have their own config and setting the config via the `config`'
                ' parameter to TypeAdapter will not override it, thus the `config` you passed to'
                ' TypeAdapter becomes meaningless, which is probably not what you want.',
                code='type-adapter-config-unused',
            )
E       pydantic.errors.PydanticUserError: Cannot use `config` when the type is a BaseModel, dataclass or TypedDict. These types can have their own config and setting the config via the `config` parameter to TypeAdapter will not override it, thus the `config` you passed to TypeAdapter becomes meaningless, which is probably not what you want.
E
E       For further information visit https://errors.pydantic.dev/2.8/u/type-adapter-config-unused

pydantic/type_adapter.py:238: PydanticUserError

test_type_alias_type.py::test_field

@pytest.mark.xfail(reason='description is currently dropped')
    def test_field() -> None:
        SomeAlias = TypeAliasType('SomeAlias', Annotated[int, Field(description='number')])

        ta = TypeAdapter(Annotated[SomeAlias, Field(title='abc')])

        # insert_assert(ta.json_schema())
>       assert ta.json_schema() == {
            '$defs': {'SomeAlias': {'type': 'integer', 'description': 'number'}},
            'allOf': [{'$ref': '#/$defs/SomeAlias'}],
            'title': 'abc',
        }
E       AssertionError: assert {'$defs': {'S...title': 'abc'} == {'$defs': {'S...title': 'abc'}
E         
E         Omitting 2 identical items, use -vv to show
E         Differing items:
E         {'$defs': {'SomeAlias': {'type': 'integer'}}} != {'$defs': {'SomeAlias': {'description': 'number', 'type': 'integer'}}}
E         Use -v to get more diff

tests/test_type_alias_type.py:325: AssertionError

test_types.py::test_invalid_schema_constraints[kwargs0-int]

kwargs = {'pattern': '^foo$'}, type_ = 

    @pytest.mark.parametrize(
        'kwargs,type_',
        [
            pytest.param(
                {'pattern': '^foo$'},
                int,
                marks=pytest.mark.xfail(
                    reason='int cannot be used with pattern but we do not currently validate that at schema build time'
                ),
            ),
            ({'gt': 0}, conlist(int, min_length=4)),
            ({'gt': 0}, conset(int, min_length=4)),
            ({'gt': 0}, confrozenset(int, min_length=4)),
        ],
    )
    def test_invalid_schema_constraints(kwargs, type_):
        match = (
            r'(:?Invalid Schema:\n.*\n  Extra inputs are not permitted)|(:?The following constraints cannot be applied to)'
        )
>       with pytest.raises((SchemaError, TypeError), match=match):
E       Failed: DID NOT RAISE (, )

tests/test_types.py:1803: Failed

Patch diff

diff --git a/pydantic/_internal/_config.py b/pydantic/_internal/_config.py
index 9809c1f3e..d27e3bba8 100644
--- a/pydantic/_internal/_config.py
+++ b/pydantic/_internal/_config.py
@@ -1,26 +1,46 @@
 from __future__ import annotations as _annotations
+
 import warnings
 from contextlib import contextmanager
-from typing import TYPE_CHECKING, Any, Callable, cast
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    cast,
+)
+
 from pydantic_core import core_schema
-from typing_extensions import Literal, Self
+from typing_extensions import (
+    Literal,
+    Self,
+)
+
 from ..aliases import AliasGenerator
 from ..config import ConfigDict, ExtraValues, JsonDict, JsonEncoder, JsonSchemaExtraCallable
 from ..errors import PydanticUserError
 from ..warnings import PydanticDeprecatedSince20
+
 if not TYPE_CHECKING:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
     DeprecationWarning = PydanticDeprecatedSince20
+
 if TYPE_CHECKING:
     from .._internal._schema_generation_shared import GenerateSchema
     from ..fields import ComputedFieldInfo, FieldInfo
-DEPRECATION_MESSAGE = (
-    'Support for class-based `config` is deprecated, use ConfigDict instead.')
+
+DEPRECATION_MESSAGE = 'Support for class-based `config` is deprecated, use ConfigDict instead.'


 class ConfigWrapper:
     """Internal wrapper for Config which exposes ConfigDict items as attributes."""
-    __slots__ = 'config_dict',
+
+    __slots__ = ('config_dict',)
+
     config_dict: ConfigDict
+
+    # all annotations are copied directly from ConfigDict, and should be kept up to date, a test will fail if they
+    # stop matching
     title: str | None
     str_to_lower: bool
     str_to_upper: bool
@@ -34,20 +54,25 @@ class ConfigWrapper:
     validate_assignment: bool
     arbitrary_types_allowed: bool
     from_attributes: bool
+    # whether to use the actual key provided in the data (e.g. alias or first alias for "field required" errors) instead of field_names
+    # to construct error `loc`s, default `True`
     loc_by_alias: bool
     alias_generator: Callable[[str], str] | AliasGenerator | None
     model_title_generator: Callable[[type], str] | None
-    field_title_generator: Callable[[str, FieldInfo | ComputedFieldInfo], str
-        ] | None
+    field_title_generator: Callable[[str, FieldInfo | ComputedFieldInfo], str] | None
     ignored_types: tuple[type, ...]
     allow_inf_nan: bool
     json_schema_extra: JsonDict | JsonSchemaExtraCallable | None
     json_encoders: dict[type[object], JsonEncoder] | None
+
+    # new in V2
     strict: bool
+    # whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never'
     revalidate_instances: Literal['always', 'never', 'subclass-instances']
     ser_json_timedelta: Literal['iso8601', 'float']
     ser_json_bytes: Literal['utf8', 'base64']
     ser_json_inf_nan: Literal['null', 'constants', 'strings']
+    # whether to validate default values during validation, default False
     validate_default: bool
     validate_return: bool
     protected_namespaces: tuple[str, ...]
@@ -64,16 +89,14 @@ class ConfigWrapper:
     use_attribute_docstrings: bool
     cache_strings: bool | Literal['all', 'keys', 'none']

-    def __init__(self, config: (ConfigDict | dict[str, Any] | type[Any] |
-        None), *, check: bool=True):
+    def __init__(self, config: ConfigDict | dict[str, Any] | type[Any] | None, *, check: bool = True):
         if check:
             self.config_dict = prepare_config(config)
         else:
             self.config_dict = cast(ConfigDict, config)

     @classmethod
-    def for_model(cls, bases: tuple[type[Any], ...], namespace: dict[str,
-        Any], kwargs: dict[str, Any]) ->Self:
+    def for_model(cls, bases: tuple[type[Any], ...], namespace: dict[str, Any], kwargs: dict[str, Any]) -> Self:
         """Build a new `ConfigWrapper` instance for a `BaseModel`.

         The config wrapper built based on (in descending order of priority):
@@ -89,20 +112,48 @@ class ConfigWrapper:
         Returns:
             A `ConfigWrapper` instance for `BaseModel`.
         """
-        pass
-    if not TYPE_CHECKING:
+        config_new = ConfigDict()
+        for base in bases:
+            config = getattr(base, 'model_config', None)
+            if config:
+                config_new.update(config.copy())
+
+        config_class_from_namespace = namespace.get('Config')
+        config_dict_from_namespace = namespace.get('model_config')
+
+        raw_annotations = namespace.get('__annotations__', {})
+        if raw_annotations.get('model_config') and not config_dict_from_namespace:
+            raise PydanticUserError(
+                '`model_config` cannot be used as a model field name. Use `model_config` for model configuration.',
+                code='model-config-invalid-field-name',
+            )
+
+        if config_class_from_namespace and config_dict_from_namespace:
+            raise PydanticUserError('"Config" and "model_config" cannot be used together', code='config-both')
+
+        config_from_namespace = config_dict_from_namespace or prepare_config(config_class_from_namespace)
+
+        config_new.update(config_from_namespace)
+
+        for k in list(kwargs.keys()):
+            if k in config_keys:
+                config_new[k] = kwargs.pop(k)

-        def __getattr__(self, name: str) ->Any:
+        return cls(config_new)
+
+    # we don't show `__getattr__` to type checkers so missing attributes cause errors
+    if not TYPE_CHECKING:  # pragma: no branch
+
+        def __getattr__(self, name: str) -> Any:
             try:
                 return self.config_dict[name]
             except KeyError:
                 try:
                     return config_defaults[name]
                 except KeyError:
-                    raise AttributeError(f'Config has no attribute {name!r}'
-                        ) from None
+                    raise AttributeError(f'Config has no attribute {name!r}') from None

-    def core_config(self, obj: Any) ->core_schema.CoreConfig:
+    def core_config(self, obj: Any) -> core_schema.CoreConfig:
         """Create a pydantic-core config, `obj` is just used to populate `title` if not set in config.

         Pass `obj=None` if you do not want to attempt to infer the `title`.
@@ -115,7 +166,37 @@ class ConfigWrapper:
         Returns:
             A `CoreConfig` object created from config.
         """
-        pass
+
+        def dict_not_none(**kwargs: Any) -> Any:
+            return {k: v for k, v in kwargs.items() if v is not None}
+
+        core_config = core_schema.CoreConfig(
+            **dict_not_none(
+                title=self.config_dict.get('title') or (obj and obj.__name__),
+                extra_fields_behavior=self.config_dict.get('extra'),
+                allow_inf_nan=self.config_dict.get('allow_inf_nan'),
+                populate_by_name=self.config_dict.get('populate_by_name'),
+                str_strip_whitespace=self.config_dict.get('str_strip_whitespace'),
+                str_to_lower=self.config_dict.get('str_to_lower'),
+                str_to_upper=self.config_dict.get('str_to_upper'),
+                strict=self.config_dict.get('strict'),
+                ser_json_timedelta=self.config_dict.get('ser_json_timedelta'),
+                ser_json_bytes=self.config_dict.get('ser_json_bytes'),
+                ser_json_inf_nan=self.config_dict.get('ser_json_inf_nan'),
+                from_attributes=self.config_dict.get('from_attributes'),
+                loc_by_alias=self.config_dict.get('loc_by_alias'),
+                revalidate_instances=self.config_dict.get('revalidate_instances'),
+                validate_default=self.config_dict.get('validate_default'),
+                str_max_length=self.config_dict.get('str_max_length'),
+                str_min_length=self.config_dict.get('str_min_length'),
+                hide_input_in_errors=self.config_dict.get('hide_input_in_errors'),
+                coerce_numbers_to_str=self.config_dict.get('coerce_numbers_to_str'),
+                regex_engine=self.config_dict.get('regex_engine'),
+                validation_error_cause=self.config_dict.get('validation_error_cause'),
+                cache_strings=self.config_dict.get('cache_strings'),
+            )
+        )
+        return core_config

     def __repr__(self):
         c = ', '.join(f'{k}={v!r}' for k, v in self.config_dict.items())
@@ -128,27 +209,73 @@ class ConfigWrapperStack:
     def __init__(self, config_wrapper: ConfigWrapper):
         self._config_wrapper_stack: list[ConfigWrapper] = [config_wrapper]

+    @property
+    def tail(self) -> ConfigWrapper:
+        return self._config_wrapper_stack[-1]
+
+    @contextmanager
+    def push(self, config_wrapper: ConfigWrapper | ConfigDict | None):
+        if config_wrapper is None:
+            yield
+            return
+
+        if not isinstance(config_wrapper, ConfigWrapper):
+            config_wrapper = ConfigWrapper(config_wrapper, check=False)
+
+        self._config_wrapper_stack.append(config_wrapper)
+        try:
+            yield
+        finally:
+            self._config_wrapper_stack.pop()
+

-config_defaults = ConfigDict(title=None, str_to_lower=False, str_to_upper=
-    False, str_strip_whitespace=False, str_min_length=0, str_max_length=
-    None, extra=None, frozen=False, populate_by_name=False, use_enum_values
-    =False, validate_assignment=False, arbitrary_types_allowed=False,
-    from_attributes=False, loc_by_alias=True, alias_generator=None,
-    model_title_generator=None, field_title_generator=None, ignored_types=(
-    ), allow_inf_nan=True, json_schema_extra=None, strict=False,
-    revalidate_instances='never', ser_json_timedelta='iso8601',
-    ser_json_bytes='utf8', ser_json_inf_nan='null', validate_default=False,
-    validate_return=False, protected_namespaces=('model_',),
-    hide_input_in_errors=False, json_encoders=None, defer_build=False,
-    experimental_defer_build_mode=('model',), plugin_settings=None,
-    schema_generator=None, json_schema_serialization_defaults_required=
-    False, json_schema_mode_override=None, coerce_numbers_to_str=False,
-    regex_engine='rust-regex', validation_error_cause=False,
-    use_attribute_docstrings=False, cache_strings=True)
-
-
-def prepare_config(config: (ConfigDict | dict[str, Any] | type[Any] | None)
-    ) ->ConfigDict:
+config_defaults = ConfigDict(
+    title=None,
+    str_to_lower=False,
+    str_to_upper=False,
+    str_strip_whitespace=False,
+    str_min_length=0,
+    str_max_length=None,
+    # let the model / dataclass decide how to handle it
+    extra=None,
+    frozen=False,
+    populate_by_name=False,
+    use_enum_values=False,
+    validate_assignment=False,
+    arbitrary_types_allowed=False,
+    from_attributes=False,
+    loc_by_alias=True,
+    alias_generator=None,
+    model_title_generator=None,
+    field_title_generator=None,
+    ignored_types=(),
+    allow_inf_nan=True,
+    json_schema_extra=None,
+    strict=False,
+    revalidate_instances='never',
+    ser_json_timedelta='iso8601',
+    ser_json_bytes='utf8',
+    ser_json_inf_nan='null',
+    validate_default=False,
+    validate_return=False,
+    protected_namespaces=('model_',),
+    hide_input_in_errors=False,
+    json_encoders=None,
+    defer_build=False,
+    experimental_defer_build_mode=('model',),
+    plugin_settings=None,
+    schema_generator=None,
+    json_schema_serialization_defaults_required=False,
+    json_schema_mode_override=None,
+    coerce_numbers_to_str=False,
+    regex_engine='rust-regex',
+    validation_error_cause=False,
+    use_attribute_docstrings=False,
+    cache_strings=True,
+)
+
+
+def prepare_config(config: ConfigDict | dict[str, Any] | type[Any] | None) -> ConfigDict:
     """Create a `ConfigDict` instance from an existing dict, a class (e.g. old class-based config) or None.

     Args:
@@ -157,26 +284,58 @@ def prepare_config(config: (ConfigDict | dict[str, Any] | type[Any] | None)
     Returns:
         A ConfigDict object created from config.
     """
-    pass
+    if config is None:
+        return ConfigDict()
+
+    if not isinstance(config, dict):
+        warnings.warn(DEPRECATION_MESSAGE, DeprecationWarning)
+        config = {k: getattr(config, k) for k in dir(config) if not k.startswith('__')}
+
+    config_dict = cast(ConfigDict, config)
+    check_deprecated(config_dict)
+    return config_dict


 config_keys = set(ConfigDict.__annotations__.keys())
-V2_REMOVED_KEYS = {'allow_mutation', 'error_msg_templates', 'fields',
-    'getter_dict', 'smart_union', 'underscore_attrs_are_private',
-    'json_loads', 'json_dumps', 'copy_on_model_validation', 'post_init_call'}
-V2_RENAMED_KEYS = {'allow_population_by_field_name': 'populate_by_name',
-    'anystr_lower': 'str_to_lower', 'anystr_strip_whitespace':
-    'str_strip_whitespace', 'anystr_upper': 'str_to_upper',
-    'keep_untouched': 'ignored_types', 'max_anystr_length':
-    'str_max_length', 'min_anystr_length': 'str_min_length', 'orm_mode':
-    'from_attributes', 'schema_extra': 'json_schema_extra', 'validate_all':
-    'validate_default'}
-
-
-def check_deprecated(config_dict: ConfigDict) ->None:
+
+
+V2_REMOVED_KEYS = {
+    'allow_mutation',
+    'error_msg_templates',
+    'fields',
+    'getter_dict',
+    'smart_union',
+    'underscore_attrs_are_private',
+    'json_loads',
+    'json_dumps',
+    'copy_on_model_validation',
+    'post_init_call',
+}
+V2_RENAMED_KEYS = {
+    'allow_population_by_field_name': 'populate_by_name',
+    'anystr_lower': 'str_to_lower',
+    'anystr_strip_whitespace': 'str_strip_whitespace',
+    'anystr_upper': 'str_to_upper',
+    'keep_untouched': 'ignored_types',
+    'max_anystr_length': 'str_max_length',
+    'min_anystr_length': 'str_min_length',
+    'orm_mode': 'from_attributes',
+    'schema_extra': 'json_schema_extra',
+    'validate_all': 'validate_default',
+}
+
+
+def check_deprecated(config_dict: ConfigDict) -> None:
     """Check for deprecated config keys and warn the user.

     Args:
         config_dict: The input config.
     """
-    pass
+    deprecated_removed_keys = V2_REMOVED_KEYS & config_dict.keys()
+    deprecated_renamed_keys = V2_RENAMED_KEYS.keys() & config_dict.keys()
+    if deprecated_removed_keys or deprecated_renamed_keys:
+        renamings = {k: V2_RENAMED_KEYS[k] for k in sorted(deprecated_renamed_keys)}
+        renamed_bullets = [f'* {k!r} has been renamed to {v!r}' for k, v in renamings.items()]
+        removed_bullets = [f'* {k!r} has been removed' for k in sorted(deprecated_removed_keys)]
+        message = '\n'.join(['Valid config keys have changed in V2:'] + renamed_bullets + removed_bullets)
+        warnings.warn(message, UserWarning)
diff --git a/pydantic/_internal/_core_metadata.py b/pydantic/_internal/_core_metadata.py
index e9361dbe5..296d49f59 100644
--- a/pydantic/_internal/_core_metadata.py
+++ b/pydantic/_internal/_core_metadata.py
@@ -1,13 +1,20 @@
 from __future__ import annotations as _annotations
+
 import typing
 from typing import Any
+
 import typing_extensions
+
 if typing.TYPE_CHECKING:
-    from ._schema_generation_shared import CoreSchemaOrField as CoreSchemaOrField
-    from ._schema_generation_shared import GetJsonSchemaFunction
+    from ._schema_generation_shared import (
+        CoreSchemaOrField as CoreSchemaOrField,
+    )
+    from ._schema_generation_shared import (
+        GetJsonSchemaFunction,
+    )


-class CoreMetadata(typing_extensions.TypedDict, total=(False)):
+class CoreMetadata(typing_extensions.TypedDict, total=False):
     """A `TypedDict` for holding the metadata dict of the schema.

     Attributes:
@@ -15,10 +22,15 @@ class CoreMetadata(typing_extensions.TypedDict, total=(False)):
         pydantic_js_prefer_positional_arguments: Whether JSON schema generator will
             prefer positional over keyword arguments for an 'arguments' schema.
     """
+
     pydantic_js_functions: list[GetJsonSchemaFunction]
     pydantic_js_annotation_functions: list[GetJsonSchemaFunction]
+
+    # If `pydantic_js_prefer_positional_arguments` is True, the JSON schema generator will
+    # prefer positional over keyword arguments for an 'arguments' schema.
     pydantic_js_prefer_positional_arguments: bool | None
-    pydantic_typed_dict_cls: type[Any] | None
+
+    pydantic_typed_dict_cls: type[Any] | None  # TODO: Consider moving this into the pydantic-core TypedDictSchema


 class CoreMetadataHandler:
@@ -27,31 +39,54 @@ class CoreMetadataHandler:
     This class is used to interact with the metadata field on a CoreSchema object in a consistent
     way throughout pydantic.
     """
-    __slots__ = '_schema',
+
+    __slots__ = ('_schema',)

     def __init__(self, schema: CoreSchemaOrField):
         self._schema = schema
+
         metadata = schema.get('metadata')
         if metadata is None:
             schema['metadata'] = CoreMetadata()
         elif not isinstance(metadata, dict):
-            raise TypeError(
-                f'CoreSchema metadata should be a dict; got {metadata!r}.')
+            raise TypeError(f'CoreSchema metadata should be a dict; got {metadata!r}.')

     @property
-    def metadata(self) ->CoreMetadata:
+    def metadata(self) -> CoreMetadata:
         """Retrieves the metadata dict from the schema, initializing it to a dict if it is None
         and raises an error if it is not a dict.
         """
-        pass
+        metadata = self._schema.get('metadata')
+        if metadata is None:
+            self._schema['metadata'] = metadata = CoreMetadata()
+        if not isinstance(metadata, dict):
+            raise TypeError(f'CoreSchema metadata should be a dict; got {metadata!r}.')
+        return metadata


-def build_metadata_dict(*, js_functions: (list[GetJsonSchemaFunction] |
-    None)=None, js_annotation_functions: (list[GetJsonSchemaFunction] |
-    None)=None, js_prefer_positional_arguments: (bool | None)=None,
-    typed_dict_cls: (type[Any] | None)=None, initial_metadata: (Any | None)
-    =None) ->Any:
+def build_metadata_dict(
+    *,  # force keyword arguments to make it easier to modify this signature in a backwards-compatible way
+    js_functions: list[GetJsonSchemaFunction] | None = None,
+    js_annotation_functions: list[GetJsonSchemaFunction] | None = None,
+    js_prefer_positional_arguments: bool | None = None,
+    typed_dict_cls: type[Any] | None = None,
+    initial_metadata: Any | None = None,
+) -> Any:
     """Builds a dict to use as the metadata field of a CoreSchema object in a manner that is consistent
     with the CoreMetadataHandler class.
     """
-    pass
+    if initial_metadata is not None and not isinstance(initial_metadata, dict):
+        raise TypeError(f'CoreSchema metadata should be a dict; got {initial_metadata!r}.')
+
+    metadata = CoreMetadata(
+        pydantic_js_functions=js_functions or [],
+        pydantic_js_annotation_functions=js_annotation_functions or [],
+        pydantic_js_prefer_positional_arguments=js_prefer_positional_arguments,
+        pydantic_typed_dict_cls=typed_dict_cls,
+    )
+    metadata = {k: v for k, v in metadata.items() if v is not None}
+
+    if initial_metadata is not None:
+        metadata = {**initial_metadata, **metadata}
+
+    return metadata
diff --git a/pydantic/_internal/_core_utils.py b/pydantic/_internal/_core_utils.py
index 5f858e638..baec70625 100644
--- a/pydantic/_internal/_core_utils.py
+++ b/pydantic/_internal/_core_utils.py
@@ -1,26 +1,45 @@
 from __future__ import annotations
+
 import os
 from collections import defaultdict
-from typing import Any, Callable, Hashable, TypeVar, Union
+from typing import (
+    Any,
+    Callable,
+    Hashable,
+    TypeVar,
+    Union,
+)
+
 from pydantic_core import CoreSchema, core_schema
 from pydantic_core import validate_core_schema as _validate_core_schema
 from typing_extensions import TypeAliasType, TypeGuard, get_args, get_origin
+
 from . import _repr
 from ._typing_extra import is_generic_alias
-AnyFunctionSchema = Union[core_schema.AfterValidatorFunctionSchema,
-    core_schema.BeforeValidatorFunctionSchema, core_schema.
-    WrapValidatorFunctionSchema, core_schema.PlainValidatorFunctionSchema]
-FunctionSchemaWithInnerSchema = Union[core_schema.
-    AfterValidatorFunctionSchema, core_schema.BeforeValidatorFunctionSchema,
-    core_schema.WrapValidatorFunctionSchema]
-CoreSchemaField = Union[core_schema.ModelField, core_schema.DataclassField,
-    core_schema.TypedDictField, core_schema.ComputedField]
+
+AnyFunctionSchema = Union[
+    core_schema.AfterValidatorFunctionSchema,
+    core_schema.BeforeValidatorFunctionSchema,
+    core_schema.WrapValidatorFunctionSchema,
+    core_schema.PlainValidatorFunctionSchema,
+]
+
+
+FunctionSchemaWithInnerSchema = Union[
+    core_schema.AfterValidatorFunctionSchema,
+    core_schema.BeforeValidatorFunctionSchema,
+    core_schema.WrapValidatorFunctionSchema,
+]
+
+CoreSchemaField = Union[
+    core_schema.ModelField, core_schema.DataclassField, core_schema.TypedDictField, core_schema.ComputedField
+]
 CoreSchemaOrField = Union[core_schema.CoreSchema, CoreSchemaField]
-_CORE_SCHEMA_FIELD_TYPES = {'typed-dict-field', 'dataclass-field',
-    'model-field', 'computed-field'}
-_FUNCTION_WITH_INNER_SCHEMA_TYPES = {'function-before', 'function-after',
-    'function-wrap'}
+
+_CORE_SCHEMA_FIELD_TYPES = {'typed-dict-field', 'dataclass-field', 'model-field', 'computed-field'}
+_FUNCTION_WITH_INNER_SCHEMA_TYPES = {'function-before', 'function-after', 'function-wrap'}
 _LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES = {'list', 'set', 'frozenset'}
+
 TAGGED_UNION_TAG_KEY = 'pydantic.internal.tagged_union_tag'
 """
 Used in a `Tag` schema to specify the tag used for a discriminated union.
@@ -31,39 +50,354 @@ schema was first encountered.
 """


-def get_type_ref(type_: type[Any], args_override: (tuple[type[Any], ...] |
-    None)=None) ->str:
+def is_core_schema(
+    schema: CoreSchemaOrField,
+) -> TypeGuard[CoreSchema]:
+    return schema['type'] not in _CORE_SCHEMA_FIELD_TYPES
+
+
+def is_core_schema_field(
+    schema: CoreSchemaOrField,
+) -> TypeGuard[CoreSchemaField]:
+    return schema['type'] in _CORE_SCHEMA_FIELD_TYPES
+
+
+def is_function_with_inner_schema(
+    schema: CoreSchemaOrField,
+) -> TypeGuard[FunctionSchemaWithInnerSchema]:
+    return schema['type'] in _FUNCTION_WITH_INNER_SCHEMA_TYPES
+
+
+def is_list_like_schema_with_items_schema(
+    schema: CoreSchema,
+) -> TypeGuard[core_schema.ListSchema | core_schema.SetSchema | core_schema.FrozenSetSchema]:
+    return schema['type'] in _LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES
+
+
+def get_type_ref(type_: type[Any], args_override: tuple[type[Any], ...] | None = None) -> str:
     """Produces the ref to be used for this type by pydantic_core's core schemas.

     This `args_override` argument was added for the purpose of creating valid recursive references
     when creating generic models without needing to create a concrete class.
     """
-    pass
+    origin = get_origin(type_) or type_

+    args = get_args(type_) if is_generic_alias(type_) else (args_override or ())
+    generic_metadata = getattr(type_, '__pydantic_generic_metadata__', None)
+    if generic_metadata:
+        origin = generic_metadata['origin'] or origin
+        args = generic_metadata['args'] or args

-def get_ref(s: core_schema.CoreSchema) ->(None | str):
+    module_name = getattr(origin, '__module__', '<No __module__>')
+    if isinstance(origin, TypeAliasType):
+        type_ref = f'{module_name}.{origin.__name__}:{id(origin)}'
+    else:
+        try:
+            qualname = getattr(origin, '__qualname__', f'<No __qualname__: {origin}>')
+        except Exception:
+            qualname = getattr(origin, '__qualname__', '<No __qualname__>')
+        type_ref = f'{module_name}.{qualname}:{id(origin)}'
+
+    arg_refs: list[str] = []
+    for arg in args:
+        if isinstance(arg, str):
+            # Handle string literals as a special case; we may be able to remove this special handling if we
+            # wrap them in a ForwardRef at some point.
+            arg_ref = f'{arg}:str-{id(arg)}'
+        else:
+            arg_ref = f'{_repr.display_as_type(arg)}:{id(arg)}'
+        arg_refs.append(arg_ref)
+    if arg_refs:
+        type_ref = f'{type_ref}[{",".join(arg_refs)}]'
+    return type_ref
+
+
+def get_ref(s: core_schema.CoreSchema) -> None | str:
     """Get the ref from the schema if it has one.
     This exists just for type checking to work correctly.
     """
-    pass
+    return s.get('ref', None)
+
+
+def collect_definitions(schema: core_schema.CoreSchema) -> dict[str, core_schema.CoreSchema]:
+    defs: dict[str, CoreSchema] = {}
+
+    def _record_valid_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
+        ref = get_ref(s)
+        if ref:
+            defs[ref] = s
+        return recurse(s, _record_valid_refs)
+
+    walk_core_schema(schema, _record_valid_refs)
+
+    return defs
+
+
+def define_expected_missing_refs(
+    schema: core_schema.CoreSchema, allowed_missing_refs: set[str]
+) -> core_schema.CoreSchema | None:
+    if not allowed_missing_refs:
+        # in this case, there are no missing refs to potentially substitute, so there's no need to walk the schema
+        # this is a common case (will be hit for all non-generic models), so it's worth optimizing for
+        return None
+
+    refs = collect_definitions(schema).keys()
+
+    expected_missing_refs = allowed_missing_refs.difference(refs)
+    if expected_missing_refs:
+        definitions: list[core_schema.CoreSchema] = [
+            # TODO: Replace this with a (new) CoreSchema that, if present at any level, makes validation fail
+            #   Issue: https://github.com/pydantic/pydantic-core/issues/619
+            core_schema.none_schema(ref=ref, metadata={HAS_INVALID_SCHEMAS_METADATA_KEY: True})
+            for ref in expected_missing_refs
+        ]
+        return core_schema.definitions_schema(schema, definitions)
+    return None
+
+
+def collect_invalid_schemas(schema: core_schema.CoreSchema) -> bool:
+    invalid = False
+
+    def _is_schema_valid(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
+        nonlocal invalid
+        if 'metadata' in s:
+            metadata = s['metadata']
+            if HAS_INVALID_SCHEMAS_METADATA_KEY in metadata:
+                invalid = metadata[HAS_INVALID_SCHEMAS_METADATA_KEY]
+                return s
+        return recurse(s, _is_schema_valid)
+
+    walk_core_schema(schema, _is_schema_valid)
+    return invalid


 T = TypeVar('T')
+
+
 Recurse = Callable[[core_schema.CoreSchema, 'Walk'], core_schema.CoreSchema]
 Walk = Callable[[core_schema.CoreSchema, Recurse], core_schema.CoreSchema]

+# TODO: Should we move _WalkCoreSchema into pydantic_core proper?
+#   Issue: https://github.com/pydantic/pydantic-core/issues/615

-class _WalkCoreSchema:

+class _WalkCoreSchema:
     def __init__(self):
         self._schema_type_to_method = self._build_schema_type_to_method()

+    def _build_schema_type_to_method(self) -> dict[core_schema.CoreSchemaType, Recurse]:
+        mapping: dict[core_schema.CoreSchemaType, Recurse] = {}
+        key: core_schema.CoreSchemaType
+        for key in get_args(core_schema.CoreSchemaType):
+            method_name = f"handle_{key.replace('-', '_')}_schema"
+            mapping[key] = getattr(self, method_name, self._handle_other_schemas)
+        return mapping
+
+    def walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
+        return f(schema, self._walk)
+
+    def _walk(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
+        schema = self._schema_type_to_method[schema['type']](schema.copy(), f)
+        ser_schema: core_schema.SerSchema | None = schema.get('serialization')  # type: ignore
+        if ser_schema:
+            schema['serialization'] = self._handle_ser_schemas(ser_schema, f)
+        return schema
+
+    def _handle_other_schemas(self, schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
+        sub_schema = schema.get('schema', None)
+        if sub_schema is not None:
+            schema['schema'] = self.walk(sub_schema, f)  # type: ignore
+        return schema
+
+    def _handle_ser_schemas(self, ser_schema: core_schema.SerSchema, f: Walk) -> core_schema.SerSchema:
+        schema: core_schema.CoreSchema | None = ser_schema.get('schema', None)
+        if schema is not None:
+            ser_schema['schema'] = self.walk(schema, f)  # type: ignore
+        return_schema: core_schema.CoreSchema | None = ser_schema.get('return_schema', None)
+        if return_schema is not None:
+            ser_schema['return_schema'] = self.walk(return_schema, f)  # type: ignore
+        return ser_schema
+
+    def handle_definitions_schema(self, schema: core_schema.DefinitionsSchema, f: Walk) -> core_schema.CoreSchema:
+        new_definitions: list[core_schema.CoreSchema] = []
+        for definition in schema['definitions']:
+            if 'schema_ref' in definition and 'ref' in definition:
+                # This indicates a purposely indirect reference
+                # We want to keep such references around for implications related to JSON schema, etc.:
+                new_definitions.append(definition)
+                # However, we still need to walk the referenced definition:
+                self.walk(definition, f)
+                continue
+
+            updated_definition = self.walk(definition, f)
+            if 'ref' in updated_definition:
+                # If the updated definition schema doesn't have a 'ref', it shouldn't go in the definitions
+                # This is most likely to happen due to replacing something with a definition reference, in
+                # which case it should certainly not go in the definitions list
+                new_definitions.append(updated_definition)
+        new_inner_schema = self.walk(schema['schema'], f)
+
+        if not new_definitions and len(schema) == 3:
+            # This means we'd be returning a "trivial" definitions schema that just wrapped the inner schema
+            return new_inner_schema
+
+        new_schema = schema.copy()
+        new_schema['schema'] = new_inner_schema
+        new_schema['definitions'] = new_definitions
+        return new_schema
+
+    def handle_list_schema(self, schema: core_schema.ListSchema, f: Walk) -> core_schema.CoreSchema:
+        items_schema = schema.get('items_schema')
+        if items_schema is not None:
+            schema['items_schema'] = self.walk(items_schema, f)
+        return schema
+
+    def handle_set_schema(self, schema: core_schema.SetSchema, f: Walk) -> core_schema.CoreSchema:
+        items_schema = schema.get('items_schema')
+        if items_schema is not None:
+            schema['items_schema'] = self.walk(items_schema, f)
+        return schema
+
+    def handle_frozenset_schema(self, schema: core_schema.FrozenSetSchema, f: Walk) -> core_schema.CoreSchema:
+        items_schema = schema.get('items_schema')
+        if items_schema is not None:
+            schema['items_schema'] = self.walk(items_schema, f)
+        return schema
+
+    def handle_generator_schema(self, schema: core_schema.GeneratorSchema, f: Walk) -> core_schema.CoreSchema:
+        items_schema = schema.get('items_schema')
+        if items_schema is not None:
+            schema['items_schema'] = self.walk(items_schema, f)
+        return schema
+
+    def handle_tuple_schema(self, schema: core_schema.TupleSchema, f: Walk) -> core_schema.CoreSchema:
+        schema['items_schema'] = [self.walk(v, f) for v in schema['items_schema']]
+        return schema
+
+    def handle_dict_schema(self, schema: core_schema.DictSchema, f: Walk) -> core_schema.CoreSchema:
+        keys_schema = schema.get('keys_schema')
+        if keys_schema is not None:
+            schema['keys_schema'] = self.walk(keys_schema, f)
+        values_schema = schema.get('values_schema')
+        if values_schema:
+            schema['values_schema'] = self.walk(values_schema, f)
+        return schema
+
+    def handle_function_schema(self, schema: AnyFunctionSchema, f: Walk) -> core_schema.CoreSchema:
+        if not is_function_with_inner_schema(schema):
+            return schema
+        schema['schema'] = self.walk(schema['schema'], f)
+        return schema
+
+    def handle_union_schema(self, schema: core_schema.UnionSchema, f: Walk) -> core_schema.CoreSchema:
+        new_choices: list[CoreSchema | tuple[CoreSchema, str]] = []
+        for v in schema['choices']:
+            if isinstance(v, tuple):
+                new_choices.append((self.walk(v[0], f), v[1]))
+            else:
+                new_choices.append(self.walk(v, f))
+        schema['choices'] = new_choices
+        return schema
+
+    def handle_tagged_union_schema(self, schema: core_schema.TaggedUnionSchema, f: Walk) -> core_schema.CoreSchema:
+        new_choices: dict[Hashable, core_schema.CoreSchema] = {}
+        for k, v in schema['choices'].items():
+            new_choices[k] = v if isinstance(v, (str, int)) else self.walk(v, f)
+        schema['choices'] = new_choices
+        return schema
+
+    def handle_chain_schema(self, schema: core_schema.ChainSchema, f: Walk) -> core_schema.CoreSchema:
+        schema['steps'] = [self.walk(v, f) for v in schema['steps']]
+        return schema
+
+    def handle_lax_or_strict_schema(self, schema: core_schema.LaxOrStrictSchema, f: Walk) -> core_schema.CoreSchema:
+        schema['lax_schema'] = self.walk(schema['lax_schema'], f)
+        schema['strict_schema'] = self.walk(schema['strict_schema'], f)
+        return schema
+
+    def handle_json_or_python_schema(self, schema: core_schema.JsonOrPythonSchema, f: Walk) -> core_schema.CoreSchema:
+        schema['json_schema'] = self.walk(schema['json_schema'], f)
+        schema['python_schema'] = self.walk(schema['python_schema'], f)
+        return schema
+
+    def handle_model_fields_schema(self, schema: core_schema.ModelFieldsSchema, f: Walk) -> core_schema.CoreSchema:
+        extras_schema = schema.get('extras_schema')
+        if extras_schema is not None:
+            schema['extras_schema'] = self.walk(extras_schema, f)
+        replaced_fields: dict[str, core_schema.ModelField] = {}
+        replaced_computed_fields: list[core_schema.ComputedField] = []
+        for computed_field in schema.get('computed_fields', ()):
+            replaced_field = computed_field.copy()
+            replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
+            replaced_computed_fields.append(replaced_field)
+        if replaced_computed_fields:
+            schema['computed_fields'] = replaced_computed_fields
+        for k, v in schema['fields'].items():
+            replaced_field = v.copy()
+            replaced_field['schema'] = self.walk(v['schema'], f)
+            replaced_fields[k] = replaced_field
+        schema['fields'] = replaced_fields
+        return schema
+
+    def handle_typed_dict_schema(self, schema: core_schema.TypedDictSchema, f: Walk) -> core_schema.CoreSchema:
+        extras_schema = schema.get('extras_schema')
+        if extras_schema is not None:
+            schema['extras_schema'] = self.walk(extras_schema, f)
+        replaced_computed_fields: list[core_schema.ComputedField] = []
+        for computed_field in schema.get('computed_fields', ()):
+            replaced_field = computed_field.copy()
+            replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
+            replaced_computed_fields.append(replaced_field)
+        if replaced_computed_fields:
+            schema['computed_fields'] = replaced_computed_fields
+        replaced_fields: dict[str, core_schema.TypedDictField] = {}
+        for k, v in schema['fields'].items():
+            replaced_field = v.copy()
+            replaced_field['schema'] = self.walk(v['schema'], f)
+            replaced_fields[k] = replaced_field
+        schema['fields'] = replaced_fields
+        return schema
+
+    def handle_dataclass_args_schema(self, schema: core_schema.DataclassArgsSchema, f: Walk) -> core_schema.CoreSchema:
+        replaced_fields: list[core_schema.DataclassField] = []
+        replaced_computed_fields: list[core_schema.ComputedField] = []
+        for computed_field in schema.get('computed_fields', ()):
+            replaced_field = computed_field.copy()
+            replaced_field['return_schema'] = self.walk(computed_field['return_schema'], f)
+            replaced_computed_fields.append(replaced_field)
+        if replaced_computed_fields:
+            schema['computed_fields'] = replaced_computed_fields
+        for field in schema['fields']:
+            replaced_field = field.copy()
+            replaced_field['schema'] = self.walk(field['schema'], f)
+            replaced_fields.append(replaced_field)
+        schema['fields'] = replaced_fields
+        return schema
+
+    def handle_arguments_schema(self, schema: core_schema.ArgumentsSchema, f: Walk) -> core_schema.CoreSchema:
+        replaced_arguments_schema: list[core_schema.ArgumentsParameter] = []
+        for param in schema['arguments_schema']:
+            replaced_param = param.copy()
+            replaced_param['schema'] = self.walk(param['schema'], f)
+            replaced_arguments_schema.append(replaced_param)
+        schema['arguments_schema'] = replaced_arguments_schema
+        if 'var_args_schema' in schema:
+            schema['var_args_schema'] = self.walk(schema['var_args_schema'], f)
+        if 'var_kwargs_schema' in schema:
+            schema['var_kwargs_schema'] = self.walk(schema['var_kwargs_schema'], f)
+        return schema
+
+    def handle_call_schema(self, schema: core_schema.CallSchema, f: Walk) -> core_schema.CoreSchema:
+        schema['arguments_schema'] = self.walk(schema['arguments_schema'], f)
+        if 'return_schema' in schema:
+            schema['return_schema'] = self.walk(schema['return_schema'], f)
+        return schema
+

 _dispatch = _WalkCoreSchema().walk


-def walk_core_schema(schema: core_schema.CoreSchema, f: Walk
-    ) ->core_schema.CoreSchema:
+def walk_core_schema(schema: core_schema.CoreSchema, f: Walk) -> core_schema.CoreSchema:
     """Recursively traverse a CoreSchema.

     Args:
@@ -77,11 +411,142 @@ def walk_core_schema(schema: core_schema.CoreSchema, f: Walk
     Returns:
         core_schema.CoreSchema: A processed CoreSchema.
     """
-    pass
+    return f(schema.copy(), _dispatch)
+
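
As a rough illustration of the `Walk` callback protocol (a sketch only; `walk_core_schema` is internal and the callback here is hypothetical): the callback receives the schema plus a `recurse` callable and decides whether and how to descend.

    from pydantic_core import core_schema

    from pydantic._internal._core_utils import walk_core_schema

    def make_ints_strict(s, recurse):
        # Descend first, then patch any int schema on the way back up.
        s = recurse(s, make_ints_strict)
        if s['type'] == 'int':
            s['strict'] = True
        return s

    schema = core_schema.list_schema(core_schema.int_schema())
    strict = walk_core_schema(schema, make_ints_strict)
    # strict['items_schema'] is now {'type': 'int', 'strict': True}
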

+def simplify_schema_references(schema: core_schema.CoreSchema) -> core_schema.CoreSchema:  # noqa: C901
+    definitions: dict[str, core_schema.CoreSchema] = {}
+    ref_counts: dict[str, int] = defaultdict(int)
+    involved_in_recursion: dict[str, bool] = {}
+    current_recursion_ref_count: dict[str, int] = defaultdict(int)

-def pretty_print_core_schema(schema: CoreSchema, include_metadata: bool=False
-    ) ->None:
+    def collect_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
+        if s['type'] == 'definitions':
+            for definition in s['definitions']:
+                ref = get_ref(definition)
+                assert ref is not None
+                if ref not in definitions:
+                    definitions[ref] = definition
+                recurse(definition, collect_refs)
+            return recurse(s['schema'], collect_refs)
+        else:
+            ref = get_ref(s)
+            if ref is not None:
+                new = recurse(s, collect_refs)
+                new_ref = get_ref(new)
+                if new_ref:
+                    definitions[new_ref] = new
+                return core_schema.definition_reference_schema(schema_ref=ref)
+            else:
+                return recurse(s, collect_refs)
+
+    schema = walk_core_schema(schema, collect_refs)
+
+    def count_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
+        if s['type'] != 'definition-ref':
+            return recurse(s, count_refs)
+        ref = s['schema_ref']
+        ref_counts[ref] += 1
+
+        if ref_counts[ref] >= 2:
+            # If this model is involved in a recursion, that is detected on its
+            # second encounter; we can safely stop the walk here.
+            if current_recursion_ref_count[ref] != 0:
+                involved_in_recursion[ref] = True
+            return s
+
+        current_recursion_ref_count[ref] += 1
+        recurse(definitions[ref], count_refs)
+        current_recursion_ref_count[ref] -= 1
+        return s
+
+    schema = walk_core_schema(schema, count_refs)
+
+    assert all(c == 0 for c in current_recursion_ref_count.values()), 'this is a bug! please report it'
+
+    def can_be_inlined(s: core_schema.DefinitionReferenceSchema, ref: str) -> bool:
+        if ref_counts[ref] > 1:
+            return False
+        if involved_in_recursion.get(ref, False):
+            return False
+        if 'serialization' in s:
+            return False
+        if 'metadata' in s:
+            metadata = s['metadata']
+            for k in (
+                'pydantic_js_functions',
+                'pydantic_js_annotation_functions',
+                'pydantic.internal.union_discriminator',
+            ):
+                if k in metadata:
+                    # we need to keep this as a ref
+                    return False
+        return True
+
+    def inline_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
+        if s['type'] == 'definition-ref':
+            ref = s['schema_ref']
+            # Check if the reference is only used once, not involved in recursion and does not have
+            # any extra keys (like 'serialization')
+            if can_be_inlined(s, ref):
+                # Inline the reference by replacing the reference with the actual schema
+                new = definitions.pop(ref)
+                ref_counts[ref] -= 1  # because we just replaced it!
+                # put all other keys that were on the def-ref schema into the inlined version
+                # in particular this is needed for `serialization`
+                if 'serialization' in s:
+                    new['serialization'] = s['serialization']
+                s = recurse(new, inline_refs)
+                return s
+            else:
+                return recurse(s, inline_refs)
+        else:
+            return recurse(s, inline_refs)
+
+    schema = walk_core_schema(schema, inline_refs)
+
+    def_values = [v for v in definitions.values() if ref_counts[v['ref']] > 0]  # type: ignore
+
+    if def_values:
+        schema = core_schema.definitions_schema(schema=schema, definitions=def_values)
+    return schema
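
A small before/after sketch of the simplification (internal import path assumed; pydantic_core may attach extra keys, so the result shown is approximate):

    from pydantic_core import core_schema

    from pydantic._internal._core_utils import simplify_schema_references

    schema = core_schema.definitions_schema(
        core_schema.definition_reference_schema(schema_ref='my-int'),
        [core_schema.int_schema(ref='my-int')],
    )
    simplified = simplify_schema_references(schema)
    # The reference is used once and is not recursive, so it gets inlined,
    # leaving roughly {'type': 'int', 'ref': 'my-int'} with no definitions wrapper.
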
+
+
+def _strip_metadata(schema: CoreSchema) -> CoreSchema:
+    def strip_metadata(s: CoreSchema, recurse: Recurse) -> CoreSchema:
+        s = s.copy()
+        s.pop('metadata', None)
+        if s['type'] == 'model-fields':
+            s = s.copy()
+            s['fields'] = {k: v.copy() for k, v in s['fields'].items()}
+            for field_name, field_schema in s['fields'].items():
+                field_schema.pop('metadata', None)
+                s['fields'][field_name] = field_schema
+            computed_fields = s.get('computed_fields', None)
+            if computed_fields:
+                s['computed_fields'] = [cf.copy() for cf in computed_fields]
+                for cf in computed_fields:
+                    cf.pop('metadata', None)
+            else:
+                s.pop('computed_fields', None)
+        elif s['type'] == 'model':
+            # remove some defaults
+            if s.get('custom_init', True) is False:
+                s.pop('custom_init')
+            if s.get('root_model', True) is False:
+                s.pop('root_model')
+            if {'title'}.issuperset(s.get('config', {}).keys()):
+                s.pop('config', None)
+
+        return recurse(s, strip_metadata)
+
+    return walk_core_schema(schema, strip_metadata)
+
+
+def pretty_print_core_schema(
+    schema: CoreSchema,
+    include_metadata: bool = False,
+) -> None:
     """Pretty print a CoreSchema using rich.
     This is intended for debugging purposes.

@@ -89,4 +554,15 @@ def pretty_print_core_schema(schema: CoreSchema, include_metadata: bool=False
         schema: The CoreSchema to print.
         include_metadata: Whether to include metadata in the output. Defaults to `False`.
     """
-    pass
+    from rich import print  # type: ignore  # install it manually in your dev env
+
+    if not include_metadata:
+        schema = _strip_metadata(schema)
+
+    return print(schema)
+
+
+def validate_core_schema(schema: CoreSchema) -> CoreSchema:
+    if 'PYDANTIC_SKIP_VALIDATING_CORE_SCHEMAS' in os.environ:
+        return schema
+    return _validate_core_schema(schema)
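
Typical debugging usage might look like the sketch below (rich must be installed, and both helpers are internal, so the import path is an assumption rather than public API):

    from pydantic import BaseModel

    from pydantic._internal._core_utils import pretty_print_core_schema

    class User(BaseModel):
        name: str

    # Prints the generated core schema with metadata stripped for readability.
    pretty_print_core_schema(User.__pydantic_core_schema__)

Setting the PYDANTIC_SKIP_VALIDATING_CORE_SCHEMAS environment variable turns validate_core_schema into a pass-through, which skips the extra validation step while iterating on schema-generation code.
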
diff --git a/pydantic/_internal/_dataclasses.py b/pydantic/_internal/_dataclasses.py
index 35f40c504..73272922a 100644
--- a/pydantic/_internal/_dataclasses.py
+++ b/pydantic/_internal/_dataclasses.py
@@ -1,12 +1,21 @@
 """Private logic for creating pydantic dataclasses."""
+
 from __future__ import annotations as _annotations
+
 import dataclasses
 import typing
 import warnings
 from functools import partial, wraps
 from typing import Any, Callable, ClassVar
-from pydantic_core import ArgsKwargs, SchemaSerializer, SchemaValidator, core_schema
+
+from pydantic_core import (
+    ArgsKwargs,
+    SchemaSerializer,
+    SchemaValidator,
+    core_schema,
+)
 from typing_extensions import TypeGuard
+
 from ..errors import PydanticUndefinedAnnotation
 from ..fields import FieldInfo
 from ..plugin._schema_validator import PluggableSchemaValidator, create_schema_validator
@@ -18,19 +27,18 @@ from ._generics import get_standard_typevars_map
 from ._mock_val_ser import set_dataclass_mocks
 from ._schema_generation_shared import CallbackGetCoreSchemaHandler
 from ._signature import generate_pydantic_signature
+
 if typing.TYPE_CHECKING:
     from ..config import ConfigDict

-
     class StandardDataclass(typing.Protocol):
         __dataclass_fields__: ClassVar[dict[str, Any]]
-        __dataclass_params__: ClassVar[Any]
+        __dataclass_params__: ClassVar[Any]  # in reality `dataclasses._DataclassParams`
         __post_init__: ClassVar[Callable[..., None]]

-        def __init__(self, *args: object, **kwargs: object) ->None:
+        def __init__(self, *args: object, **kwargs: object) -> None:
             pass

-
     class PydanticDataclass(StandardDataclass, typing.Protocol):
         """A protocol containing attributes only available once a class has been decorated as a Pydantic dataclass.

@@ -43,21 +51,26 @@ if typing.TYPE_CHECKING:
             __pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the dataclass.
             __pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the dataclass.
         """
+
         __pydantic_config__: ClassVar[ConfigDict]
         __pydantic_complete__: ClassVar[bool]
         __pydantic_core_schema__: ClassVar[core_schema.CoreSchema]
         __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos]
         __pydantic_fields__: ClassVar[dict[str, FieldInfo]]
         __pydantic_serializer__: ClassVar[SchemaSerializer]
-        __pydantic_validator__: ClassVar[SchemaValidator |
-            PluggableSchemaValidator]
+        __pydantic_validator__: ClassVar[SchemaValidator | PluggableSchemaValidator]
+
 else:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
     DeprecationWarning = PydanticDeprecatedSince20


-def set_dataclass_fields(cls: type[StandardDataclass], types_namespace: (
-    dict[str, Any] | None)=None, config_wrapper: (_config.ConfigWrapper |
-    None)=None) ->None:
+def set_dataclass_fields(
+    cls: type[StandardDataclass],
+    types_namespace: dict[str, Any] | None = None,
+    config_wrapper: _config.ConfigWrapper | None = None,
+) -> None:
     """Collect and set `cls.__pydantic_fields__`.

     Args:
@@ -65,12 +78,19 @@ def set_dataclass_fields(cls: type[StandardDataclass], types_namespace: (
         types_namespace: The types namespace, defaults to `None`.
         config_wrapper: The config wrapper instance, defaults to `None`.
     """
-    pass
+    typevars_map = get_standard_typevars_map(cls)
+    fields = collect_dataclass_fields(cls, types_namespace, typevars_map=typevars_map, config_wrapper=config_wrapper)

+    cls.__pydantic_fields__ = fields  # type: ignore

-def complete_dataclass(cls: type[Any], config_wrapper: _config.
-    ConfigWrapper, *, raise_errors: bool=True, types_namespace: (dict[str,
-    Any] | None)) ->bool:
+
+def complete_dataclass(
+    cls: type[Any],
+    config_wrapper: _config.ConfigWrapper,
+    *,
+    raise_errors: bool = True,
+    types_namespace: dict[str, Any] | None,
+) -> bool:
     """Finish building a pydantic dataclass.

     This logic is called on a class which has already been wrapped in `dataclasses.dataclass()`.
@@ -89,10 +109,92 @@ def complete_dataclass(cls: type[Any], config_wrapper: _config.
     Raises:
         PydanticUndefinedAnnotation: If `raise_errors` is `True` and there is an undefined annotation.
     """
-    pass
-
-
-def is_builtin_dataclass(_cls: type[Any]) ->TypeGuard[type[StandardDataclass]]:
+    if hasattr(cls, '__post_init_post_parse__'):
+        warnings.warn(
+            'Support for `__post_init_post_parse__` has been dropped, the method will not be called', DeprecationWarning
+        )
+
+    if types_namespace is None:
+        types_namespace = _typing_extra.get_cls_types_namespace(cls)
+
+    set_dataclass_fields(cls, types_namespace, config_wrapper=config_wrapper)
+
+    typevars_map = get_standard_typevars_map(cls)
+    gen_schema = GenerateSchema(
+        config_wrapper,
+        types_namespace,
+        typevars_map,
+    )
+
+    # This needs to be called before we change the __init__
+    sig = generate_pydantic_signature(
+        init=cls.__init__,
+        fields=cls.__pydantic_fields__,  # type: ignore
+        config_wrapper=config_wrapper,
+        is_dataclass=True,
+    )
+
+    # dataclass.__init__ must be defined here so its `__qualname__` can be changed since functions can't be copied.
+    def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None:
+        __tracebackhide__ = True
+        s = __dataclass_self__
+        s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s)
+
+    __init__.__qualname__ = f'{cls.__qualname__}.__init__'
+
+    cls.__init__ = __init__  # type: ignore
+    cls.__pydantic_config__ = config_wrapper.config_dict  # type: ignore
+    cls.__signature__ = sig  # type: ignore
+    get_core_schema = getattr(cls, '__get_pydantic_core_schema__', None)
+    try:
+        if get_core_schema:
+            schema = get_core_schema(
+                cls,
+                CallbackGetCoreSchemaHandler(
+                    partial(gen_schema.generate_schema, from_dunder_get_core_schema=False),
+                    gen_schema,
+                    ref_mode='unpack',
+                ),
+            )
+        else:
+            schema = gen_schema.generate_schema(cls, from_dunder_get_core_schema=False)
+    except PydanticUndefinedAnnotation as e:
+        if raise_errors:
+            raise
+        set_dataclass_mocks(cls, cls.__name__, f'`{e.name}`')
+        return False
+
+    core_config = config_wrapper.core_config(cls)
+
+    try:
+        schema = gen_schema.clean_schema(schema)
+    except gen_schema.CollectedInvalid:
+        set_dataclass_mocks(cls, cls.__name__, 'all referenced types')
+        return False
+
+    # We are about to set all the remaining required properties expected for this cast;
+    # __pydantic_decorators__ and __pydantic_fields__ should already be set
+    cls = typing.cast('type[PydanticDataclass]', cls)
+    # debug(schema)
+
+    cls.__pydantic_core_schema__ = schema
+    cls.__pydantic_validator__ = validator = create_schema_validator(
+        schema, cls, cls.__module__, cls.__qualname__, 'dataclass', core_config, config_wrapper.plugin_settings
+    )
+    cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config)
+
+    if config_wrapper.validate_assignment:
+
+        @wraps(cls.__setattr__)
+        def validated_setattr(instance: Any, field: str, value: Any, /) -> None:
+            validator.validate_assignment(instance, field, value)
+
+        cls.__setattr__ = validated_setattr.__get__(None, cls)  # type: ignore
+
+    return True
+
+
+def is_builtin_dataclass(_cls: type[Any]) -> TypeGuard[type[StandardDataclass]]:
     """Returns True if a class is a stdlib dataclass and *not* a pydantic dataclass.

     We check that
@@ -122,4 +224,8 @@ def is_builtin_dataclass(_cls: type[Any]) ->TypeGuard[type[StandardDataclass]]:
     Returns:
         `True` if the class is a stdlib dataclass, `False` otherwise.
     """
-    pass
+    return (
+        dataclasses.is_dataclass(_cls)
+        and not hasattr(_cls, '__pydantic_validator__')
+        and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {})))
+    )
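
A quick sketch of the distinction this predicate draws, using hypothetical classes (the helper itself is internal to pydantic._internal._dataclasses):

    import dataclasses

    from pydantic.dataclasses import dataclass as pydantic_dataclass

    from pydantic._internal._dataclasses import is_builtin_dataclass

    @dataclasses.dataclass
    class Plain:
        x: int

    @pydantic_dataclass
    class Validated:
        x: int

    assert is_builtin_dataclass(Plain) is True
    assert is_builtin_dataclass(Validated) is False  # it carries __pydantic_validator__
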
diff --git a/pydantic/_internal/_decorators.py b/pydantic/_internal/_decorators.py
index cc38227b5..66db2184a 100644
--- a/pydantic/_internal/_decorators.py
+++ b/pydantic/_internal/_decorators.py
@@ -1,17 +1,22 @@
 """Logic related to validators applied to models etc. via the `@field_validator` and `@model_validator` decorators."""
+
 from __future__ import annotations as _annotations
+
 from collections import deque
 from dataclasses import dataclass, field
 from functools import cached_property, partial, partialmethod
 from inspect import Parameter, Signature, isdatadescriptor, ismethoddescriptor, signature
 from itertools import islice
 from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, Iterable, TypeVar, Union
+
 from pydantic_core import PydanticUndefined, core_schema
 from typing_extensions import Literal, TypeAlias, is_typeddict
+
 from ..errors import PydanticUserError
 from ._core_utils import get_type_ref
 from ._internal_dataclass import slots_true
 from ._typing_extra import get_function_type_hints
+
 if TYPE_CHECKING:
     from ..fields import ComputedFieldInfo
     from ..functional_validators import FieldValidatorModes
@@ -31,7 +36,9 @@ class ValidatorDecoratorInfo:
         always: Whether this method and other validators should be called even if the value is missing.
         check_fields: Whether to check that the fields actually exist on the model.
     """
+
     decorator_repr: ClassVar[str] = '@validator'
+
     fields: tuple[str, ...]
     mode: Literal['before', 'after']
     each_item: bool
@@ -50,7 +57,9 @@ class FieldValidatorDecoratorInfo:
         mode: The proposed validator mode.
         check_fields: Whether to check that the fields actually exist on the model.
     """
+
     decorator_repr: ClassVar[str] = '@field_validator'
+
     fields: tuple[str, ...]
     mode: FieldValidatorModes
     check_fields: bool | None
@@ -65,6 +74,7 @@ class RootValidatorDecoratorInfo:
         decorator_repr: A class variable representing the decorator string, '@root_validator'.
         mode: The proposed validator mode.
     """
+
     decorator_repr: ClassVar[str] = '@root_validator'
     mode: Literal['before', 'after']

@@ -83,6 +93,7 @@ class FieldSerializerDecoratorInfo:
             and `'json-unless-none'`.
         check_fields: Whether to check that the fields actually exist on the model.
     """
+
     decorator_repr: ClassVar[str] = '@field_serializer'
     fields: tuple[str, ...]
     mode: Literal['plain', 'wrap']
@@ -103,6 +114,7 @@ class ModelSerializerDecoratorInfo:
         when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`,
             and `'json-unless-none'`.
     """
+
     decorator_repr: ClassVar[str] = '@model_serializer'
     mode: Literal['plain', 'wrap']
     return_type: Any
@@ -118,6 +130,7 @@ class ModelValidatorDecoratorInfo:
         decorator_repr: A class variable representing the decorator string, '@model_serializer'.
         mode: The proposed serializer mode.
     """
+
     decorator_repr: ClassVar[str] = '@model_validator'
     mode: Literal['wrap', 'before', 'after']

@@ -131,13 +144,14 @@ DecoratorInfo: TypeAlias = """Union[
     ModelValidatorDecoratorInfo,
     ComputedFieldInfo,
 ]"""
+
 ReturnType = TypeVar('ReturnType')
 DecoratedType: TypeAlias = (
     'Union[classmethod[Any, Any, ReturnType], staticmethod[Any, ReturnType], Callable[..., ReturnType], property]'
-    )
+)


-@dataclass
+@dataclass  # can't use slots here since we set attributes on `__post_init__`
 class PydanticDescriptorProxy(Generic[ReturnType]):
     """Wrap a classmethod, staticmethod, property or unbound function
     and act as a descriptor that allows us to detect decorated items
@@ -151,28 +165,33 @@ class PydanticDescriptorProxy(Generic[ReturnType]):
         decorator_info: The decorator info.
         shim: A wrapper function to wrap V1 style function.
     """
+
     wrapped: DecoratedType[ReturnType]
     decorator_info: DecoratorInfo
     shim: Callable[[Callable[..., Any]], Callable[..., Any]] | None = None

     def __post_init__(self):
-        for attr in ('setter', 'deleter'):
+        for attr in 'setter', 'deleter':
             if hasattr(self.wrapped, attr):
                 f = partial(self._call_wrapped_attr, name=attr)
                 setattr(self, attr, f)

-    def __get__(self, obj: (object | None), obj_type: (type[object] | None)
-        =None) ->PydanticDescriptorProxy[ReturnType]:
+    def _call_wrapped_attr(self, func: Callable[[Any], None], *, name: str) -> PydanticDescriptorProxy[ReturnType]:
+        self.wrapped = getattr(self.wrapped, name)(func)
+        return self
+
+    def __get__(self, obj: object | None, obj_type: type[object] | None = None) -> PydanticDescriptorProxy[ReturnType]:
         try:
             return self.wrapped.__get__(obj, obj_type)
         except AttributeError:
-            return self.wrapped
+            # not a descriptor, e.g. a partial object
+            return self.wrapped  # type: ignore[return-value]

-    def __set_name__(self, instance: Any, name: str) ->None:
+    def __set_name__(self, instance: Any, name: str) -> None:
         if hasattr(self.wrapped, '__set_name__'):
-            self.wrapped.__set_name__(instance, name)
+            self.wrapped.__set_name__(instance, name)  # pyright: ignore[reportFunctionMemberAccess]

-    def __getattr__(self, __name: str) ->Any:
+    def __getattr__(self, __name: str) -> Any:
         """Forward checks for __isabstractmethod__ and such."""
         return getattr(self.wrapped, __name)

@@ -194,6 +213,7 @@ class Decorator(Generic[DecoratorInfoType]):
         shim: A wrapper function to wrap V1 style function.
         info: The decorator info.
     """
+
     cls_ref: str
     cls_var_name: str
     func: Callable[..., Any]
@@ -201,8 +221,13 @@ class Decorator(Generic[DecoratorInfoType]):
     info: DecoratorInfoType

     @staticmethod
-    def build(cls_: Any, *, cls_var_name: str, shim: (Callable[[Any], Any] |
-        None), info: DecoratorInfoType) ->Decorator[DecoratorInfoType]:
+    def build(
+        cls_: Any,
+        *,
+        cls_var_name: str,
+        shim: Callable[[Any], Any] | None,
+        info: DecoratorInfoType,
+    ) -> Decorator[DecoratorInfoType]:
         """Build a new decorator.

         Args:
@@ -214,9 +239,24 @@ class Decorator(Generic[DecoratorInfoType]):
         Returns:
             The new decorator instance.
         """
-        pass
-
-    def bind_to_cls(self, cls: Any) ->Decorator[DecoratorInfoType]:
+        func = get_attribute_from_bases(cls_, cls_var_name)
+        if shim is not None:
+            func = shim(func)
+        func = unwrap_wrapped_function(func, unwrap_partial=False)
+        if not callable(func):
+            # This branch will get hit for classmethod properties
+            attribute = get_attribute_from_base_dicts(cls_, cls_var_name)  # prevents the binding call to `__get__`
+            if isinstance(attribute, PydanticDescriptorProxy):
+                func = unwrap_wrapped_function(attribute.wrapped)
+        return Decorator(
+            cls_ref=get_type_ref(cls_),
+            cls_var_name=cls_var_name,
+            func=func,
+            shim=shim,
+            info=info,
+        )
+
+    def bind_to_cls(self, cls: Any) -> Decorator[DecoratorInfoType]:
         """Bind the decorator to a class.

         Args:
@@ -225,10 +265,15 @@ class Decorator(Generic[DecoratorInfoType]):
         Returns:
             The new decorator instance.
         """
-        pass
+        return self.build(
+            cls,
+            cls_var_name=self.cls_var_name,
+            shim=self.shim,
+            info=self.info,
+        )


-def get_bases(tp: type[Any]) ->tuple[type[Any], ...]:
+def get_bases(tp: type[Any]) -> tuple[type[Any], ...]:
     """Get the base classes of a class or typeddict.

     Args:
@@ -237,22 +282,64 @@ def get_bases(tp: type[Any]) ->tuple[type[Any], ...]:
     Returns:
         The base classes.
     """
-    pass
+    if is_typeddict(tp):
+        return tp.__orig_bases__  # type: ignore
+    try:
+        return tp.__bases__
+    except AttributeError:
+        return ()


-def mro(tp: type[Any]) ->tuple[type[Any], ...]:
+def mro(tp: type[Any]) -> tuple[type[Any], ...]:
     """Calculate the Method Resolution Order of bases using the C3 algorithm.

     See https://www.python.org/download/releases/2.3/mro/
     """
-    pass
+    # try to use the existing mro, for performance mainly
+    # but also because it helps verify the implementation below
+    if not is_typeddict(tp):
+        try:
+            return tp.__mro__
+        except AttributeError:
+            # GenericAlias and some other cases
+            pass
+
+    bases = get_bases(tp)
+    return (tp,) + mro_for_bases(bases)
+
+
+def mro_for_bases(bases: tuple[type[Any], ...]) -> tuple[type[Any], ...]:
+    def merge_seqs(seqs: list[deque[type[Any]]]) -> Iterable[type[Any]]:
+        while True:
+            non_empty = [seq for seq in seqs if seq]
+            if not non_empty:
+                # Nothing left to process, we're done.
+                return
+            candidate: type[Any] | None = None
+            for seq in non_empty:  # Find merge candidates among seq heads.
+                candidate = seq[0]
+                not_head = [s for s in non_empty if candidate in islice(s, 1, None)]
+                if not_head:
+                    # Reject the candidate.
+                    candidate = None
+                else:
+                    break
+            if not candidate:
+                raise TypeError('Inconsistent hierarchy, no C3 MRO is possible')
+            yield candidate
+            for seq in non_empty:
+                # Remove candidate.
+                if seq[0] == candidate:
+                    seq.popleft()
+
+    seqs = [deque(mro(base)) for base in bases] + [deque(bases)]
+    return tuple(merge_seqs(seqs))
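
To make the merge concrete, a classic diamond (hypothetical classes) linearizes as follows; for ordinary classes `mro` simply returns `__mro__`, and `mro_for_bases` is the piece used for typed dicts, which have no MRO of their own:

    from pydantic._internal._decorators import mro, mro_for_bases

    class A: ...
    class B(A): ...
    class C(A): ...
    class D(B, C): ...

    assert mro(D) == D.__mro__  # (D, B, C, A, object)
    assert mro_for_bases((B, C)) == (B, C, A, object)
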


 _sentinel = object()


-def get_attribute_from_bases(tp: (type[Any] | tuple[type[Any], ...]), name: str
-    ) ->Any:
+def get_attribute_from_bases(tp: type[Any] | tuple[type[Any], ...], name: str) -> Any:
     """Get the attribute from the next class in the MRO that has it,
     aiming to simulate calling the method on the actual class.

@@ -271,10 +358,23 @@ def get_attribute_from_bases(tp: (type[Any] | tuple[type[Any], ...]), name: str
     Raises:
         AttributeError: If the attribute is not found in any class in the MRO.
     """
-    pass
+    if isinstance(tp, tuple):
+        for base in mro_for_bases(tp):
+            attribute = base.__dict__.get(name, _sentinel)
+            if attribute is not _sentinel:
+                attribute_get = getattr(attribute, '__get__', None)
+                if attribute_get is not None:
+                    return attribute_get(None, tp)
+                return attribute
+        raise AttributeError(f'{name} not found in {tp}')
+    else:
+        try:
+            return getattr(tp, name)
+        except AttributeError:
+            return get_attribute_from_bases(mro(tp), name)


-def get_attribute_from_base_dicts(tp: type[Any], name: str) ->Any:
+def get_attribute_from_base_dicts(tp: type[Any], name: str) -> Any:
     """Get an attribute out of the `__dict__` following the MRO.
     This prevents the call to `__get__` on the descriptor, and allows
     us to get the original function for classmethod properties.
@@ -289,7 +389,10 @@ def get_attribute_from_base_dicts(tp: type[Any], name: str) ->Any:
     Raises:
         KeyError: If the attribute is not found in any class's `__dict__` in the MRO.
     """
-    pass
+    for base in reversed(mro(tp)):
+        if name in base.__dict__:
+            return base.__dict__[name]
+    return tp.__dict__[name]  # not found in any base; let this raise the KeyError


 @dataclass(**slots_true)
@@ -299,23 +402,17 @@ class DecoratorInfos:
     note that the name in the class namespace is the function or attribute name
     not the field name!
     """
-    validators: dict[str, Decorator[ValidatorDecoratorInfo]] = field(
-        default_factory=dict)
-    field_validators: dict[str, Decorator[FieldValidatorDecoratorInfo]
-        ] = field(default_factory=dict)
-    root_validators: dict[str, Decorator[RootValidatorDecoratorInfo]] = field(
-        default_factory=dict)
-    field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]
-        ] = field(default_factory=dict)
-    model_serializers: dict[str, Decorator[ModelSerializerDecoratorInfo]
-        ] = field(default_factory=dict)
-    model_validators: dict[str, Decorator[ModelValidatorDecoratorInfo]
-        ] = field(default_factory=dict)
-    computed_fields: dict[str, Decorator[ComputedFieldInfo]] = field(
-        default_factory=dict)
+
+    validators: dict[str, Decorator[ValidatorDecoratorInfo]] = field(default_factory=dict)
+    field_validators: dict[str, Decorator[FieldValidatorDecoratorInfo]] = field(default_factory=dict)
+    root_validators: dict[str, Decorator[RootValidatorDecoratorInfo]] = field(default_factory=dict)
+    field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]] = field(default_factory=dict)
+    model_serializers: dict[str, Decorator[ModelSerializerDecoratorInfo]] = field(default_factory=dict)
+    model_validators: dict[str, Decorator[ModelValidatorDecoratorInfo]] = field(default_factory=dict)
+    computed_fields: dict[str, Decorator[ComputedFieldInfo]] = field(default_factory=dict)

     @staticmethod
-    def build(model_dc: type[Any]) ->DecoratorInfos:
+    def build(model_dc: type[Any]) -> DecoratorInfos:  # noqa: C901 (ignore complexity)
         """We want to collect all DecFunc instances that exist as
         attributes in the namespace of the class (a BaseModel or dataclass)
         that called us
@@ -328,11 +425,83 @@ class DecoratorInfos:
         If we do replace any functions, we put the replacement into the position
         the replaced function was in; that is, we maintain the order.
         """
-        pass
-
-
-def inspect_validator(validator: Callable[..., Any], mode: FieldValidatorModes
-    ) ->bool:
+        # reminder: dicts are ordered and replacement does not alter the order
+        res = DecoratorInfos()
+        for base in reversed(mro(model_dc)[1:]):
+            existing: DecoratorInfos | None = base.__dict__.get('__pydantic_decorators__')
+            if existing is None:
+                existing = DecoratorInfos.build(base)
+            res.validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.validators.items()})
+            res.field_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_validators.items()})
+            res.root_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.root_validators.items()})
+            res.field_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.field_serializers.items()})
+            res.model_serializers.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_serializers.items()})
+            res.model_validators.update({k: v.bind_to_cls(model_dc) for k, v in existing.model_validators.items()})
+            res.computed_fields.update({k: v.bind_to_cls(model_dc) for k, v in existing.computed_fields.items()})
+
+        to_replace: list[tuple[str, Any]] = []
+
+        for var_name, var_value in vars(model_dc).items():
+            if isinstance(var_value, PydanticDescriptorProxy):
+                info = var_value.decorator_info
+                if isinstance(info, ValidatorDecoratorInfo):
+                    res.validators[var_name] = Decorator.build(
+                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+                    )
+                elif isinstance(info, FieldValidatorDecoratorInfo):
+                    res.field_validators[var_name] = Decorator.build(
+                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+                    )
+                elif isinstance(info, RootValidatorDecoratorInfo):
+                    res.root_validators[var_name] = Decorator.build(
+                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+                    )
+                elif isinstance(info, FieldSerializerDecoratorInfo):
+                    # check whether a serializer function is already registered for fields
+                    for field_serializer_decorator in res.field_serializers.values():
+                        # check that each field has at most one serializer function.
+                        # serializer functions for the same field in subclasses are allowed,
+                        # and are treated as overrides
+                        if field_serializer_decorator.cls_var_name == var_name:
+                            continue
+                        for f in info.fields:
+                            if f in field_serializer_decorator.info.fields:
+                                raise PydanticUserError(
+                                    'Multiple field serializer functions were defined '
+                                    f'for field {f!r}, this is not allowed.',
+                                    code='multiple-field-serializers',
+                                )
+                    res.field_serializers[var_name] = Decorator.build(
+                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+                    )
+                elif isinstance(info, ModelValidatorDecoratorInfo):
+                    res.model_validators[var_name] = Decorator.build(
+                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+                    )
+                elif isinstance(info, ModelSerializerDecoratorInfo):
+                    res.model_serializers[var_name] = Decorator.build(
+                        model_dc, cls_var_name=var_name, shim=var_value.shim, info=info
+                    )
+                else:
+                    from ..fields import ComputedFieldInfo
+
+                    assert isinstance(info, ComputedFieldInfo), f'unexpected decorator info: {info!r}'
+                    res.computed_fields[var_name] = Decorator.build(
+                        model_dc, cls_var_name=var_name, shim=None, info=info
+                    )
+                to_replace.append((var_name, var_value.wrapped))
+        if to_replace:
+            # If we can save `__pydantic_decorators__` on the class we'll be able to check for it above
+            # so then we don't need to re-process the type, which means we can discard our descriptor wrappers
+            # and replace them with the thing they are wrapping (see the other setattr call below)
+            # which allows validator class methods to also function as regular class methods
+            setattr(model_dc, '__pydantic_decorators__', res)
+            for name, value in to_replace:
+                setattr(model_dc, name, value)
+        return res
+
+
+def inspect_validator(validator: Callable[..., Any], mode: FieldValidatorModes) -> bool:
     """Look at a field or model validator function and determine whether it takes an info argument.

     An error is raised if the function has an invalid signature.
@@ -344,11 +513,34 @@ def inspect_validator(validator: Callable[..., Any], mode: FieldValidatorModes
     Returns:
         Whether the validator takes an info argument.
     """
-    pass
+    try:
+        sig = signature(validator)
+    except (ValueError, TypeError):
+        # `inspect.signature` might not be able to infer a signature, e.g. with C objects.
+        # In this case, we assume no info argument is present:
+        return False
+    n_positional = count_positional_required_params(sig)
+    if mode == 'wrap':
+        if n_positional == 3:
+            return True
+        elif n_positional == 2:
+            return False
+    else:
+        assert mode in {'before', 'after', 'plain'}, f"invalid mode: {mode!r}, expected 'before', 'after' or 'plain'"
+        if n_positional == 2:
+            return True
+        elif n_positional == 1:
+            return False
+
+    raise PydanticUserError(
+        f'Unrecognized field_validator function signature for {validator} with `mode={mode}`:{sig}',
+        code='validator-signature',
+    )
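
In practice the decision comes down to counting required positional parameters; a sketch with hypothetical validators:

    from pydantic._internal._decorators import inspect_validator

    def double(value):
        return value * 2

    def double_with_info(value, info):
        return value * 2

    assert inspect_validator(double, mode='after') is False        # no info argument expected
    assert inspect_validator(double_with_info, mode='after') is True
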


-def inspect_field_serializer(serializer: Callable[..., Any], mode: Literal[
-    'plain', 'wrap'], computed_field: bool=False) ->tuple[bool, bool]:
+def inspect_field_serializer(
+    serializer: Callable[..., Any], mode: Literal['plain', 'wrap'], computed_field: bool = False
+) -> tuple[bool, bool]:
     """Look at a field serializer function and determine if it is a field serializer,
     and whether it takes an info argument.

@@ -363,11 +555,38 @@ def inspect_field_serializer(serializer: Callable[..., Any], mode: Literal[
     Returns:
         Tuple of (is_field_serializer, info_arg).
     """
-    pass
-
-
-def inspect_annotated_serializer(serializer: Callable[..., Any], mode:
-    Literal['plain', 'wrap']) ->bool:
+    try:
+        sig = signature(serializer)
+    except (ValueError, TypeError):
+        # `inspect.signature` might not be able to infer a signature, e.g. with C objects.
+        # In this case, we assume no info argument is present and this is not a method:
+        return (False, False)
+
+    first = next(iter(sig.parameters.values()), None)
+    is_field_serializer = first is not None and first.name == 'self'
+
+    n_positional = count_positional_required_params(sig)
+    if is_field_serializer:
+        # -1 to correct for self parameter
+        info_arg = _serializer_info_arg(mode, n_positional - 1)
+    else:
+        info_arg = _serializer_info_arg(mode, n_positional)
+
+    if info_arg is None:
+        raise PydanticUserError(
+            f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}',
+            code='field-serializer-signature',
+        )
+    if info_arg and computed_field:
+        raise PydanticUserError(
+            'field_serializer on computed_field does not use info signature',
+            code='field-serializer-signature',
+        )
+
+    return is_field_serializer, info_arg
+
+
+def inspect_annotated_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool:
     """Look at a serializer function used via `Annotated` and determine whether it takes an info argument.

     An error is raised if the function has an invalid signature.
@@ -379,11 +598,23 @@ def inspect_annotated_serializer(serializer: Callable[..., Any], mode:
     Returns:
         info_arg
     """
-    pass
-
-
-def inspect_model_serializer(serializer: Callable[..., Any], mode: Literal[
-    'plain', 'wrap']) ->bool:
+    try:
+        sig = signature(serializer)
+    except (ValueError, TypeError):
+        # `inspect.signature` might not be able to infer a signature, e.g. with C objects.
+        # In this case, we assume no info argument is present:
+        return False
+    info_arg = _serializer_info_arg(mode, count_positional_required_params(sig))
+    if info_arg is None:
+        raise PydanticUserError(
+            f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`:{sig}',
+            code='field-serializer-signature',
+        )
+    else:
+        return info_arg
+
+
+def inspect_model_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool:
     """Look at a model serializer function and determine whether it takes an info argument.

     An error is raised if the function has an invalid signature.
@@ -395,15 +626,48 @@ def inspect_model_serializer(serializer: Callable[..., Any], mode: Literal[
     Returns:
         `info_arg` - whether the function expects an info argument.
     """
-    pass
+    if isinstance(serializer, (staticmethod, classmethod)) or not is_instance_method_from_sig(serializer):
+        raise PydanticUserError(
+            '`@model_serializer` must be applied to instance methods', code='model-serializer-instance-method'
+        )
+
+    sig = signature(serializer)
+    info_arg = _serializer_info_arg(mode, count_positional_required_params(sig))
+    if info_arg is None:
+        raise PydanticUserError(
+            f'Unrecognized model_serializer function signature for {serializer} with `mode={mode}`:{sig}',
+            code='model-serializer-signature',
+        )
+    else:
+        return info_arg
+
+
+def _serializer_info_arg(mode: Literal['plain', 'wrap'], n_positional: int) -> bool | None:
+    if mode == 'plain':
+        if n_positional == 1:
+            # (input_value: Any, /) -> Any
+            return False
+        elif n_positional == 2:
+            # (model: Any, input_value: Any, /) -> Any
+            return True
+    else:
+        assert mode == 'wrap', f"invalid mode: {mode!r}, expected 'plain' or 'wrap'"
+        if n_positional == 2:
+            # (input_value: Any, serializer: SerializerFunctionWrapHandler, /) -> Any
+            return False
+        elif n_positional == 3:
+            # (input_value: Any, serializer: SerializerFunctionWrapHandler, info: SerializationInfo, /) -> Any
+            return True
+
+    return None


 AnyDecoratorCallable: TypeAlias = (
     'Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any], Callable[..., Any]]'
-    )
+)


-def is_instance_method_from_sig(function: AnyDecoratorCallable) ->bool:
+def is_instance_method_from_sig(function: AnyDecoratorCallable) -> bool:
     """Whether the function is an instance method.

     It will consider a function as instance method if the first parameter of
@@ -415,11 +679,14 @@ def is_instance_method_from_sig(function: AnyDecoratorCallable) ->bool:
     Returns:
         `True` if the function is an instance method, `False` otherwise.
     """
-    pass
+    sig = signature(unwrap_wrapped_function(function))
+    first = next(iter(sig.parameters.values()), None)
+    if first and first.name == 'self':
+        return True
+    return False


-def ensure_classmethod_based_on_signature(function: AnyDecoratorCallable
-    ) ->Any:
+def ensure_classmethod_based_on_signature(function: AnyDecoratorCallable) -> Any:
     """Apply the `@classmethod` decorator on the function.

     Args:
@@ -428,11 +695,27 @@ def ensure_classmethod_based_on_signature(function: AnyDecoratorCallable
     Return:
         The `@classmethod` decorator applied function.
     """
-    pass
-
-
-def unwrap_wrapped_function(func: Any, *, unwrap_partial: bool=True,
-    unwrap_class_static_method: bool=True) ->Any:
+    if not isinstance(
+        unwrap_wrapped_function(function, unwrap_class_static_method=False), classmethod
+    ) and _is_classmethod_from_sig(function):
+        return classmethod(function)  # type: ignore[arg-type]
+    return function
+
+
+def _is_classmethod_from_sig(function: AnyDecoratorCallable) -> bool:
+    sig = signature(unwrap_wrapped_function(function))
+    first = next(iter(sig.parameters.values()), None)
+    if first and first.name == 'cls':
+        return True
+    return False
+
+
+def unwrap_wrapped_function(
+    func: Any,
+    *,
+    unwrap_partial: bool = True,
+    unwrap_class_static_method: bool = True,
+) -> Any:
     """Recursively unwraps a wrapped function until the underlying function is reached.
     This handles property, functools.partial, functools.partialmethod, staticmethod, and classmethod.

@@ -445,11 +728,31 @@ def unwrap_wrapped_function(func: Any, *, unwrap_partial: bool=True,
     Returns:
         The underlying function of the wrapped function.
     """
-    pass
+    # Define the types we want to check against as a single tuple.
+    unwrap_types = (
+        (property, cached_property)
+        + ((partial, partialmethod) if unwrap_partial else ())
+        + ((staticmethod, classmethod) if unwrap_class_static_method else ())
+    )

+    while isinstance(func, unwrap_types):
+        if unwrap_class_static_method and isinstance(func, (classmethod, staticmethod)):
+            func = func.__func__
+        elif isinstance(func, (partial, partialmethod)):
+            func = func.func
+        elif isinstance(func, property):
+            func = func.fget  # arbitrary choice, convenient for computed fields
+        else:
+            # Make coverage happy as it can only get here in the last possible case
+            assert isinstance(func, cached_property)
+            func = func.func  # type: ignore

-def get_function_return_type(func: Any, explicit_return_type: Any,
-    types_namespace: (dict[str, Any] | None)=None) ->Any:
+    return func
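
A brief sketch of the peeling behaviour with a hypothetical target function:

    from functools import partial

    from pydantic._internal._decorators import unwrap_wrapped_function

    def target(x):
        return x

    wrapped = staticmethod(partial(target, 1))

    # Both the staticmethod and the partial layers are removed by default.
    assert unwrap_wrapped_function(wrapped) is target
    # With unwrap_partial=False only the staticmethod layer is peeled off.
    assert isinstance(unwrap_wrapped_function(wrapped, unwrap_partial=False), partial)
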
+
+
+def get_function_return_type(
+    func: Any, explicit_return_type: Any, types_namespace: dict[str, Any] | None = None
+) -> Any:
     """Get the function return type.

     It gets the return type from the type annotation if `explicit_return_type` is `None`.
@@ -463,10 +766,17 @@ def get_function_return_type(func: Any, explicit_return_type: Any,
     Returns:
         The function return type.
     """
-    pass
+    if explicit_return_type is PydanticUndefined:
+        # try to get it from the type annotation
+        hints = get_function_type_hints(
+            unwrap_wrapped_function(func), include_keys={'return'}, types_namespace=types_namespace
+        )
+        return hints.get('return', PydanticUndefined)
+    else:
+        return explicit_return_type


-def count_positional_required_params(sig: Signature) ->int:
+def count_positional_required_params(sig: Signature) -> int:
     """Get the number of positional (required) arguments of a signature.

     This function should only be used to inspect signatures of validation and serialization functions.
@@ -476,10 +786,23 @@ def count_positional_required_params(sig: Signature) ->int:
     Returns:
         The number of positional arguments of a signature.
     """
-    pass
+    parameters = list(sig.parameters.values())
+    return sum(
+        1
+        for param in parameters
+        if can_be_positional(param)
+        # First argument is the value being validated/serialized, and can have a default value
+        # (e.g. `float`, which has signature `(x=0, /)`). We assume other parameters (the info arg
+        # for instance) should be required, and thus without any default value.
+        and (param.default is Parameter.empty or param == parameters[0])
+    )
+
+
+def can_be_positional(param: Parameter) -> bool:
+    return param.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)


-def ensure_property(f: Any) ->Any:
+def ensure_property(f: Any) -> Any:
     """Ensure that a function is a `property` or `cached_property`, or is a valid descriptor.

     Args:
@@ -488,4 +811,7 @@ def ensure_property(f: Any) ->Any:
     Returns:
         The function, or a `property` or `cached_property` instance wrapping the function.
     """
-    pass
+    if ismethoddescriptor(f) or isdatadescriptor(f):
+        return f
+    else:
+        return property(f)
diff --git a/pydantic/_internal/_decorators_v1.py b/pydantic/_internal/_decorators_v1.py
index 4645ddd95..0957e018b 100644
--- a/pydantic/_internal/_decorators_v1.py
+++ b/pydantic/_internal/_decorators_v1.py
@@ -1,9 +1,13 @@
 """Logic for V1 validators, e.g. `@validator` and `@root_validator`."""
+
 from __future__ import annotations as _annotations
+
 from inspect import Parameter, signature
 from typing import Any, Dict, Tuple, Union, cast
+
 from pydantic_core import core_schema
 from typing_extensions import Protocol
+
 from ..errors import PydanticUserError
 from ._decorators import can_be_positional

@@ -11,45 +15,43 @@ from ._decorators import can_be_positional
 class V1OnlyValueValidator(Protocol):
     """A simple validator, supported for V1 validators and V2 validators."""

-    def __call__(self, __value: Any) ->Any:
-        ...
+    def __call__(self, __value: Any) -> Any: ...


 class V1ValidatorWithValues(Protocol):
     """A validator with `values` argument, supported for V1 validators and V2 validators."""

-    def __call__(self, __value: Any, values: dict[str, Any]) ->Any:
-        ...
+    def __call__(self, __value: Any, values: dict[str, Any]) -> Any: ...


 class V1ValidatorWithValuesKwOnly(Protocol):
     """A validator with keyword only `values` argument, supported for V1 validators and V2 validators."""

-    def __call__(self, __value: Any, *, values: dict[str, Any]) ->Any:
-        ...
+    def __call__(self, __value: Any, *, values: dict[str, Any]) -> Any: ...


 class V1ValidatorWithKwargs(Protocol):
     """A validator with `kwargs` argument, supported for V1 validators and V2 validators."""

-    def __call__(self, __value: Any, **kwargs: Any) ->Any:
-        ...
+    def __call__(self, __value: Any, **kwargs: Any) -> Any: ...


 class V1ValidatorWithValuesAndKwargs(Protocol):
     """A validator with `values` and `kwargs` arguments, supported for V1 validators and V2 validators."""

-    def __call__(self, __value: Any, values: dict[str, Any], **kwargs: Any
-        ) ->Any:
-        ...
+    def __call__(self, __value: Any, values: dict[str, Any], **kwargs: Any) -> Any: ...
+
+
+V1Validator = Union[
+    V1ValidatorWithValues, V1ValidatorWithValuesKwOnly, V1ValidatorWithKwargs, V1ValidatorWithValuesAndKwargs
+]


-V1Validator = Union[V1ValidatorWithValues, V1ValidatorWithValuesKwOnly,
-    V1ValidatorWithKwargs, V1ValidatorWithValuesAndKwargs]
+def can_be_keyword(param: Parameter) -> bool:
+    return param.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY)


-def make_generic_v1_field_validator(validator: V1Validator
-    ) ->core_schema.WithInfoValidatorFunction:
+def make_generic_v1_field_validator(validator: V1Validator) -> core_schema.WithInfoValidatorFunction:
     """Wrap a V1 style field validator for V2 compatibility.

     Args:
@@ -62,38 +64,75 @@ def make_generic_v1_field_validator(validator: V1Validator
         PydanticUserError: If the signature is not supported or the parameters are
             not available in Pydantic V2.
     """
-    pass
+    sig = signature(validator)
+
+    needs_values_kw = False
+
+    for param_num, (param_name, parameter) in enumerate(sig.parameters.items()):
+        if can_be_keyword(parameter) and param_name in ('field', 'config'):
+            raise PydanticUserError(
+                'The `field` and `config` parameters are not available in Pydantic V2, '
+                'please use the `info` parameter instead.',
+                code='validator-field-config-info',
+            )
+        if parameter.kind is Parameter.VAR_KEYWORD:
+            needs_values_kw = True
+        elif can_be_keyword(parameter) and param_name == 'values':
+            needs_values_kw = True
+        elif can_be_positional(parameter) and param_num == 0:
+            # value
+            continue
+        elif parameter.default is Parameter.empty:  # ignore params with defaults e.g. bound by functools.partial
+            raise PydanticUserError(
+                f'Unsupported signature for V1 style validator {validator}: {sig} is not supported.',
+                code='validator-v1-signature',
+            )
+
+    if needs_values_kw:
+        # (v, **kwargs), (v, values, **kwargs), (v, *, values, **kwargs) or (v, *, values)
+        val1 = cast(V1ValidatorWithValues, validator)
+
+        def wrapper1(value: Any, info: core_schema.ValidationInfo) -> Any:
+            return val1(value, values=info.data)
+
+        return wrapper1
+    else:
+        val2 = cast(V1OnlyValueValidator, validator)
+
+        def wrapper2(value: Any, _: core_schema.ValidationInfo) -> Any:
+            return val2(value)
+
+        return wrapper2


 RootValidatorValues = Dict[str, Any]
+# technically tuple[model_dict, model_extra, fields_set] | tuple[dataclass_dict, init_vars]
 RootValidatorFieldsTuple = Tuple[Any, ...]


 class V1RootValidatorFunction(Protocol):
     """A simple root validator, supported for V1 validators and V2 validators."""

-    def __call__(self, __values: RootValidatorValues) ->RootValidatorValues:
-        ...
+    def __call__(self, __values: RootValidatorValues) -> RootValidatorValues: ...


 class V2CoreBeforeRootValidator(Protocol):
     """V2 validator with mode='before'."""

-    def __call__(self, __values: RootValidatorValues, __info: core_schema.
-        ValidationInfo) ->RootValidatorValues:
-        ...
+    def __call__(self, __values: RootValidatorValues, __info: core_schema.ValidationInfo) -> RootValidatorValues: ...


 class V2CoreAfterRootValidator(Protocol):
     """V2 validator with mode='after'."""

-    def __call__(self, __fields_tuple: RootValidatorFieldsTuple, __info:
-        core_schema.ValidationInfo) ->RootValidatorFieldsTuple:
-        ...
+    def __call__(
+        self, __fields_tuple: RootValidatorFieldsTuple, __info: core_schema.ValidationInfo
+    ) -> RootValidatorFieldsTuple: ...


-def make_v1_generic_root_validator(validator: V1RootValidatorFunction, pre:
-    bool) ->(V2CoreBeforeRootValidator | V2CoreAfterRootValidator):
+def make_v1_generic_root_validator(
+    validator: V1RootValidatorFunction, pre: bool
+) -> V2CoreBeforeRootValidator | V2CoreAfterRootValidator:
     """Wrap a V1 style root validator for V2 compatibility.

     Args:
@@ -103,4 +142,33 @@ def make_v1_generic_root_validator(validator: V1RootValidatorFunction, pre:
     Returns:
         A wrapped V2 style validator.
     """
-    pass
+    if pre is True:
+        # mode='before' for pydantic-core
+        def _wrapper1(values: RootValidatorValues, _: core_schema.ValidationInfo) -> RootValidatorValues:
+            return validator(values)
+
+        return _wrapper1
+
+    # mode='after' for pydantic-core
+    def _wrapper2(fields_tuple: RootValidatorFieldsTuple, _: core_schema.ValidationInfo) -> RootValidatorFieldsTuple:
+        if len(fields_tuple) == 2:
+            # dataclass, this is easy
+            values, init_vars = fields_tuple
+            values = validator(values)
+            return values, init_vars
+        else:
+            # ugly hack: to match v1 behaviour, we merge values and model_extra, then split them up based on fields
+            # afterwards
+            model_dict, model_extra, fields_set = fields_tuple
+            if model_extra:
+                fields = set(model_dict.keys())
+                model_dict.update(model_extra)
+                model_dict_new = validator(model_dict)
+                for k in list(model_dict_new.keys()):
+                    if k not in fields:
+                        model_extra[k] = model_dict_new.pop(k)
+            else:
+                model_dict_new = validator(model_dict)
+            return model_dict_new, model_extra, fields_set
+
+    return _wrapper2
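To see what the V1 compatibility shim above produces, this sketch wraps a V1-style validator that accepts `values` and calls it through the V2 `(value, info)` interface. `FakeInfo` is a hypothetical stand-in for `core_schema.ValidationInfo` (only its `.data` attribute is read by the wrapper), and the private import path is an assumption:

    from pydantic._internal._decorators_v1 import make_generic_v1_field_validator

    # V1 signature (value, values): the generated wrapper forwards `info.data` as `values`
    def not_equal_to_other_fields(value, values):
        assert value not in values.values()
        return value

    wrapped = make_generic_v1_field_validator(not_equal_to_other_fields)

    class FakeInfo:
        # hypothetical stand-in for core_schema.ValidationInfo
        data = {'other_field': 'x'}

    assert wrapped('y', FakeInfo()) == 'y'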
diff --git a/pydantic/_internal/_discriminated_union.py b/pydantic/_internal/_discriminated_union.py
index aa07168eb..a090c36f0 100644
--- a/pydantic/_internal/_discriminated_union.py
+++ b/pydantic/_internal/_discriminated_union.py
@@ -1,13 +1,20 @@
 from __future__ import annotations as _annotations
+
 from typing import TYPE_CHECKING, Any, Hashable, Sequence
+
 from pydantic_core import CoreSchema, core_schema
+
 from ..errors import PydanticUserError
 from . import _core_utils
-from ._core_utils import CoreSchemaField, collect_definitions
+from ._core_utils import (
+    CoreSchemaField,
+    collect_definitions,
+)
+
 if TYPE_CHECKING:
     from ..types import Discriminator
-CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY = (
-    'pydantic.internal.union_discriminator')
+
+CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY = 'pydantic.internal.union_discriminator'


 class MissingDefinitionForUnionRef(Exception):
@@ -15,14 +22,47 @@ class MissingDefinitionForUnionRef(Exception):
     requires a definition that is not yet defined
     """

-    def __init__(self, ref: str) ->None:
+    def __init__(self, ref: str) -> None:
         self.ref = ref
         super().__init__(f'Missing definition for ref {self.ref!r}')


-def apply_discriminator(schema: core_schema.CoreSchema, discriminator: (str |
-    Discriminator), definitions: (dict[str, core_schema.CoreSchema] | None)
-    =None) ->core_schema.CoreSchema:
+def set_discriminator_in_metadata(schema: CoreSchema, discriminator: Any) -> None:
+    schema.setdefault('metadata', {})
+    metadata = schema.get('metadata')
+    assert metadata is not None
+    metadata[CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY] = discriminator
+
+
+def apply_discriminators(schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
+    # We recursively walk through the `schema` passed to `apply_discriminators`, applying discriminators
+    # where necessary at each level. During this recursion, we allow references to be resolved from the definitions
+    # that are originally present on the original, outermost `schema`. Before `apply_discriminators` is called,
+    # `simplify_schema_references` is called on the schema (in the `clean_schema` function),
+    # which often puts the definitions in the outermost schema.
+    global_definitions: dict[str, CoreSchema] = collect_definitions(schema)
+
+    def inner(s: core_schema.CoreSchema, recurse: _core_utils.Recurse) -> core_schema.CoreSchema:
+        nonlocal global_definitions
+
+        s = recurse(s, inner)
+        if s['type'] == 'tagged-union':
+            return s
+
+        metadata = s.get('metadata', {})
+        discriminator = metadata.pop(CORE_SCHEMA_METADATA_DISCRIMINATOR_PLACEHOLDER_KEY, None)
+        if discriminator is not None:
+            s = apply_discriminator(s, discriminator, global_definitions)
+        return s
+
+    return _core_utils.walk_core_schema(schema, inner)
+
+
+def apply_discriminator(
+    schema: core_schema.CoreSchema,
+    discriminator: str | Discriminator,
+    definitions: dict[str, core_schema.CoreSchema] | None = None,
+) -> core_schema.CoreSchema:
     """Applies the discriminator and returns a new core schema.

     Args:
@@ -46,7 +86,15 @@ def apply_discriminator(schema: core_schema.CoreSchema, discriminator: (str |
             - If discriminator fields have different aliases.
             - If discriminator field not of type `Literal`.
     """
-    pass
+    from ..types import Discriminator
+
+    if isinstance(discriminator, Discriminator):
+        if isinstance(discriminator.discriminator, str):
+            discriminator = discriminator.discriminator
+        else:
+            return discriminator._convert_schema(schema)
+
+    return _ApplyInferredDiscriminator(discriminator, definitions or {}).apply(schema)


 class _ApplyInferredDiscriminator:
@@ -62,18 +110,61 @@ class _ApplyInferredDiscriminator:
     to make it easier to maintain state while recursively walking the provided CoreSchema.
     """

-    def __init__(self, discriminator: str, definitions: dict[str,
-        core_schema.CoreSchema]):
+    def __init__(self, discriminator: str, definitions: dict[str, core_schema.CoreSchema]):
+        # `discriminator` should be the name of the field which will serve as the discriminator.
+        # It must be the python name of the field, and *not* the field's alias. Note that as of now,
+        # all members of a discriminated union _must_ use a field with the same name as the discriminator.
+        # This may change if/when we expose a way to manually specify the TaggedUnionSchema's choices.
         self.discriminator = discriminator
+
+        # `definitions` should contain a mapping of schema ref to schema for all schemas which might
+        # be referenced by some choice
         self.definitions = definitions
+
+        # `_discriminator_alias` will hold the value, if present, of the alias for the discriminator
+        #
+        # Note: following the v1 implementation, we currently disallow the use of different aliases
+        # for different choices. This is not a limitation of pydantic_core, but if we try to handle
+        # this, the inference logic gets complicated very quickly, and could result in confusing
+        # debugging challenges for users making subtle mistakes.
+        #
+        # Rather than trying to do the most powerful inference possible, I think we should eventually
+        # expose a way to more-manually control the way the TaggedUnionSchema is constructed through
+        # the use of a new type which would be placed as an Annotation on the Union type. This would
+        # provide the full flexibility/power of pydantic_core's TaggedUnionSchema where necessary for
+        # more complex cases, without over-complicating the inference logic for the common cases.
         self._discriminator_alias: str | None = None
+
+        # `_should_be_nullable` indicates whether the converted union has `None` as an allowed value.
+        # If `None` is an acceptable value of the (possibly-wrapped) union, we ignore it while
+        # constructing the TaggedUnionSchema, but set the `_should_be_nullable` attribute to True.
+        # Once we have constructed the TaggedUnionSchema, if `_should_be_nullable` is True, we ensure
+        # that the final schema gets wrapped as a NullableSchema. This has the same semantics on the
+        # python side, but resolves the issue that `None` cannot correspond to any discriminator values.
         self._should_be_nullable = False
+
+        # `_is_nullable` is used to track if the final produced schema will definitely be nullable;
+        # we set it to True if the input schema is wrapped in a nullable schema that we know will be preserved
+        # as an indication that, even if None is discovered as one of the union choices, we will not need to wrap
+        # the final value in another nullable schema.
+        #
+        # This is more complicated than just checking for the final outermost schema having type 'nullable' thanks
+        # to the possible presence of other wrapper schemas such as DefinitionsSchema, WithDefaultSchema, etc.
         self._is_nullable = False
+
+        # `_choices_to_handle` serves as a stack of choices to add to the tagged union. Initially, choices
+        # from the union in the wrapped schema will be appended to this list, and the recursive choice-handling
+        # algorithm may add more choices to this stack as (nested) unions are encountered.
         self._choices_to_handle: list[core_schema.CoreSchema] = []
+
+        # `_tagged_union_choices` is built during the call to `apply`, and will hold the choices to be included
+        # in the output TaggedUnionSchema that will replace the union from the input schema
         self._tagged_union_choices: dict[Hashable, core_schema.CoreSchema] = {}
+
+        # `_used` is changed to True after applying the discriminator to prevent accidental re-use
         self._used = False

-    def apply(self, schema: core_schema.CoreSchema) ->core_schema.CoreSchema:
+    def apply(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
         """Return a new CoreSchema based on `schema` that uses a tagged-union with the discriminator provided
         to this class.

@@ -96,17 +187,70 @@ class _ApplyInferredDiscriminator:
                 - If discriminator fields have different aliases.
                 - If discriminator field not of type `Literal`.
         """
-        pass
+        assert not self._used
+        schema = self._apply_to_root(schema)
+        if self._should_be_nullable and not self._is_nullable:
+            schema = core_schema.nullable_schema(schema)
+        self._used = True
+        return schema

-    def _apply_to_root(self, schema: core_schema.CoreSchema
-        ) ->core_schema.CoreSchema:
+    def _apply_to_root(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
         """This method handles the outer-most stage of recursion over the input schema:
         unwrapping nullable or definitions schemas, and calling the `_handle_choice`
         method iteratively on the choices extracted (recursively) from the possibly-wrapped union.
         """
-        pass
+        if schema['type'] == 'nullable':
+            self._is_nullable = True
+            wrapped = self._apply_to_root(schema['schema'])
+            nullable_wrapper = schema.copy()
+            nullable_wrapper['schema'] = wrapped
+            return nullable_wrapper

-    def _handle_choice(self, choice: core_schema.CoreSchema) ->None:
+        if schema['type'] == 'definitions':
+            wrapped = self._apply_to_root(schema['schema'])
+            definitions_wrapper = schema.copy()
+            definitions_wrapper['schema'] = wrapped
+            return definitions_wrapper
+
+        if schema['type'] != 'union':
+            # If the schema is not a union, it probably means it just had a single member and
+            # was flattened by pydantic_core.
+            # However, it still may make sense to apply the discriminator to this schema,
+            # as a way to get discriminated-union-style error messages, so we allow this here.
+            schema = core_schema.union_schema([schema])
+
+        # Reverse the choices list before extending the stack so that they get handled in the order they occur
+        choices_schemas = [v[0] if isinstance(v, tuple) else v for v in schema['choices'][::-1]]
+        self._choices_to_handle.extend(choices_schemas)
+        while self._choices_to_handle:
+            choice = self._choices_to_handle.pop()
+            self._handle_choice(choice)
+
+        if self._discriminator_alias is not None and self._discriminator_alias != self.discriminator:
+            # * We need to annotate `discriminator` as a union here to handle both branches of this conditional
+            # * We need to annotate `discriminator` as list[list[str | int]] and not list[list[str]] due to the
+            #   invariance of list, and because list[list[str | int]] is the type of the discriminator argument
+            #   to tagged_union_schema below
+            # * See the docstring of pydantic_core.core_schema.tagged_union_schema for more details about how to
+            #   interpret the value of the discriminator argument to tagged_union_schema. (The list[list[str]] here
+            #   is the appropriate way to provide a list of fallback attributes to check for a discriminator value.)
+            discriminator: str | list[list[str | int]] = [[self.discriminator], [self._discriminator_alias]]
+        else:
+            discriminator = self.discriminator
+        return core_schema.tagged_union_schema(
+            choices=self._tagged_union_choices,
+            discriminator=discriminator,
+            custom_error_type=schema.get('custom_error_type'),
+            custom_error_message=schema.get('custom_error_message'),
+            custom_error_context=schema.get('custom_error_context'),
+            strict=False,
+            from_attributes=True,
+            ref=schema.get('ref'),
+            metadata=schema.get('metadata'),
+            serialization=schema.get('serialization'),
+        )
+
+    def _handle_choice(self, choice: core_schema.CoreSchema) -> None:
         """This method handles the "middle" stage of recursion over the input schema.
         Specifically, it is responsible for handling each choice of the outermost union
         (and any "coalesced" choices obtained from inner unions).
@@ -117,43 +261,243 @@ class _ApplyInferredDiscriminator:
         * Validating that each allowed discriminator value maps to a unique choice
         * Updating the _tagged_union_choices mapping that will ultimately be used to build the TaggedUnionSchema.
         """
-        pass
+        if choice['type'] == 'definition-ref':
+            if choice['schema_ref'] not in self.definitions:
+                raise MissingDefinitionForUnionRef(choice['schema_ref'])
+
+        if choice['type'] == 'none':
+            self._should_be_nullable = True
+        elif choice['type'] == 'definitions':
+            self._handle_choice(choice['schema'])
+        elif choice['type'] == 'nullable':
+            self._should_be_nullable = True
+            self._handle_choice(choice['schema'])  # unwrap the nullable schema
+        elif choice['type'] == 'union':
+            # Reverse the choices list before extending the stack so that they get handled in the order they occur
+            choices_schemas = [v[0] if isinstance(v, tuple) else v for v in choice['choices'][::-1]]
+            self._choices_to_handle.extend(choices_schemas)
+        elif choice['type'] not in {
+            'model',
+            'typed-dict',
+            'tagged-union',
+            'lax-or-strict',
+            'dataclass',
+            'dataclass-args',
+            'definition-ref',
+        } and not _core_utils.is_function_with_inner_schema(choice):
+            # We should eventually handle 'definition-ref' as well
+            raise TypeError(
+                f'{choice["type"]!r} is not a valid discriminated union variant;'
+                ' should be a `BaseModel` or `dataclass`'
+            )
+        else:
+            if choice['type'] == 'tagged-union' and self._is_discriminator_shared(choice):
+                # In this case, this inner tagged-union is compatible with the outer tagged-union,
+                # and its choices can be coalesced into the outer TaggedUnionSchema.
+                subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))]
+                # Reverse the choices list before extending the stack so that they get handled in the order they occur
+                self._choices_to_handle.extend(subchoices[::-1])
+                return
+
+            inferred_discriminator_values = self._infer_discriminator_values_for_choice(choice, source_name=None)
+            self._set_unique_choice_for_values(choice, inferred_discriminator_values)

-    def _is_discriminator_shared(self, choice: core_schema.TaggedUnionSchema
-        ) ->bool:
+    def _is_discriminator_shared(self, choice: core_schema.TaggedUnionSchema) -> bool:
         """This method returns a boolean indicating whether the discriminator for the `choice`
         is the same as that being used for the outermost tagged union. This is used to
         determine whether this TaggedUnionSchema choice should be "coalesced" into the top level,
         or whether it should be treated as a separate (nested) choice.
         """
-        pass
+        inner_discriminator = choice['discriminator']
+        return inner_discriminator == self.discriminator or (
+            isinstance(inner_discriminator, list)
+            and (self.discriminator in inner_discriminator or [self.discriminator] in inner_discriminator)
+        )

-    def _infer_discriminator_values_for_choice(self, choice: core_schema.
-        CoreSchema, source_name: (str | None)) ->list[str | int]:
+    def _infer_discriminator_values_for_choice(  # noqa C901
+        self, choice: core_schema.CoreSchema, source_name: str | None
+    ) -> list[str | int]:
         """This function recurses over `choice`, extracting all discriminator values that should map to this choice.

         `model_name` is accepted for the purpose of producing useful error messages.
         """
-        pass
+        if choice['type'] == 'definitions':
+            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name)
+        elif choice['type'] == 'function-plain':
+            raise TypeError(
+                f'{choice["type"]!r} is not a valid discriminated union variant;'
+                ' should be a `BaseModel` or `dataclass`'
+            )
+        elif _core_utils.is_function_with_inner_schema(choice):
+            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name)
+        elif choice['type'] == 'lax-or-strict':
+            return sorted(
+                set(
+                    self._infer_discriminator_values_for_choice(choice['lax_schema'], source_name=None)
+                    + self._infer_discriminator_values_for_choice(choice['strict_schema'], source_name=None)
+                )
+            )
+
+        elif choice['type'] == 'tagged-union':
+            values: list[str | int] = []
+            # Ignore str/int "choices" since these are just references to other choices
+            subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))]
+            for subchoice in subchoices:
+                subchoice_values = self._infer_discriminator_values_for_choice(subchoice, source_name=None)
+                values.extend(subchoice_values)
+            return values
+
+        elif choice['type'] == 'union':
+            values = []
+            for subchoice in choice['choices']:
+                subchoice_schema = subchoice[0] if isinstance(subchoice, tuple) else subchoice
+                subchoice_values = self._infer_discriminator_values_for_choice(subchoice_schema, source_name=None)
+                values.extend(subchoice_values)
+            return values
+
+        elif choice['type'] == 'nullable':
+            self._should_be_nullable = True
+            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=None)
+
+        elif choice['type'] == 'model':
+            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__)
+
+        elif choice['type'] == 'dataclass':
+            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__)
+
+        elif choice['type'] == 'model-fields':
+            return self._infer_discriminator_values_for_model_choice(choice, source_name=source_name)

-    def _infer_discriminator_values_for_typed_dict_choice(self, choice:
-        core_schema.TypedDictSchema, source_name: (str | None)=None) ->list[
-        str | int]:
+        elif choice['type'] == 'dataclass-args':
+            return self._infer_discriminator_values_for_dataclass_choice(choice, source_name=source_name)
+
+        elif choice['type'] == 'typed-dict':
+            return self._infer_discriminator_values_for_typed_dict_choice(choice, source_name=source_name)
+
+        elif choice['type'] == 'definition-ref':
+            schema_ref = choice['schema_ref']
+            if schema_ref not in self.definitions:
+                raise MissingDefinitionForUnionRef(schema_ref)
+            return self._infer_discriminator_values_for_choice(self.definitions[schema_ref], source_name=source_name)
+        else:
+            raise TypeError(
+                f'{choice["type"]!r} is not a valid discriminated union variant;'
+                ' should be a `BaseModel` or `dataclass`'
+            )
+
+    def _infer_discriminator_values_for_typed_dict_choice(
+        self, choice: core_schema.TypedDictSchema, source_name: str | None = None
+    ) -> list[str | int]:
         """This method just extracts the _infer_discriminator_values_for_choice logic specific to TypedDictSchema
         for the sake of readability.
         """
-        pass
+        source = 'TypedDict' if source_name is None else f'TypedDict {source_name!r}'
+        field = choice['fields'].get(self.discriminator)
+        if field is None:
+            raise PydanticUserError(
+                f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
+            )
+        return self._infer_discriminator_values_for_field(field, source)
+
+    def _infer_discriminator_values_for_model_choice(
+        self, choice: core_schema.ModelFieldsSchema, source_name: str | None = None
+    ) -> list[str | int]:
+        source = 'ModelFields' if source_name is None else f'Model {source_name!r}'
+        field = choice['fields'].get(self.discriminator)
+        if field is None:
+            raise PydanticUserError(
+                f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
+            )
+        return self._infer_discriminator_values_for_field(field, source)
+
+    def _infer_discriminator_values_for_dataclass_choice(
+        self, choice: core_schema.DataclassArgsSchema, source_name: str | None = None
+    ) -> list[str | int]:
+        source = 'DataclassArgs' if source_name is None else f'Dataclass {source_name!r}'
+        for field in choice['fields']:
+            if field['name'] == self.discriminator:
+                break
+        else:
+            raise PydanticUserError(
+                f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field'
+            )
+        return self._infer_discriminator_values_for_field(field, source)
+
+    def _infer_discriminator_values_for_field(self, field: CoreSchemaField, source: str) -> list[str | int]:
+        if field['type'] == 'computed-field':
+            # This should never occur as a discriminator, as it is only relevant to serialization
+            return []
+        alias = field.get('validation_alias', self.discriminator)
+        if not isinstance(alias, str):
+            raise PydanticUserError(
+                f'Alias {alias!r} is not supported in a discriminated union', code='discriminator-alias-type'
+            )
+        if self._discriminator_alias is None:
+            self._discriminator_alias = alias
+        elif self._discriminator_alias != alias:
+            raise PydanticUserError(
+                f'Aliases for discriminator {self.discriminator!r} must be the same '
+                f'(got {alias}, {self._discriminator_alias})',
+                code='discriminator-alias',
+            )
+        return self._infer_discriminator_values_for_inner_schema(field['schema'], source)

-    def _infer_discriminator_values_for_inner_schema(self, schema:
-        core_schema.CoreSchema, source: str) ->list[str | int]:
+    def _infer_discriminator_values_for_inner_schema(
+        self, schema: core_schema.CoreSchema, source: str
+    ) -> list[str | int]:
         """When inferring discriminator values for a field, we typically extract the expected values from a literal
         schema. This function does that, but also handles nested unions and defaults.
         """
-        pass
+        if schema['type'] == 'literal':
+            return schema['expected']
+
+        elif schema['type'] == 'union':
+            # Generally when multiple values are allowed they should be placed in a single `Literal`, but
+            # we add this case to handle the situation where a field is annotated as a `Union` of `Literal`s.
+            # For example, this lets us handle `Union[Literal['key'], Union[Literal['Key'], Literal['KEY']]]`
+            values: list[Any] = []
+            for choice in schema['choices']:
+                choice_schema = choice[0] if isinstance(choice, tuple) else choice
+                choice_values = self._infer_discriminator_values_for_inner_schema(choice_schema, source)
+                values.extend(choice_values)
+            return values
+
+        elif schema['type'] == 'default':
+            # This will happen if the field has a default value; we ignore it while extracting the discriminator values
+            return self._infer_discriminator_values_for_inner_schema(schema['schema'], source)
+
+        elif schema['type'] == 'function-after':
+            # After validators don't affect the discriminator values
+            return self._infer_discriminator_values_for_inner_schema(schema['schema'], source)
+
+        elif schema['type'] in {'function-before', 'function-wrap', 'function-plain'}:
+            validator_type = repr(schema['type'].split('-')[1])
+            raise PydanticUserError(
+                f'Cannot use a mode={validator_type} validator in the'
+                f' discriminator field {self.discriminator!r} of {source}',
+                code='discriminator-validator',
+            )
+
+        else:
+            raise PydanticUserError(
+                f'{source} needs field {self.discriminator!r} to be of type `Literal`',
+                code='discriminator-needs-literal',
+            )

-    def _set_unique_choice_for_values(self, choice: core_schema.CoreSchema,
-        values: Sequence[str | int]) ->None:
+    def _set_unique_choice_for_values(self, choice: core_schema.CoreSchema, values: Sequence[str | int]) -> None:
         """This method updates `self.tagged_union_choices` so that all provided (discriminator) `values` map to the
         provided `choice`, validating that none of these values already map to another (different) choice.
         """
-        pass
+        for discriminator_value in values:
+            if discriminator_value in self._tagged_union_choices:
+                # It is okay if `value` is already in tagged_union_choices as long as it maps to the same value.
+                # Because tagged_union_choices may map values to other values, we need to walk the choices dict
+                # until we get to a "real" choice, and confirm that is equal to the one assigned.
+                existing_choice = self._tagged_union_choices[discriminator_value]
+                if existing_choice != choice:
+                    raise TypeError(
+                        f'Value {discriminator_value!r} for discriminator '
+                        f'{self.discriminator!r} mapped to multiple choices'
+                    )
+            else:
+                self._tagged_union_choices[discriminator_value] = choice
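The effect of `_ApplyInferredDiscriminator` is easiest to observe from the public API: a `Union` annotated with a discriminator is validated as a tagged union, with the branch selected by the `Literal` value of the discriminator field. A short sketch using the public `Field(discriminator=...)` syntax:

    from typing import Literal, Union

    from pydantic import BaseModel, Field

    class Cat(BaseModel):
        kind: Literal['cat'] = 'cat'
        meows: int

    class Dog(BaseModel):
        kind: Literal['dog'] = 'dog'
        barks: int

    class Owner(BaseModel):
        # internally rewritten into a tagged-union core schema keyed on `kind`
        pet: Union[Cat, Dog] = Field(discriminator='kind')

    owner = Owner.model_validate({'pet': {'kind': 'dog', 'barks': 3}})
    assert isinstance(owner.pet, Dog)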
diff --git a/pydantic/_internal/_docs_extraction.py b/pydantic/_internal/_docs_extraction.py
index 88a14e9ff..685a6d069 100644
--- a/pydantic/_internal/_docs_extraction.py
+++ b/pydantic/_internal/_docs_extraction.py
@@ -1,5 +1,7 @@
 """Utilities related to attribute docstring extraction."""
+
 from __future__ import annotations
+
 import ast
 import inspect
 import textwrap
@@ -7,16 +9,76 @@ from typing import Any


 class DocstringVisitor(ast.NodeVisitor):
-
-    def __init__(self) ->None:
+    def __init__(self) -> None:
         super().__init__()
+
         self.target: str | None = None
         self.attrs: dict[str, str] = {}
         self.previous_node_type: type[ast.AST] | None = None

+    def visit(self, node: ast.AST) -> Any:
+        node_result = super().visit(node)
+        self.previous_node_type = type(node)
+        return node_result
+
+    def visit_AnnAssign(self, node: ast.AnnAssign) -> Any:
+        if isinstance(node.target, ast.Name):
+            self.target = node.target.id
+
+    def visit_Expr(self, node: ast.Expr) -> Any:
+        if (
+            isinstance(node.value, ast.Constant)
+            and isinstance(node.value.value, str)
+            and self.previous_node_type is ast.AnnAssign
+        ):
+            docstring = inspect.cleandoc(node.value.value)
+            if self.target:
+                self.attrs[self.target] = docstring
+            self.target = None
+
+
+def _dedent_source_lines(source: list[str]) -> str:
+    # Required for nested class definitions, e.g. in a function block
+    dedent_source = textwrap.dedent(''.join(source))
+    if dedent_source.startswith((' ', '\t')):
+        # We are in the case where there's a dedented (usually multiline) string
+        # at a lower indentation level than the class itself. We wrap our class
+        # in a function as a workaround.
+        dedent_source = f'def dedent_workaround():\n{dedent_source}'
+    return dedent_source

-def extract_docstrings_from_cls(cls: type[Any], use_inspect: bool=False
-    ) ->dict[str, str]:
+
+def _extract_source_from_frame(cls: type[Any]) -> list[str] | None:
+    frame = inspect.currentframe()
+
+    while frame:
+        if inspect.getmodule(frame) is inspect.getmodule(cls):
+            lnum = frame.f_lineno
+            try:
+                lines, _ = inspect.findsource(frame)
+            except OSError:
+                # Source can't be retrieved (maybe because running in an interactive terminal),
+                # we don't want to error here.
+                pass
+            else:
+                block_lines = inspect.getblock(lines[lnum - 1 :])
+                dedent_source = _dedent_source_lines(block_lines)
+                try:
+                    block_tree = ast.parse(dedent_source)
+                except SyntaxError:
+                    pass
+                else:
+                    stmt = block_tree.body[0]
+                    if isinstance(stmt, ast.FunctionDef) and stmt.name == 'dedent_workaround':
+                        # `_dedent_source_lines` wrapped the class around the workaround function
+                        stmt = stmt.body[0]
+                    if isinstance(stmt, ast.ClassDef) and stmt.name == cls.__name__:
+                        return block_lines
+
+        frame = frame.f_back
+
+
+def extract_docstrings_from_cls(cls: type[Any], use_inspect: bool = False) -> dict[str, str]:
     """Map model attributes and their corresponding docstring.

     Args:
@@ -27,4 +89,20 @@ def extract_docstrings_from_cls(cls: type[Any], use_inspect: bool=False
     Returns:
         A mapping containing attribute names and their corresponding docstring.
     """
-    pass
+    if use_inspect:
+        # Might not work as expected if two classes have the same name in the same source file.
+        try:
+            source, _ = inspect.getsourcelines(cls)
+        except OSError:
+            return {}
+    else:
+        source = _extract_source_from_frame(cls)
+
+    if not source:
+        return {}
+
+    dedent_source = _dedent_source_lines(source)
+
+    visitor = DocstringVisitor()
+    visitor.visit(ast.parse(dedent_source))
+    return visitor.attrs
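The docstring extraction above is what powers the `use_attribute_docstrings` config option: with it enabled, a bare string literal placed directly under an annotated attribute becomes the field's `description` (the class source must be retrievable, since the extraction reads it from the defining frame). A small usage sketch:

    from pydantic import BaseModel, ConfigDict

    class Product(BaseModel):
        model_config = ConfigDict(use_attribute_docstrings=True)

        name: str
        """The product's display name."""

        price: float
        """Unit price in euros."""

    assert Product.model_fields['name'].description == "The product's display name."
    assert Product.model_fields['price'].description == 'Unit price in euros.'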
diff --git a/pydantic/_internal/_fields.py b/pydantic/_internal/_fields.py
index 7e8b6c926..52cd08bf0 100644
--- a/pydantic/_internal/_fields.py
+++ b/pydantic/_internal/_fields.py
@@ -1,28 +1,38 @@
 """Private logic related to fields (the `Field()` function and `FieldInfo` class), and arguments to `Annotated`."""
+
 from __future__ import annotations as _annotations
+
 import dataclasses
 import sys
 import warnings
 from copy import copy
 from functools import lru_cache
 from typing import TYPE_CHECKING, Any
+
 from pydantic_core import PydanticUndefined
+
 from pydantic.errors import PydanticUserError
+
 from . import _typing_extra
 from ._config import ConfigWrapper
 from ._docs_extraction import extract_docstrings_from_cls
 from ._repr import Representation
 from ._typing_extra import get_cls_type_hints_lenient, get_type_hints, is_classvar, is_finalvar
+
 if TYPE_CHECKING:
     from annotated_types import BaseMetadata
+
     from ..fields import FieldInfo
     from ..main import BaseModel
     from ._dataclasses import StandardDataclass
     from ._decorators import DecoratorInfos


-def get_type_hints_infer_globalns(obj: Any, localns: (dict[str, Any] | None
-    )=None, include_extras: bool=False) ->dict[str, Any]:
+def get_type_hints_infer_globalns(
+    obj: Any,
+    localns: dict[str, Any] | None = None,
+    include_extras: bool = False,
+) -> dict[str, Any]:
     """Gets type hints for an object by inferring the global namespace.

     It uses the `typing.get_type_hints`, The only thing that we do here is fetching
@@ -36,15 +46,24 @@ def get_type_hints_infer_globalns(obj: Any, localns: (dict[str, Any] | None
     Returns:
         The object type hints.
     """
-    pass
+    module_name = getattr(obj, '__module__', None)
+    globalns: dict[str, Any] | None = None
+    if module_name:
+        try:
+            globalns = sys.modules[module_name].__dict__
+        except KeyError:
+            # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363
+            pass
+    return get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras)


 class PydanticMetadata(Representation):
     """Base class for annotation markers like `Strict`."""
+
     __slots__ = ()


-def pydantic_general_metadata(**metadata: Any) ->BaseMetadata:
+def pydantic_general_metadata(**metadata: Any) -> BaseMetadata:
     """Create a new `_PydanticGeneralMetadata` class with the given metadata.

     Args:
@@ -53,19 +72,39 @@ def pydantic_general_metadata(**metadata: Any) ->BaseMetadata:
     Returns:
         The new `_PydanticGeneralMetadata` class.
     """
-    pass
+    return _general_metadata_cls()(metadata)  # type: ignore


 @lru_cache(maxsize=None)
-def _general_metadata_cls() ->type[BaseMetadata]:
+def _general_metadata_cls() -> type[BaseMetadata]:
     """Do it this way to avoid importing `annotated_types` at import time."""
-    pass
+    from annotated_types import BaseMetadata
+
+    class _PydanticGeneralMetadata(PydanticMetadata, BaseMetadata):
+        """Pydantic general metadata like `max_digits`."""
+
+        def __init__(self, metadata: Any):
+            self.__dict__ = metadata
+
+    return _PydanticGeneralMetadata  # type: ignore
+

+def _update_fields_from_docstrings(cls: type[Any], fields: dict[str, FieldInfo], config_wrapper: ConfigWrapper) -> None:
+    if config_wrapper.use_attribute_docstrings:
+        fields_docs = extract_docstrings_from_cls(cls)
+        for ann_name, field_info in fields.items():
+            if field_info.description is None and ann_name in fields_docs:
+                field_info.description = fields_docs[ann_name]

-def collect_model_fields(cls: type[BaseModel], bases: tuple[type[Any], ...],
-    config_wrapper: ConfigWrapper, types_namespace: (dict[str, Any] | None),
-    *, typevars_map: (dict[Any, Any] | None)=None) ->tuple[dict[str,
-    FieldInfo], set[str]]:
+
+def collect_model_fields(  # noqa: C901
+    cls: type[BaseModel],
+    bases: tuple[type[Any], ...],
+    config_wrapper: ConfigWrapper,
+    types_namespace: dict[str, Any] | None,
+    *,
+    typevars_map: dict[Any, Any] | None = None,
+) -> tuple[dict[str, FieldInfo], set[str]]:
     """Collect the fields of a nascent pydantic model.

     Also collect the names of any ClassVars present in the type hints.
@@ -88,12 +127,167 @@ def collect_model_fields(cls: type[BaseModel], bases: tuple[type[Any], ...],
             - If there is a field other than `root` in `RootModel`.
             - If a field shadows an attribute in the parent model.
     """
-    pass
+    from ..fields import FieldInfo
+
+    type_hints = get_cls_type_hints_lenient(cls, types_namespace)
+
+    # https://docs.python.org/3/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older
+    # annotations is only used for finding fields in parent classes
+    annotations = cls.__dict__.get('__annotations__', {})
+    fields: dict[str, FieldInfo] = {}
+
+    class_vars: set[str] = set()
+    for ann_name, ann_type in type_hints.items():
+        if ann_name == 'model_config':
+            # We never want to treat `model_config` as a field
+            # Note: we may need to change this logic if/when we introduce a `BareModel` class with no
+            # protected namespaces (where `model_config` might be allowed as a field name)
+            continue
+        for protected_namespace in config_wrapper.protected_namespaces:
+            if ann_name.startswith(protected_namespace):
+                for b in bases:
+                    if hasattr(b, ann_name):
+                        from ..main import BaseModel
+
+                        if not (issubclass(b, BaseModel) and ann_name in b.model_fields):
+                            raise NameError(
+                                f'Field "{ann_name}" conflicts with member {getattr(b, ann_name)}'
+                                f' of protected namespace "{protected_namespace}".'
+                            )
+                else:
+                    valid_namespaces = tuple(
+                        x for x in config_wrapper.protected_namespaces if not ann_name.startswith(x)
+                    )
+                    warnings.warn(
+                        f'Field "{ann_name}" has conflict with protected namespace "{protected_namespace}".'
+                        '\n\nYou may be able to resolve this warning by setting'
+                        f" `model_config['protected_namespaces'] = {valid_namespaces}`.",
+                        UserWarning,
+                    )
+        if is_classvar(ann_type):
+            class_vars.add(ann_name)
+            continue
+        if _is_finalvar_with_default_val(ann_type, getattr(cls, ann_name, PydanticUndefined)):
+            class_vars.add(ann_name)
+            continue
+        if not is_valid_field_name(ann_name):
+            continue
+        if cls.__pydantic_root_model__ and ann_name != 'root':
+            raise NameError(
+                f"Unexpected field with name {ann_name!r}; only 'root' is allowed as a field of a `RootModel`"
+            )
+
+        # when building a generic model with `MyModel[int]`, the generic_origin check makes sure we don't get
+        # "... shadows an attribute" warnings
+        generic_origin = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin')
+        for base in bases:
+            dataclass_fields = {
+                field.name for field in (dataclasses.fields(base) if dataclasses.is_dataclass(base) else ())
+            }
+            if hasattr(base, ann_name):
+                if base is generic_origin:
+                    # Don't warn when "shadowing" of attributes in parametrized generics
+                    continue
+
+                if ann_name in dataclass_fields:
+                    # Don't warn when inheriting stdlib dataclasses whose fields are "shadowed" by defaults being set
+                    # on the class instance.
+                    continue
+
+                if ann_name not in annotations:
+                    # Don't warn when a field exists in a parent class but has not been defined in the current class
+                    continue
+
+                warnings.warn(
+                    f'Field name "{ann_name}" in "{cls.__qualname__}" shadows an attribute in parent '
+                    f'"{base.__qualname__}"',
+                    UserWarning,
+                )
+
+        try:
+            default = getattr(cls, ann_name, PydanticUndefined)
+            if default is PydanticUndefined:
+                raise AttributeError
+        except AttributeError:
+            if ann_name in annotations:
+                field_info = FieldInfo.from_annotation(ann_type)
+            else:
+                # if field has no default value and is not in __annotations__ this means that it is
+                # defined in a base class and we can take it from there
+                model_fields_lookup: dict[str, FieldInfo] = {}
+                for x in cls.__bases__[::-1]:
+                    model_fields_lookup.update(getattr(x, 'model_fields', {}))
+                if ann_name in model_fields_lookup:
+                    # The field was present on one of the (possibly multiple) base classes
+                    # copy the field to make sure typevar substitutions don't cause issues with the base classes
+                    field_info = copy(model_fields_lookup[ann_name])
+                else:
+                    # The field was not found on any base classes; this seems to be caused by fields not getting
+                    # generated thanks to models not being fully defined while initializing recursive models.
+                    # Nothing stops us from just creating a new FieldInfo for this type hint, so we do this.
+                    field_info = FieldInfo.from_annotation(ann_type)
+        else:
+            _warn_on_nested_alias_in_annotation(ann_type, ann_name)
+            field_info = FieldInfo.from_annotated_attribute(ann_type, default)
+            # attributes which are fields are removed from the class namespace:
+            # 1. To match the behaviour of annotation-only fields
+            # 2. To avoid false positives in the NameError check above
+            try:
+                delattr(cls, ann_name)
+            except AttributeError:
+                pass  # indicates the attribute was on a parent class
+
+        # Use cls.__dict__['__pydantic_decorators__'] instead of cls.__pydantic_decorators__
+        # to make sure the decorators have already been built for this exact class
+        decorators: DecoratorInfos = cls.__dict__['__pydantic_decorators__']
+        if ann_name in decorators.computed_fields:
+            raise ValueError("you can't override a field with a computed field")
+        fields[ann_name] = field_info
+
+    if typevars_map:
+        for field in fields.values():
+            field.apply_typevars_map(typevars_map, types_namespace)

+    _update_fields_from_docstrings(cls, fields, config_wrapper)

-def collect_dataclass_fields(cls: type[StandardDataclass], types_namespace:
-    (dict[str, Any] | None), *, typevars_map: (dict[Any, Any] | None)=None,
-    config_wrapper: (ConfigWrapper | None)=None) ->dict[str, FieldInfo]:
+    return fields, class_vars
+
+
+def _warn_on_nested_alias_in_annotation(ann_type: type[Any], ann_name: str):
+    from ..fields import FieldInfo
+
+    if hasattr(ann_type, '__args__'):
+        for anno_arg in ann_type.__args__:
+            if _typing_extra.is_annotated(anno_arg):
+                for anno_type_arg in _typing_extra.get_args(anno_arg):
+                    if isinstance(anno_type_arg, FieldInfo) and anno_type_arg.alias is not None:
+                        warnings.warn(
+                            f'`alias` specification on field "{ann_name}" must be set on outermost annotation to take effect.',
+                            UserWarning,
+                        )
+                        break
+
+
+def _is_finalvar_with_default_val(type_: type[Any], val: Any) -> bool:
+    from ..fields import FieldInfo
+
+    if not is_finalvar(type_):
+        return False
+    elif val is PydanticUndefined:
+        return False
+    elif isinstance(val, FieldInfo) and (val.default is PydanticUndefined and val.default_factory is None):
+        return False
+    else:
+        return True
+
+
+def collect_dataclass_fields(
+    cls: type[StandardDataclass],
+    types_namespace: dict[str, Any] | None,
+    *,
+    typevars_map: dict[Any, Any] | None = None,
+    config_wrapper: ConfigWrapper | None = None,
+) -> dict[str, FieldInfo]:
     """Collect the fields of a dataclass.

     Args:
@@ -105,4 +299,63 @@ def collect_dataclass_fields(cls: type[StandardDataclass], types_namespace:
     Returns:
         The dataclass fields.
     """
-    pass
+    from ..fields import FieldInfo
+
+    fields: dict[str, FieldInfo] = {}
+    dataclass_fields: dict[str, dataclasses.Field] = cls.__dataclass_fields__
+    cls_localns = dict(vars(cls))  # this matches get_cls_type_hints_lenient, but all tests pass with `= None` instead
+
+    source_module = sys.modules.get(cls.__module__)
+    if source_module is not None:
+        types_namespace = {**source_module.__dict__, **(types_namespace or {})}
+
+    for ann_name, dataclass_field in dataclass_fields.items():
+        ann_type = _typing_extra.eval_type_lenient(dataclass_field.type, types_namespace, cls_localns)
+        if is_classvar(ann_type):
+            continue
+
+        if (
+            not dataclass_field.init
+            and dataclass_field.default == dataclasses.MISSING
+            and dataclass_field.default_factory == dataclasses.MISSING
+        ):
+            # TODO: We should probably do something with this so that validate_assignment behaves properly
+            #   Issue: https://github.com/pydantic/pydantic/issues/5470
+            continue
+
+        if isinstance(dataclass_field.default, FieldInfo):
+            if dataclass_field.default.init_var:
+                if dataclass_field.default.init is False:
+                    raise PydanticUserError(
+                        f'Dataclass field {ann_name} has init=False and init_var=True, but these are mutually exclusive.',
+                        code='clashing-init-and-init-var',
+                    )
+
+                # TODO: same note as above re validate_assignment
+                continue
+            field_info = FieldInfo.from_annotated_attribute(ann_type, dataclass_field.default)
+        else:
+            field_info = FieldInfo.from_annotated_attribute(ann_type, dataclass_field)
+
+        fields[ann_name] = field_info
+
+        if field_info.default is not PydanticUndefined and isinstance(getattr(cls, ann_name, field_info), FieldInfo):
+            # We need this to fix the default when the "default" from __dataclass_fields__ is a pydantic.FieldInfo
+            setattr(cls, ann_name, field_info.default)
+
+    if typevars_map:
+        for field in fields.values():
+            field.apply_typevars_map(typevars_map, types_namespace)
+
+    if config_wrapper is not None:
+        _update_fields_from_docstrings(cls, fields, config_wrapper)
+
+    return fields
+
+
+def is_valid_field_name(name: str) -> bool:
+    return not name.startswith('_')
+
+
+def is_valid_privateattr_name(name: str) -> bool:
+    return name.startswith('_') and not name.startswith('__')
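Two user-visible consequences of the field-collection rules above: `ClassVar` annotations and names rejected by `is_valid_field_name` (leading underscore) are skipped, so they never become model fields. A brief sketch:

    from typing import ClassVar

    from pydantic import BaseModel

    class Account(BaseModel):
        name: str
        kind: ClassVar[str] = 'user'   # ClassVar: tracked as a class var, not a field
        _token: str = 'internal'       # leading underscore: a private attribute, not a field

    assert set(Account.model_fields) == {'name'}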
diff --git a/pydantic/_internal/_forward_ref.py b/pydantic/_internal/_forward_ref.py
index 65d359cca..231f81d11 100644
--- a/pydantic/_internal/_forward_ref.py
+++ b/pydantic/_internal/_forward_ref.py
@@ -1,4 +1,5 @@
 from __future__ import annotations as _annotations
+
 from dataclasses import dataclass
 from typing import Union

@@ -6,16 +7,17 @@ from typing import Union
 @dataclass
 class PydanticRecursiveRef:
     type_ref: str
+
     __name__ = 'PydanticRecursiveRef'
     __hash__ = object.__hash__

-    def __call__(self) ->None:
+    def __call__(self) -> None:
         """Defining __call__ is necessary for the `typing` module to let you use an instance of
         this class as the result of resolving a standard ForwardRef.
         """

     def __or__(self, other):
-        return Union[self, other]
+        return Union[self, other]  # type: ignore

     def __ror__(self, other):
-        return Union[other, self]
+        return Union[other, self]  # type: ignore
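
The `__or__`/`__ror__` overloads above let a recursive reference take part in PEP 604 union syntax while a schema is still being built; defining `__call__` is what makes `typing` accept the instance as a Union member in the first place. A rough illustration of the intent, not part of the diff (internal API; the `type_ref` value is made up):

    from typing import Union
    from pydantic._internal._forward_ref import PydanticRecursiveRef

    ref = PydanticRecursiveRef(type_ref='MyModel:1234')  # hypothetical ref string
    # Both spellings build a typing.Union containing the placeholder instance
    assert (ref | None) == Union[ref, None]  # via __or__
    assert (int | ref) == Union[int, ref]    # via __ror__
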
diff --git a/pydantic/_internal/_generate_schema.py b/pydantic/_internal/_generate_schema.py
index c7465578b..edd8e722c 100644
--- a/pydantic/_internal/_generate_schema.py
+++ b/pydantic/_internal/_generate_schema.py
@@ -1,5 +1,7 @@
 """Convert python types to pydantic-core schema."""
+
 from __future__ import annotations as _annotations
+
 import collections.abc
 import dataclasses
 import inspect
@@ -15,10 +17,27 @@ from inspect import Parameter, _ParameterKind, signature
 from itertools import chain
 from operator import attrgetter
 from types import FunctionType, LambdaType, MethodType
-from typing import TYPE_CHECKING, Any, Callable, Dict, Final, ForwardRef, Iterable, Iterator, Mapping, Type, TypeVar, Union, cast, overload
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Final,
+    ForwardRef,
+    Iterable,
+    Iterator,
+    Mapping,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+    overload,
+)
 from warnings import warn
+
 from pydantic_core import CoreSchema, PydanticUndefined, core_schema, to_jsonable_python
 from typing_extensions import Annotated, Literal, TypeAliasType, TypedDict, get_args, get_origin, is_typeddict
+
 from ..aliases import AliasGenerator
 from ..annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
 from ..config import ConfigDict, JsonDict, JsonEncoder
@@ -29,8 +48,31 @@ from ..warnings import PydanticDeprecatedSince20
 from . import _core_utils, _decorators, _discriminated_union, _known_annotated_metadata, _typing_extra
 from ._config import ConfigWrapper, ConfigWrapperStack
 from ._core_metadata import CoreMetadataHandler, build_metadata_dict
-from ._core_utils import CoreSchemaOrField, collect_invalid_schemas, define_expected_missing_refs, get_ref, get_type_ref, is_function_with_inner_schema, is_list_like_schema_with_items_schema, simplify_schema_references, validate_core_schema
-from ._decorators import Decorator, DecoratorInfos, FieldSerializerDecoratorInfo, FieldValidatorDecoratorInfo, ModelSerializerDecoratorInfo, ModelValidatorDecoratorInfo, RootValidatorDecoratorInfo, ValidatorDecoratorInfo, get_attribute_from_bases, inspect_field_serializer, inspect_model_serializer, inspect_validator
+from ._core_utils import (
+    CoreSchemaOrField,
+    collect_invalid_schemas,
+    define_expected_missing_refs,
+    get_ref,
+    get_type_ref,
+    is_function_with_inner_schema,
+    is_list_like_schema_with_items_schema,
+    simplify_schema_references,
+    validate_core_schema,
+)
+from ._decorators import (
+    Decorator,
+    DecoratorInfos,
+    FieldSerializerDecoratorInfo,
+    FieldValidatorDecoratorInfo,
+    ModelSerializerDecoratorInfo,
+    ModelValidatorDecoratorInfo,
+    RootValidatorDecoratorInfo,
+    ValidatorDecoratorInfo,
+    get_attribute_from_bases,
+    inspect_field_serializer,
+    inspect_model_serializer,
+    inspect_validator,
+)
 from ._docs_extraction import extract_docstrings_from_cls
 from ._fields import collect_dataclass_fields, get_type_hints_infer_globalns
 from ._forward_ref import PydanticRecursiveRef
@@ -39,6 +81,7 @@ from ._mock_val_ser import MockCoreSchema
 from ._schema_generation_shared import CallbackGetCoreSchemaHandler
 from ._typing_extra import is_finalvar, is_self_type
 from ._utils import lenient_issubclass
+
 if TYPE_CHECKING:
     from ..fields import ComputedFieldInfo, FieldInfo
     from ..main import BaseModel
@@ -46,28 +89,32 @@ if TYPE_CHECKING:
     from ..validators import FieldValidatorModes
     from ._dataclasses import StandardDataclass
     from ._schema_generation_shared import GetJsonSchemaFunction
+
 _SUPPORTS_TYPEDDICT = sys.version_info >= (3, 12)
 _AnnotatedType = type(Annotated[int, 123])
-FieldDecoratorInfo = Union[ValidatorDecoratorInfo,
-    FieldValidatorDecoratorInfo, FieldSerializerDecoratorInfo]
-FieldDecoratorInfoType = TypeVar('FieldDecoratorInfoType', bound=
-    FieldDecoratorInfo)
-AnyFieldDecorator = Union[Decorator[ValidatorDecoratorInfo], Decorator[
-    FieldValidatorDecoratorInfo], Decorator[FieldSerializerDecoratorInfo]]
+
+FieldDecoratorInfo = Union[ValidatorDecoratorInfo, FieldValidatorDecoratorInfo, FieldSerializerDecoratorInfo]
+FieldDecoratorInfoType = TypeVar('FieldDecoratorInfoType', bound=FieldDecoratorInfo)
+AnyFieldDecorator = Union[
+    Decorator[ValidatorDecoratorInfo],
+    Decorator[FieldValidatorDecoratorInfo],
+    Decorator[FieldSerializerDecoratorInfo],
+]
+
 ModifyCoreSchemaWrapHandler = GetCoreSchemaHandler
-GetCoreSchemaFunction = Callable[[Any, ModifyCoreSchemaWrapHandler],
-    core_schema.CoreSchema]
+GetCoreSchemaFunction = Callable[[Any, ModifyCoreSchemaWrapHandler], core_schema.CoreSchema]
+
 TUPLE_TYPES: list[type] = [tuple, typing.Tuple]
 LIST_TYPES: list[type] = [list, typing.List, collections.abc.MutableSequence]
 SET_TYPES: list[type] = [set, typing.Set, collections.abc.MutableSet]
-FROZEN_SET_TYPES: list[type] = [frozenset, typing.FrozenSet, collections.
-    abc.Set]
-DICT_TYPES: list[type] = [dict, typing.Dict, collections.abc.MutableMapping,
-    collections.abc.Mapping]
+FROZEN_SET_TYPES: list[type] = [frozenset, typing.FrozenSet, collections.abc.Set]
+DICT_TYPES: list[type] = [dict, typing.Dict, collections.abc.MutableMapping, collections.abc.Mapping]


-def check_validator_fields_against_field_name(info: FieldDecoratorInfo,
-    field: str) ->bool:
+def check_validator_fields_against_field_name(
+    info: FieldDecoratorInfo,
+    field: str,
+) -> bool:
     """Check if field name is in validator fields.

     Args:
@@ -77,11 +124,15 @@ def check_validator_fields_against_field_name(info: FieldDecoratorInfo,
     Returns:
         `True` if field name is in validator fields, `False` otherwise.
     """
-    pass
+    if '*' in info.fields:
+        return True
+    for v_field_name in info.fields:
+        if v_field_name == field:
+            return True
+    return False


-def check_decorator_fields_exist(decorators: Iterable[AnyFieldDecorator],
-    fields: Iterable[str]) ->None:
+def check_decorator_fields_exist(decorators: Iterable[AnyFieldDecorator], fields: Iterable[str]) -> None:
     """Check if the defined fields in decorators exist in `fields` param.

     It ignores the check for a decorator if the decorator has `*` as field or `check_fields=False`.
@@ -93,12 +144,71 @@ def check_decorator_fields_exist(decorators: Iterable[AnyFieldDecorator],
     Raises:
         PydanticUserError: If one of the field names does not exist in `fields` param.
     """
-    pass
-
-
-def modify_model_json_schema(schema_or_field: CoreSchemaOrField, handler:
-    GetJsonSchemaHandler, *, cls: Any, title: (str | None)=None
-    ) ->JsonSchemaValue:
+    fields = set(fields)
+    for dec in decorators:
+        if '*' in dec.info.fields:
+            continue
+        if dec.info.check_fields is False:
+            continue
+        for field in dec.info.fields:
+            if field not in fields:
+                raise PydanticUserError(
+                    f'Decorators defined with incorrect fields: {dec.cls_ref}.{dec.cls_var_name}'
+                    " (use check_fields=False if you're inheriting from the model and intended this)",
+                    code='decorator-missing-field',
+                )
+
+
+def filter_field_decorator_info_by_field(
+    validator_functions: Iterable[Decorator[FieldDecoratorInfoType]], field: str
+) -> list[Decorator[FieldDecoratorInfoType]]:
+    return [dec for dec in validator_functions if check_validator_fields_against_field_name(dec.info, field)]
+
+
+def apply_each_item_validators(
+    schema: core_schema.CoreSchema,
+    each_item_validators: list[Decorator[ValidatorDecoratorInfo]],
+    field_name: str | None,
+) -> core_schema.CoreSchema:
+    # This V1 compatibility shim should eventually be removed
+
+    # push down any `each_item=True` validators
+    # note that this won't work for any Annotated types that get wrapped by a function validator
+    # but that's okay because that didn't exist in V1
+    if schema['type'] == 'nullable':
+        schema['schema'] = apply_each_item_validators(schema['schema'], each_item_validators, field_name)
+        return schema
+    elif schema['type'] == 'tuple':
+        if (variadic_item_index := schema.get('variadic_item_index')) is not None:
+            schema['items_schema'][variadic_item_index] = apply_validators(
+                schema['items_schema'][variadic_item_index], each_item_validators, field_name
+            )
+    elif is_list_like_schema_with_items_schema(schema):
+        inner_schema = schema.get('items_schema', None)
+        if inner_schema is None:
+            inner_schema = core_schema.any_schema()
+        schema['items_schema'] = apply_validators(inner_schema, each_item_validators, field_name)
+    elif schema['type'] == 'dict':
+        # push down any `each_item=True` validators onto dict _values_
+        # this is super arbitrary but it's the V1 behavior
+        inner_schema = schema.get('values_schema', None)
+        if inner_schema is None:
+            inner_schema = core_schema.any_schema()
+        schema['values_schema'] = apply_validators(inner_schema, each_item_validators, field_name)
+    elif each_item_validators:
+        raise TypeError(
+            f"`@validator(..., each_item=True)` cannot be applied to fields with a schema of {schema['type']}"
+        )
+    return schema
+
+
+def modify_model_json_schema(
+    schema_or_field: CoreSchemaOrField,
+    handler: GetJsonSchemaHandler,
+    *,
+    cls: Any,
+    title: str | None = None,
+) -> JsonSchemaValue:
     """Add title and description for model-like classes' JSON schema.

     Args:
@@ -110,14 +220,37 @@ def modify_model_json_schema(schema_or_field: CoreSchemaOrField, handler:
     Returns:
         JsonSchemaValue: The updated JSON schema.
     """
-    pass
+    from ..dataclasses import is_pydantic_dataclass
+    from ..main import BaseModel
+    from ..root_model import RootModel
+    from ._dataclasses import is_builtin_dataclass
+
+    json_schema = handler(schema_or_field)
+    original_schema = handler.resolve_ref_schema(json_schema)
+    # Preserve the fact that definitions schemas should never have sibling keys:
+    if '$ref' in original_schema:
+        ref = original_schema['$ref']
+        original_schema.clear()
+        original_schema['allOf'] = [{'$ref': ref}]
+    if title is not None:
+        original_schema['title'] = title
+    elif 'title' not in original_schema:
+        original_schema['title'] = cls.__name__
+    # BaseModel + Dataclass; don't use cls.__doc__ as it will contain the verbose class signature by default
+    docstring = None if cls is BaseModel or is_builtin_dataclass(cls) or is_pydantic_dataclass(cls) else cls.__doc__
+    if docstring and 'description' not in original_schema:
+        original_schema['description'] = inspect.cleandoc(docstring)
+    elif issubclass(cls, RootModel) and cls.model_fields['root'].description:
+        original_schema['description'] = cls.model_fields['root'].description
+    return json_schema


 JsonEncoders = Dict[Type[Any], JsonEncoder]


-def _add_custom_serialization_from_json_encoders(json_encoders: (
-    JsonEncoders | None), tp: Any, schema: CoreSchema) ->CoreSchema:
+def _add_custom_serialization_from_json_encoders(
+    json_encoders: JsonEncoders | None, tp: Any, schema: CoreSchema
+) -> CoreSchema:
     """Iterate over the json_encoders and add the first matching encoder to the schema.

     Args:
@@ -125,7 +258,28 @@ def _add_custom_serialization_from_json_encoders(json_encoders: (
         tp: The type to check for a matching encoder.
         schema: The schema to add the encoder to.
     """
-    pass
+    if not json_encoders:
+        return schema
+    if 'serialization' in schema:
+        return schema
+    # Check the class type and its superclasses for a matching encoder
+    # Decimal.__class__.__mro__ (and probably other cases) doesn't include Decimal itself
+    # if the type is a GenericAlias (e.g. from list[int]) we need to use __class__ instead of .__mro__
+    for base in (tp, *getattr(tp, '__mro__', tp.__class__.__mro__)[:-1]):
+        encoder = json_encoders.get(base)
+        if encoder is None:
+            continue
+
+        warnings.warn(
+            f'`json_encoders` is deprecated. See https://docs.pydantic.dev/{version_short()}/concepts/serialization/#custom-serializers for alternatives',
+            PydanticDeprecatedSince20,
+        )
+
+        # TODO: in theory we should check that the schema accepts a serialization key
+        schema['serialization'] = core_schema.plain_serializer_function_ser_schema(encoder, when_used='json')
+        return schema
+
+    return schema


 TypesNamespace = Union[Dict[str, Any], None]
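
The shim above is what keeps V1-style `json_encoders` working in V2: when a matching encoder is found, a `PydanticDeprecatedSince20` warning is emitted at schema-build time and a plain, JSON-mode-only serializer is attached to the schema. A hedged sketch of the user-facing behaviour this enables, not part of the diff (public API only; the model and field names are made up):

    from decimal import Decimal
    from pydantic import BaseModel, ConfigDict

    class Invoice(BaseModel):
        # Deprecated in V2: defining this triggers the warning added above,
        # but the encoder is still honoured when dumping in JSON mode.
        model_config = ConfigDict(json_encoders={Decimal: str})
        total: Decimal

    Invoice(total=Decimal('1.50')).model_dump_json()  # encoder applied: '{"total":"1.50"}'
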
@@ -137,24 +291,48 @@ class TypesNamespaceStack:
     def __init__(self, types_namespace: TypesNamespace):
         self._types_namespace_stack: list[TypesNamespace] = [types_namespace]

+    @property
+    def tail(self) -> TypesNamespace:
+        return self._types_namespace_stack[-1]

-def _get_first_non_null(a: Any, b: Any) ->Any:
+    @contextmanager
+    def push(self, for_type: type[Any]):
+        types_namespace = {**_typing_extra.get_cls_types_namespace(for_type), **(self.tail or {})}
+        self._types_namespace_stack.append(types_namespace)
+        try:
+            yield
+        finally:
+            self._types_namespace_stack.pop()
+
+
+def _get_first_non_null(a: Any, b: Any) -> Any:
     """Return the first argument if it is not None, otherwise return the second argument.

     Use case: serialization_alias (argument a) and alias (argument b) are both defined, and serialization_alias is ''.
     This function will return serialization_alias, which is the first argument, even though it is an empty string.
     """
-    pass
+    return a if a is not None else b


 class GenerateSchema:
     """Generate core schema for a Pydantic model, dataclass and types like `str`, `datetime`, ... ."""
-    __slots__ = ('_config_wrapper_stack', '_types_namespace_stack',
-        '_typevars_map', 'field_name_stack', 'model_type_stack', 'defs')

-    def __init__(self, config_wrapper: ConfigWrapper, types_namespace: (
-        dict[str, Any] | None), typevars_map: (dict[Any, Any] | None)=None
-        ) ->None:
+    __slots__ = (
+        '_config_wrapper_stack',
+        '_types_namespace_stack',
+        '_typevars_map',
+        'field_name_stack',
+        'model_type_stack',
+        'defs',
+    )
+
+    def __init__(
+        self,
+        config_wrapper: ConfigWrapper,
+        types_namespace: dict[str, Any] | None,
+        typevars_map: dict[Any, Any] | None = None,
+    ) -> None:
+        # we need a stack for recursing into child models
         self._config_wrapper_stack = ConfigWrapperStack(config_wrapper)
         self._types_namespace_stack = TypesNamespaceStack(types_namespace)
         self._typevars_map = typevars_map
@@ -162,16 +340,145 @@ class GenerateSchema:
         self.model_type_stack = _ModelTypeStack()
         self.defs = _Definitions()

-    def str_schema(self) ->CoreSchema:
+    @classmethod
+    def __from_parent(
+        cls,
+        config_wrapper_stack: ConfigWrapperStack,
+        types_namespace_stack: TypesNamespaceStack,
+        model_type_stack: _ModelTypeStack,
+        typevars_map: dict[Any, Any] | None,
+        defs: _Definitions,
+    ) -> GenerateSchema:
+        obj = cls.__new__(cls)
+        obj._config_wrapper_stack = config_wrapper_stack
+        obj._types_namespace_stack = types_namespace_stack
+        obj.model_type_stack = model_type_stack
+        obj._typevars_map = typevars_map
+        obj.field_name_stack = _FieldNameStack()
+        obj.defs = defs
+        return obj
+
+    @property
+    def _config_wrapper(self) -> ConfigWrapper:
+        return self._config_wrapper_stack.tail
+
+    @property
+    def _types_namespace(self) -> dict[str, Any] | None:
+        return self._types_namespace_stack.tail
+
+    @property
+    def _current_generate_schema(self) -> GenerateSchema:
+        cls = self._config_wrapper.schema_generator or GenerateSchema
+        return cls.__from_parent(
+            self._config_wrapper_stack,
+            self._types_namespace_stack,
+            self.model_type_stack,
+            self._typevars_map,
+            self.defs,
+        )
+
+    @property
+    def _arbitrary_types(self) -> bool:
+        return self._config_wrapper.arbitrary_types_allowed
+
+    def str_schema(self) -> CoreSchema:
         """Generate a CoreSchema for `str`"""
-        pass
-
+        return core_schema.str_schema()
+
+    # the following methods can be overridden but should be considered
+    # unstable / private APIs
+    def _list_schema(self, tp: Any, items_type: Any) -> CoreSchema:
+        return core_schema.list_schema(self.generate_schema(items_type))
+
+    def _dict_schema(self, tp: Any, keys_type: Any, values_type: Any) -> CoreSchema:
+        return core_schema.dict_schema(self.generate_schema(keys_type), self.generate_schema(values_type))
+
+    def _set_schema(self, tp: Any, items_type: Any) -> CoreSchema:
+        return core_schema.set_schema(self.generate_schema(items_type))
+
+    def _frozenset_schema(self, tp: Any, items_type: Any) -> CoreSchema:
+        return core_schema.frozenset_schema(self.generate_schema(items_type))
+
+    def _arbitrary_type_schema(self, tp: Any) -> CoreSchema:
+        if not isinstance(tp, type):
+            warn(
+                f'{tp!r} is not a Python type (it may be an instance of an object),'
+                ' Pydantic will allow any object with no validation since we cannot even'
+                ' enforce that the input is an instance of the given type.'
+                ' To get rid of this error wrap the type with `pydantic.SkipValidation`.',
+                UserWarning,
+            )
+            return core_schema.any_schema()
+        return core_schema.is_instance_schema(tp)
+
+    def _unknown_type_schema(self, obj: Any) -> CoreSchema:
+        raise PydanticSchemaGenerationError(
+            f'Unable to generate pydantic-core schema for {obj!r}. '
+            'Set `arbitrary_types_allowed=True` in the model_config to ignore this error'
+            ' or implement `__get_pydantic_core_schema__` on your type to fully support it.'
+            '\n\nIf you got this error by calling handler(<some type>) within'
+            ' `__get_pydantic_core_schema__` then you likely need to call'
+            ' `handler.generate_schema(<some type>)` since we do not call'
+            ' `__get_pydantic_core_schema__` on `<some type>` otherwise to avoid infinite recursion.'
+        )
+
+    def _apply_discriminator_to_union(
+        self, schema: CoreSchema, discriminator: str | Discriminator | None
+    ) -> CoreSchema:
+        if discriminator is None:
+            return schema
+        try:
+            return _discriminated_union.apply_discriminator(
+                schema,
+                discriminator,
+            )
+        except _discriminated_union.MissingDefinitionForUnionRef:
+            # defer until defs are resolved
+            _discriminated_union.set_discriminator_in_metadata(
+                schema,
+                discriminator,
+            )
+            return schema

     class CollectedInvalid(Exception):
         pass

-    def generate_schema(self, obj: Any, from_dunder_get_core_schema: bool=True
-        ) ->core_schema.CoreSchema:
+    def clean_schema(self, schema: CoreSchema) -> CoreSchema:
+        schema = self.collect_definitions(schema)
+        schema = simplify_schema_references(schema)
+        if collect_invalid_schemas(schema):
+            raise self.CollectedInvalid()
+        schema = _discriminated_union.apply_discriminators(schema)
+        schema = validate_core_schema(schema)
+        return schema
+
+    def collect_definitions(self, schema: CoreSchema) -> CoreSchema:
+        ref = cast('str | None', schema.get('ref', None))
+        if ref:
+            self.defs.definitions[ref] = schema
+        if 'ref' in schema:
+            schema = core_schema.definition_reference_schema(schema['ref'])
+        return core_schema.definitions_schema(
+            schema,
+            list(self.defs.definitions.values()),
+        )
+
+    def _add_js_function(self, metadata_schema: CoreSchema, js_function: Callable[..., Any]) -> None:
+        metadata = CoreMetadataHandler(metadata_schema).metadata
+        pydantic_js_functions = metadata.setdefault('pydantic_js_functions', [])
+        # because of how we generate core schemas for nested generic models
+        # we can end up adding `BaseModel.__get_pydantic_json_schema__` multiple times
+        # this check may fail to catch duplicates if the function is a `functools.partial`
+        # or something like that
+        # but if it does it'll fail by inserting the duplicate
+        if js_function not in pydantic_js_functions:
+            pydantic_js_functions.append(js_function)
+
+    def generate_schema(
+        self,
+        obj: Any,
+        from_dunder_get_core_schema: bool = True,
+    ) -> core_schema.CoreSchema:
         """Generate core schema.

         Args:
@@ -194,36 +501,294 @@ class GenerateSchema:
                 - If `typing.TypedDict` is used instead of `typing_extensions.TypedDict` on Python < 3.12.
                 - If `__modify_schema__` method is used instead of `__get_pydantic_json_schema__`.
         """
-        pass
+        schema: CoreSchema | None = None
+
+        if from_dunder_get_core_schema:
+            from_property = self._generate_schema_from_property(obj, obj)
+            if from_property is not None:
+                schema = from_property

-    def _model_schema(self, cls: type[BaseModel]) ->core_schema.CoreSchema:
+        if schema is None:
+            schema = self._generate_schema_inner(obj)
+
+        metadata_js_function = _extract_get_pydantic_json_schema(obj, schema)
+        if metadata_js_function is not None:
+            metadata_schema = resolve_original_schema(schema, self.defs.definitions)
+            if metadata_schema:
+                self._add_js_function(metadata_schema, metadata_js_function)
+
+        schema = _add_custom_serialization_from_json_encoders(self._config_wrapper.json_encoders, obj, schema)
+
+        return schema
+
+    def _model_schema(self, cls: type[BaseModel]) -> core_schema.CoreSchema:
         """Generate schema for a Pydantic model."""
-        pass
+        with self.defs.get_schema_or_ref(cls) as (model_ref, maybe_schema):
+            if maybe_schema is not None:
+                return maybe_schema
+
+            fields = cls.model_fields
+            decorators = cls.__pydantic_decorators__
+            computed_fields = decorators.computed_fields
+            check_decorator_fields_exist(
+                chain(
+                    decorators.field_validators.values(),
+                    decorators.field_serializers.values(),
+                    decorators.validators.values(),
+                ),
+                {*fields.keys(), *computed_fields.keys()},
+            )
+            config_wrapper = ConfigWrapper(cls.model_config, check=False)
+            core_config = config_wrapper.core_config(cls)
+            title = self._get_model_title_from_config(cls, config_wrapper)
+            metadata = build_metadata_dict(js_functions=[partial(modify_model_json_schema, cls=cls, title=title)])
+
+            model_validators = decorators.model_validators.values()
+
+            extras_schema = None
+            if core_config.get('extra_fields_behavior') == 'allow':
+                assert cls.__mro__[0] is cls
+                assert cls.__mro__[-1] is object
+                for candidate_cls in cls.__mro__[:-1]:
+                    extras_annotation = getattr(candidate_cls, '__annotations__', {}).get('__pydantic_extra__', None)
+                    if extras_annotation is not None:
+                        if isinstance(extras_annotation, str):
+                            extras_annotation = _typing_extra.eval_type_backport(
+                                _typing_extra._make_forward_ref(extras_annotation, is_argument=False, is_class=True),
+                                self._types_namespace,
+                            )
+                        tp = get_origin(extras_annotation)
+                        if tp not in (Dict, dict):
+                            raise PydanticSchemaGenerationError(
+                                'The type annotation for `__pydantic_extra__` must be `Dict[str, ...]`'
+                            )
+                        extra_items_type = self._get_args_resolving_forward_refs(
+                            extras_annotation,
+                            required=True,
+                        )[1]
+                        if extra_items_type is not Any:
+                            extras_schema = self.generate_schema(extra_items_type)
+                            break
+
+            with self._config_wrapper_stack.push(config_wrapper), self._types_namespace_stack.push(cls):
+                self = self._current_generate_schema
+                if cls.__pydantic_root_model__:
+                    root_field = self._common_field_schema('root', fields['root'], decorators)
+                    inner_schema = root_field['schema']
+                    inner_schema = apply_model_validators(inner_schema, model_validators, 'inner')
+                    model_schema = core_schema.model_schema(
+                        cls,
+                        inner_schema,
+                        custom_init=getattr(cls, '__pydantic_custom_init__', None),
+                        root_model=True,
+                        post_init=getattr(cls, '__pydantic_post_init__', None),
+                        config=core_config,
+                        ref=model_ref,
+                        metadata=metadata,
+                    )
+                else:
+                    fields_schema: core_schema.CoreSchema = core_schema.model_fields_schema(
+                        {k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()},
+                        computed_fields=[
+                            self._computed_field_schema(d, decorators.field_serializers)
+                            for d in computed_fields.values()
+                        ],
+                        extras_schema=extras_schema,
+                        model_name=cls.__name__,
+                    )
+                    inner_schema = apply_validators(fields_schema, decorators.root_validators.values(), None)
+                    new_inner_schema = define_expected_missing_refs(inner_schema, recursively_defined_type_refs())
+                    if new_inner_schema is not None:
+                        inner_schema = new_inner_schema
+                    inner_schema = apply_model_validators(inner_schema, model_validators, 'inner')
+
+                    model_schema = core_schema.model_schema(
+                        cls,
+                        inner_schema,
+                        custom_init=getattr(cls, '__pydantic_custom_init__', None),
+                        root_model=False,
+                        post_init=getattr(cls, '__pydantic_post_init__', None),
+                        config=core_config,
+                        ref=model_ref,
+                        metadata=metadata,
+                    )
+
+                schema = self._apply_model_serializers(model_schema, decorators.model_serializers.values())
+                schema = apply_model_validators(schema, model_validators, 'outer')
+                self.defs.definitions[model_ref] = schema
+                return core_schema.definition_reference_schema(model_ref)

     @staticmethod
-    def _get_model_title_from_config(model: type[BaseModel |
-        StandardDataclass], config_wrapper: (ConfigWrapper | None)=None) ->(str
-         | None):
+    def _get_model_title_from_config(
+        model: type[BaseModel | StandardDataclass], config_wrapper: ConfigWrapper | None = None
+    ) -> str | None:
         """Get the title of a model if `model_title_generator` or `title` are set in the config, else return None"""
-        pass
+        if config_wrapper is None:
+            return None

-    def _unpack_refs_defs(self, schema: CoreSchema) ->CoreSchema:
+        if config_wrapper.title:
+            return config_wrapper.title
+
+        model_title_generator = config_wrapper.model_title_generator
+        if model_title_generator:
+            title = model_title_generator(model)
+            if not isinstance(title, str):
+                raise TypeError(f'model_title_generator {model_title_generator} must return str, not {title.__class__}')
+            return title
+
+        return None
+
+    def _unpack_refs_defs(self, schema: CoreSchema) -> CoreSchema:
         """Unpack all 'definitions' schemas into `GenerateSchema.defs.definitions`
         and return the inner schema.
         """
-        pass

-    def _generate_schema_from_property(self, obj: Any, source: Any) ->(
-        core_schema.CoreSchema | None):
+        def get_ref(s: CoreSchema) -> str:
+            return s['ref']  # type: ignore
+
+        if schema['type'] == 'definitions':
+            self.defs.definitions.update({get_ref(s): s for s in schema['definitions']})
+            schema = schema['schema']
+        return schema
+
+    def _generate_schema_from_property(self, obj: Any, source: Any) -> core_schema.CoreSchema | None:
         """Try to generate schema from either the `__get_pydantic_core_schema__` function or
         `__pydantic_core_schema__` property.

         Note: `__get_pydantic_core_schema__` takes priority so it can
         decide whether to use a `__pydantic_core_schema__` attribute, or generate a fresh schema.
         """
-        pass
-
-    def match_type(self, obj: Any) ->core_schema.CoreSchema:
+        # avoid calling `__get_pydantic_core_schema__` if we've already visited this object
+        if is_self_type(obj):
+            obj = self.model_type_stack.get()
+        with self.defs.get_schema_or_ref(obj) as (_, maybe_schema):
+            if maybe_schema is not None:
+                return maybe_schema
+        if obj is source:
+            ref_mode = 'unpack'
+        else:
+            ref_mode = 'to-def'
+
+        schema: CoreSchema
+
+        if (get_schema := getattr(obj, '__get_pydantic_core_schema__', None)) is not None:
+            if len(inspect.signature(get_schema).parameters) == 1:
+                # (source) -> CoreSchema
+                schema = get_schema(source)
+            else:
+                schema = get_schema(
+                    source, CallbackGetCoreSchemaHandler(self._generate_schema_inner, self, ref_mode=ref_mode)
+                )
+        # fmt: off
+        elif (
+            (existing_schema := getattr(obj, '__pydantic_core_schema__', None)) is not None
+            and not isinstance(existing_schema, MockCoreSchema)
+            and existing_schema.get('cls', None) == obj
+        ):
+            schema = existing_schema
+        # fmt: on
+        elif (validators := getattr(obj, '__get_validators__', None)) is not None:
+            warn(
+                '`__get_validators__` is deprecated and will be removed, use `__get_pydantic_core_schema__` instead.',
+                PydanticDeprecatedSince20,
+            )
+            schema = core_schema.chain_schema([core_schema.with_info_plain_validator_function(v) for v in validators()])
+        else:
+            # we have no existing schema information on the property, exit early so that we can go generate a schema
+            return None
+
+        schema = self._unpack_refs_defs(schema)
+
+        if is_function_with_inner_schema(schema):
+            ref = schema['schema'].pop('ref', None)  # pyright: ignore[reportCallIssue, reportArgumentType]
+            if ref:
+                schema['ref'] = ref
+        else:
+            ref = get_ref(schema)
+
+        if ref:
+            self.defs.definitions[ref] = schema
+            return core_schema.definition_reference_schema(ref)
+
+        return schema
+
+    def _resolve_forward_ref(self, obj: Any) -> Any:
+        # we assume that types_namespace has the target of forward references in its scope,
+        # but this could fail, for example, if calling Validator on an imported type which contains
+        # forward references to other types only defined in the module from which it was imported
+        # `Validator(SomeImportedTypeAliasWithAForwardReference)`
+        # or the equivalent for BaseModel
+        # class Model(BaseModel):
+        #   x: SomeImportedTypeAliasWithAForwardReference
+        try:
+            obj = _typing_extra.eval_type_backport(obj, globalns=self._types_namespace)
+        except NameError as e:
+            raise PydanticUndefinedAnnotation.from_name_error(e) from e
+
+        # if obj is still a ForwardRef, it means we can't evaluate it, raise PydanticUndefinedAnnotation
+        if isinstance(obj, ForwardRef):
+            raise PydanticUndefinedAnnotation(obj.__forward_arg__, f'Unable to evaluate forward reference {obj}')
+
+        if self._typevars_map:
+            obj = replace_types(obj, self._typevars_map)
+
+        return obj
+
+    @overload
+    def _get_args_resolving_forward_refs(self, obj: Any, required: Literal[True]) -> tuple[Any, ...]: ...
+
+    @overload
+    def _get_args_resolving_forward_refs(self, obj: Any) -> tuple[Any, ...] | None: ...
+
+    def _get_args_resolving_forward_refs(self, obj: Any, required: bool = False) -> tuple[Any, ...] | None:
+        args = get_args(obj)
+        if args:
+            args = tuple([self._resolve_forward_ref(a) if isinstance(a, ForwardRef) else a for a in args])
+        elif required:  # pragma: no cover
+            raise TypeError(f'Expected {obj} to have generic parameters but it had none')
+        return args
+
+    def _get_first_arg_or_any(self, obj: Any) -> Any:
+        args = self._get_args_resolving_forward_refs(obj)
+        if not args:
+            return Any
+        return args[0]
+
+    def _get_first_two_args_or_any(self, obj: Any) -> tuple[Any, Any]:
+        args = self._get_args_resolving_forward_refs(obj)
+        if not args:
+            return (Any, Any)
+        if len(args) < 2:
+            origin = get_origin(obj)
+            raise TypeError(f'Expected two type arguments for {origin}, got 1')
+        return args[0], args[1]
+
+    def _generate_schema_inner(self, obj: Any) -> core_schema.CoreSchema:
+        if isinstance(obj, _AnnotatedType):
+            return self._annotated_schema(obj)
+
+        if isinstance(obj, dict):
+            # we assume this is already a valid schema
+            return obj  # type: ignore[return-value]
+
+        if isinstance(obj, str):
+            obj = ForwardRef(obj)
+
+        if isinstance(obj, ForwardRef):
+            return self.generate_schema(self._resolve_forward_ref(obj))
+
+        from ..main import BaseModel
+
+        if lenient_issubclass(obj, BaseModel):
+            with self.model_type_stack.push(obj):
+                return self._model_schema(obj)
+
+        if isinstance(obj, PydanticRecursiveRef):
+            return core_schema.definition_reference_schema(schema_ref=obj.type_ref)
+
+        return self.match_type(obj)
+
+    def match_type(self, obj: Any) -> core_schema.CoreSchema:  # noqa: C901
         """Main mapping of types to schemas.

         The general structure is a series of if statements starting with the simple cases
@@ -236,28 +801,184 @@ class GenerateSchema:
         The idea is that we'll evolve this into adding more and more user facing methods over time
         as they get requested and we figure out what the right API for them is.
         """
-        pass
-
-    def _generate_td_field_schema(self, name: str, field_info: FieldInfo,
-        decorators: DecoratorInfos, *, required: bool=True
-        ) ->core_schema.TypedDictField:
+        if obj is str:
+            return self.str_schema()
+        elif obj is bytes:
+            return core_schema.bytes_schema()
+        elif obj is int:
+            return core_schema.int_schema()
+        elif obj is float:
+            return core_schema.float_schema()
+        elif obj is bool:
+            return core_schema.bool_schema()
+        elif obj is Any or obj is object:
+            return core_schema.any_schema()
+        elif obj is None or obj is _typing_extra.NoneType:
+            return core_schema.none_schema()
+        elif obj in TUPLE_TYPES:
+            return self._tuple_schema(obj)
+        elif obj in LIST_TYPES:
+            return self._list_schema(obj, self._get_first_arg_or_any(obj))
+        elif obj in SET_TYPES:
+            return self._set_schema(obj, self._get_first_arg_or_any(obj))
+        elif obj in FROZEN_SET_TYPES:
+            return self._frozenset_schema(obj, self._get_first_arg_or_any(obj))
+        elif obj in DICT_TYPES:
+            return self._dict_schema(obj, *self._get_first_two_args_or_any(obj))
+        elif isinstance(obj, TypeAliasType):
+            return self._type_alias_type_schema(obj)
+        elif obj is type:
+            return self._type_schema()
+        elif _typing_extra.is_callable_type(obj):
+            return core_schema.callable_schema()
+        elif _typing_extra.is_literal_type(obj):
+            return self._literal_schema(obj)
+        elif is_typeddict(obj):
+            return self._typed_dict_schema(obj, None)
+        elif _typing_extra.is_namedtuple(obj):
+            return self._namedtuple_schema(obj, None)
+        elif _typing_extra.is_new_type(obj):
+            # NewType, can't use isinstance because it fails <3.10
+            return self.generate_schema(obj.__supertype__)
+        elif obj == re.Pattern:
+            return self._pattern_schema(obj)
+        elif obj is collections.abc.Hashable or obj is typing.Hashable:
+            return self._hashable_schema()
+        elif isinstance(obj, typing.TypeVar):
+            return self._unsubstituted_typevar_schema(obj)
+        elif is_finalvar(obj):
+            if obj is Final:
+                return core_schema.any_schema()
+            return self.generate_schema(
+                self._get_first_arg_or_any(obj),
+            )
+        elif isinstance(obj, (FunctionType, LambdaType, MethodType, partial)):
+            return self._callable_schema(obj)
+        elif inspect.isclass(obj) and issubclass(obj, Enum):
+            from ._std_types_schema import get_enum_core_schema
+
+            return get_enum_core_schema(obj, self._config_wrapper.config_dict)
+
+        if _typing_extra.is_dataclass(obj):
+            return self._dataclass_schema(obj, None)
+        res = self._get_prepare_pydantic_annotations_for_known_type(obj, ())
+        if res is not None:
+            source_type, annotations = res
+            return self._apply_annotations(source_type, annotations)
+
+        origin = get_origin(obj)
+        if origin is not None:
+            return self._match_generic_type(obj, origin)
+
+        if self._arbitrary_types:
+            return self._arbitrary_type_schema(obj)
+        return self._unknown_type_schema(obj)
+
+    def _match_generic_type(self, obj: Any, origin: Any) -> CoreSchema:  # noqa: C901
+        if isinstance(origin, TypeAliasType):
+            return self._type_alias_type_schema(obj)
+
+        # Need to handle generic dataclasses before looking for the schema properties because attribute accesses
+        # on _GenericAlias delegate to the origin type, so lose the information about the concrete parametrization
+        # As a result, currently, there is no way to cache the schema for generic dataclasses. This may be possible
+        # to resolve by modifying the value returned by `Generic.__class_getitem__`, but that is a dangerous game.
+        if _typing_extra.is_dataclass(origin):
+            return self._dataclass_schema(obj, origin)
+        if _typing_extra.is_namedtuple(origin):
+            return self._namedtuple_schema(obj, origin)
+
+        from_property = self._generate_schema_from_property(origin, obj)
+        if from_property is not None:
+            return from_property
+
+        if _typing_extra.origin_is_union(origin):
+            return self._union_schema(obj)
+        elif origin in TUPLE_TYPES:
+            return self._tuple_schema(obj)
+        elif origin in LIST_TYPES:
+            return self._list_schema(obj, self._get_first_arg_or_any(obj))
+        elif origin in SET_TYPES:
+            return self._set_schema(obj, self._get_first_arg_or_any(obj))
+        elif origin in FROZEN_SET_TYPES:
+            return self._frozenset_schema(obj, self._get_first_arg_or_any(obj))
+        elif origin in DICT_TYPES:
+            return self._dict_schema(obj, *self._get_first_two_args_or_any(obj))
+        elif is_typeddict(origin):
+            return self._typed_dict_schema(obj, origin)
+        elif origin in (typing.Type, type):
+            return self._subclass_schema(obj)
+        elif origin in {typing.Sequence, collections.abc.Sequence}:
+            return self._sequence_schema(obj)
+        elif origin in {typing.Iterable, collections.abc.Iterable, typing.Generator, collections.abc.Generator}:
+            return self._iterable_schema(obj)
+        elif origin in (re.Pattern, typing.Pattern):
+            return self._pattern_schema(obj)
+
+        if self._arbitrary_types:
+            return self._arbitrary_type_schema(origin)
+        return self._unknown_type_schema(obj)
+
+    def _generate_td_field_schema(
+        self,
+        name: str,
+        field_info: FieldInfo,
+        decorators: DecoratorInfos,
+        *,
+        required: bool = True,
+    ) -> core_schema.TypedDictField:
         """Prepare a TypedDictField to represent a model or typeddict field."""
-        pass
-
-    def _generate_md_field_schema(self, name: str, field_info: FieldInfo,
-        decorators: DecoratorInfos) ->core_schema.ModelField:
+        common_field = self._common_field_schema(name, field_info, decorators)
+        return core_schema.typed_dict_field(
+            common_field['schema'],
+            required=False if not field_info.is_required() else required,
+            serialization_exclude=common_field['serialization_exclude'],
+            validation_alias=common_field['validation_alias'],
+            serialization_alias=common_field['serialization_alias'],
+            metadata=common_field['metadata'],
+        )
+
+    def _generate_md_field_schema(
+        self,
+        name: str,
+        field_info: FieldInfo,
+        decorators: DecoratorInfos,
+    ) -> core_schema.ModelField:
         """Prepare a ModelField to represent a model field."""
-        pass
-
-    def _generate_dc_field_schema(self, name: str, field_info: FieldInfo,
-        decorators: DecoratorInfos) ->core_schema.DataclassField:
+        common_field = self._common_field_schema(name, field_info, decorators)
+        return core_schema.model_field(
+            common_field['schema'],
+            serialization_exclude=common_field['serialization_exclude'],
+            validation_alias=common_field['validation_alias'],
+            serialization_alias=common_field['serialization_alias'],
+            frozen=common_field['frozen'],
+            metadata=common_field['metadata'],
+        )
+
+    def _generate_dc_field_schema(
+        self,
+        name: str,
+        field_info: FieldInfo,
+        decorators: DecoratorInfos,
+    ) -> core_schema.DataclassField:
         """Prepare a DataclassField to represent the parameter/field, of a dataclass."""
-        pass
+        common_field = self._common_field_schema(name, field_info, decorators)
+        return core_schema.dataclass_field(
+            name,
+            common_field['schema'],
+            init=field_info.init,
+            init_only=field_info.init_var or None,
+            kw_only=None if field_info.kw_only else False,
+            serialization_exclude=common_field['serialization_exclude'],
+            validation_alias=common_field['validation_alias'],
+            serialization_alias=common_field['serialization_alias'],
+            frozen=common_field['frozen'],
+            metadata=common_field['metadata'],
+        )

     @staticmethod
-    def _apply_alias_generator_to_field_info(alias_generator: (Callable[[
-        str], str] | AliasGenerator), field_info: FieldInfo, field_name: str
-        ) ->None:
+    def _apply_alias_generator_to_field_info(
+        alias_generator: Callable[[str], str] | AliasGenerator, field_info: FieldInfo, field_name: str
+    ) -> None:
         """Apply an alias_generator to aliases on a FieldInfo instance if appropriate.

         Args:
@@ -265,12 +986,51 @@ class GenerateSchema:
             field_info: The FieldInfo instance to which the alias_generator is (maybe) applied.
             field_name: The name of the field from which to generate the alias.
         """
-        pass
+        # Apply an alias_generator if
+        # 1. An alias is not specified
+        # 2. An alias is specified, but the priority is <= 1
+        if (
+            field_info.alias_priority is None
+            or field_info.alias_priority <= 1
+            or field_info.alias is None
+            or field_info.validation_alias is None
+            or field_info.serialization_alias is None
+        ):
+            alias, validation_alias, serialization_alias = None, None, None
+
+            if isinstance(alias_generator, AliasGenerator):
+                alias, validation_alias, serialization_alias = alias_generator.generate_aliases(field_name)
+            elif isinstance(alias_generator, Callable):
+                alias = alias_generator(field_name)
+                if not isinstance(alias, str):
+                    raise TypeError(f'alias_generator {alias_generator} must return str, not {alias.__class__}')
+
+            # if priority is not set, we set to 1
+            # which supports the case where the alias_generator from a child class is used
+            # to generate an alias for a field in a parent class
+            if field_info.alias_priority is None or field_info.alias_priority <= 1:
+                field_info.alias_priority = 1
+
+            # if the priority is 1, then we set the aliases to the generated alias
+            if field_info.alias_priority == 1:
+                field_info.serialization_alias = _get_first_non_null(serialization_alias, alias)
+                field_info.validation_alias = _get_first_non_null(validation_alias, alias)
+                field_info.alias = alias
+
+            # if any of the aliases are not set, then we set them to the corresponding generated alias
+            if field_info.alias is None:
+                field_info.alias = alias
+            if field_info.serialization_alias is None:
+                field_info.serialization_alias = _get_first_non_null(serialization_alias, alias)
+            if field_info.validation_alias is None:
+                field_info.validation_alias = _get_first_non_null(validation_alias, alias)

     @staticmethod
-    def _apply_alias_generator_to_computed_field_info(alias_generator: (
-        Callable[[str], str] | AliasGenerator), computed_field_info:
-        ComputedFieldInfo, computed_field_name: str):
+    def _apply_alias_generator_to_computed_field_info(
+        alias_generator: Callable[[str], str] | AliasGenerator,
+        computed_field_info: ComputedFieldInfo,
+        computed_field_name: str,
+    ):
         """Apply an alias_generator to alias on a ComputedFieldInfo instance if appropriate.

         Args:
@@ -278,30 +1038,219 @@ class GenerateSchema:
             computed_field_info: The ComputedFieldInfo instance to which the alias_generator is (maybe) applied.
             computed_field_name: The name of the computed field from which to generate the alias.
         """
-        pass
+        # Apply an alias_generator if
+        # 1. An alias is not specified
+        # 2. An alias is specified, but the priority is <= 1
+
+        if (
+            computed_field_info.alias_priority is None
+            or computed_field_info.alias_priority <= 1
+            or computed_field_info.alias is None
+        ):
+            alias, validation_alias, serialization_alias = None, None, None
+
+            if isinstance(alias_generator, AliasGenerator):
+                alias, validation_alias, serialization_alias = alias_generator.generate_aliases(computed_field_name)
+            elif isinstance(alias_generator, Callable):
+                alias = alias_generator(computed_field_name)
+                if not isinstance(alias, str):
+                    raise TypeError(f'alias_generator {alias_generator} must return str, not {alias.__class__}')
+
+            # if priority is not set, we set to 1
+            # which supports the case where the alias_generator from a child class is used
+            # to generate an alias for a field in a parent class
+            if computed_field_info.alias_priority is None or computed_field_info.alias_priority <= 1:
+                computed_field_info.alias_priority = 1
+
+            # if the priority is 1, then we set the aliases to the generated alias
+            # note that we use the serialization_alias with priority over alias, as computed_field
+            # aliases are used for serialization only (not validation)
+            if computed_field_info.alias_priority == 1:
+                computed_field_info.alias = _get_first_non_null(serialization_alias, alias)

     @staticmethod
-    def _apply_field_title_generator_to_field_info(config_wrapper:
-        ConfigWrapper, field_info: (FieldInfo | ComputedFieldInfo),
-        field_name: str) ->None:
+    def _apply_field_title_generator_to_field_info(
+        config_wrapper: ConfigWrapper, field_info: FieldInfo | ComputedFieldInfo, field_name: str
+    ) -> None:
         """Apply a field_title_generator on a FieldInfo or ComputedFieldInfo instance if appropriate
         Args:
             config_wrapper: The config of the model
             field_info: The FieldInfo or ComputedField instance to which the title_generator is (maybe) applied.
             field_name: The name of the field from which to generate the title.
         """
-        pass
-
-    def _union_schema(self, union_type: Any) ->core_schema.CoreSchema:
+        field_title_generator = field_info.field_title_generator or config_wrapper.field_title_generator
+
+        if field_title_generator is None:
+            return
+
+        if field_info.title is None:
+            title = field_title_generator(field_name, field_info)  # type: ignore
+            if not isinstance(title, str):
+                raise TypeError(f'field_title_generator {field_title_generator} must return str, not {title.__class__}')
+
+            field_info.title = title
+
+    def _common_field_schema(  # C901
+        self, name: str, field_info: FieldInfo, decorators: DecoratorInfos
+    ) -> _CommonField:
+        # Update FieldInfo annotation if appropriate:
+        from .. import AliasChoices, AliasPath
+        from ..fields import FieldInfo
+
+        if has_instance_in_type(field_info.annotation, (ForwardRef, str)):
+            types_namespace = self._types_namespace
+            if self._typevars_map:
+                types_namespace = (types_namespace or {}).copy()
+                # Ensure that typevars get mapped to their concrete types:
+                types_namespace.update({k.__name__: v for k, v in self._typevars_map.items()})
+
+            evaluated = _typing_extra.eval_type_lenient(field_info.annotation, types_namespace)
+            if evaluated is not field_info.annotation and not has_instance_in_type(evaluated, PydanticRecursiveRef):
+                new_field_info = FieldInfo.from_annotation(evaluated)
+                field_info.annotation = new_field_info.annotation
+
+                # Handle any field info attributes that may have been obtained from now-resolved annotations
+                for k, v in new_field_info._attributes_set.items():
+                    # If an attribute is already set, it means it was set by assigning to a call to Field (or just a
+                    # default value), and that should take the highest priority. So don't overwrite existing attributes.
+                    # We skip over "attributes" that are present in the metadata_lookup dict because these won't
+                    # actually end up as attributes of the `FieldInfo` instance.
+                    if k not in field_info._attributes_set and k not in field_info.metadata_lookup:
+                        setattr(field_info, k, v)
+
+                # Finally, ensure the field info also reflects all the `_attributes_set` that are actually metadata.
+                field_info.metadata = [*new_field_info.metadata, *field_info.metadata]
+
+        source_type, annotations = field_info.annotation, field_info.metadata
+
+        def set_discriminator(schema: CoreSchema) -> CoreSchema:
+            schema = self._apply_discriminator_to_union(schema, field_info.discriminator)
+            return schema
+
+        with self.field_name_stack.push(name):
+            if field_info.discriminator is not None:
+                schema = self._apply_annotations(source_type, annotations, transform_inner_schema=set_discriminator)
+            else:
+                schema = self._apply_annotations(
+                    source_type,
+                    annotations,
+                )
+
+        # This V1 compatibility shim should eventually be removed
+        # push down any `each_item=True` validators
+        # note that this won't work for any Annotated types that get wrapped by a function validator
+        # but that's okay because that didn't exist in V1
+        this_field_validators = filter_field_decorator_info_by_field(decorators.validators.values(), name)
+        if _validators_require_validate_default(this_field_validators):
+            field_info.validate_default = True
+        each_item_validators = [v for v in this_field_validators if v.info.each_item is True]
+        this_field_validators = [v for v in this_field_validators if v not in each_item_validators]
+        schema = apply_each_item_validators(schema, each_item_validators, name)
+
+        schema = apply_validators(schema, filter_field_decorator_info_by_field(this_field_validators, name), name)
+        schema = apply_validators(
+            schema, filter_field_decorator_info_by_field(decorators.field_validators.values(), name), name
+        )
+
+        # the default validator needs to go outside of any other validators
+        # so that it is the topmost validator for the field validator
+        # which uses it to check if the field has a default value or not
+        if not field_info.is_required():
+            schema = wrap_default(field_info, schema)
+
+        schema = self._apply_field_serializers(
+            schema, filter_field_decorator_info_by_field(decorators.field_serializers.values(), name)
+        )
+        self._apply_field_title_generator_to_field_info(self._config_wrapper, field_info, name)
+
+        json_schema_updates = {
+            'title': field_info.title,
+            'description': field_info.description,
+            'deprecated': bool(field_info.deprecated) or field_info.deprecated == '' or None,
+            'examples': to_jsonable_python(field_info.examples),
+        }
+        json_schema_updates = {k: v for k, v in json_schema_updates.items() if v is not None}
+
+        json_schema_extra = field_info.json_schema_extra
+
+        metadata = build_metadata_dict(
+            js_annotation_functions=[get_json_schema_update_func(json_schema_updates, json_schema_extra)]
+        )
+
+        alias_generator = self._config_wrapper.alias_generator
+        if alias_generator is not None:
+            self._apply_alias_generator_to_field_info(alias_generator, field_info, name)
+
+        if isinstance(field_info.validation_alias, (AliasChoices, AliasPath)):
+            validation_alias = field_info.validation_alias.convert_to_aliases()
+        else:
+            validation_alias = field_info.validation_alias
+
+        return _common_field(
+            schema,
+            serialization_exclude=True if field_info.exclude else None,
+            validation_alias=validation_alias,
+            serialization_alias=field_info.serialization_alias,
+            frozen=field_info.frozen,
+            metadata=metadata,
+        )
+
+    def _union_schema(self, union_type: Any) -> core_schema.CoreSchema:
         """Generate schema for a Union."""
-        pass
-
-    def _literal_schema(self, literal_type: Any) ->CoreSchema:
+        args = self._get_args_resolving_forward_refs(union_type, required=True)
+        choices: list[CoreSchema] = []
+        nullable = False
+        for arg in args:
+            if arg is None or arg is _typing_extra.NoneType:
+                nullable = True
+            else:
+                choices.append(self.generate_schema(arg))
+
+        if len(choices) == 1:
+            s = choices[0]
+        else:
+            choices_with_tags: list[CoreSchema | tuple[CoreSchema, str]] = []
+            for choice in choices:
+                tag = choice.get('metadata', {}).get(_core_utils.TAGGED_UNION_TAG_KEY)
+                if tag is not None:
+                    choices_with_tags.append((choice, tag))
+                else:
+                    choices_with_tags.append(choice)
+            s = core_schema.union_schema(choices_with_tags)
+
+        if nullable:
+            s = core_schema.nullable_schema(s)
+        return s
+
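As a quick runtime illustration of the nullable handling above (a minimal sketch using the public `TypeAdapter` API, not code from this patch):

    from typing import Optional

    from pydantic import TypeAdapter

    ta = TypeAdapter(Optional[int])  # Union[int, None] -> nullable(int) core schema
    assert ta.validate_python(None) is None
    assert ta.validate_python('3') == 3  # the non-None member still coerces in lax mode
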
+    def _type_alias_type_schema(
+        self,
+        obj: Any,  # TypeAliasType
+    ) -> CoreSchema:
+        with self.defs.get_schema_or_ref(obj) as (ref, maybe_schema):
+            if maybe_schema is not None:
+                return maybe_schema
+
+            origin = get_origin(obj) or obj
+
+            annotation = origin.__value__
+            typevars_map = get_standard_typevars_map(obj)
+
+            with self._types_namespace_stack.push(origin):
+                annotation = _typing_extra.eval_type_lenient(annotation, self._types_namespace)
+                annotation = replace_types(annotation, typevars_map)
+                schema = self.generate_schema(annotation)
+                assert schema['type'] != 'definitions'
+                schema['ref'] = ref  # type: ignore
+            self.defs.definitions[ref] = schema
+            return core_schema.definition_reference_schema(ref)
+
+    def _literal_schema(self, literal_type: Any) -> CoreSchema:
         """Generate schema for a Literal."""
-        pass
+        expected = _typing_extra.all_literal_values(literal_type)
+        assert expected, f'literal "expected" cannot be empty, obj={literal_type}'
+        return core_schema.literal_schema(expected)

-    def _typed_dict_schema(self, typed_dict_cls: Any, origin: Any
-        ) ->core_schema.CoreSchema:
+    def _typed_dict_schema(self, typed_dict_cls: Any, origin: Any) -> core_schema.CoreSchema:
         """Generate schema for a TypedDict.

         It is not possible to track required/optional keys in TypedDict without __required_keys__
@@ -317,106 +1266,846 @@ class GenerateSchema:
         Hence, to avoid creating validators that do not do what users expect, we only
         support typing.TypedDict on Python >= 3.12 or typing_extensions.TypedDict on all versions
         """
-        pass
-
-    def _namedtuple_schema(self, namedtuple_cls: Any, origin: Any
-        ) ->core_schema.CoreSchema:
+        from ..fields import FieldInfo
+
+        with self.model_type_stack.push(typed_dict_cls), self.defs.get_schema_or_ref(typed_dict_cls) as (
+            typed_dict_ref,
+            maybe_schema,
+        ):
+            if maybe_schema is not None:
+                return maybe_schema
+
+            typevars_map = get_standard_typevars_map(typed_dict_cls)
+            if origin is not None:
+                typed_dict_cls = origin
+
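The default wrapping above is what makes plain defaults and `default_factory` both work; a quick sketch with an illustrative model (not code from this patch):

    from pydantic import BaseModel, Field

    class Settings(BaseModel):
        tags: list[str] = Field(default_factory=list)  # wrapped via default_factory
        retries: int = 3                               # wrapped via a plain default

    s = Settings()
    assert s.tags == [] and s.retries == 3
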
+            if not _SUPPORTS_TYPEDDICT and type(typed_dict_cls).__module__ == 'typing':
+                raise PydanticUserError(
+                    'Please use `typing_extensions.TypedDict` instead of `typing.TypedDict` on Python < 3.12.',
+                    code='typed-dict-version',
+                )
+
+            try:
+                config: ConfigDict | None = get_attribute_from_bases(typed_dict_cls, '__pydantic_config__')
+            except AttributeError:
+                config = None
+
+            with self._config_wrapper_stack.push(config), self._types_namespace_stack.push(typed_dict_cls):
+                core_config = self._config_wrapper.core_config(typed_dict_cls)
+
+                self = self._current_generate_schema
+
+                required_keys: frozenset[str] = typed_dict_cls.__required_keys__
+
+                fields: dict[str, core_schema.TypedDictField] = {}
+
+                decorators = DecoratorInfos.build(typed_dict_cls)
+
+                if self._config_wrapper.use_attribute_docstrings:
+                    field_docstrings = extract_docstrings_from_cls(typed_dict_cls, use_inspect=True)
+                else:
+                    field_docstrings = None
+
+                for field_name, annotation in get_type_hints_infer_globalns(
+                    typed_dict_cls, localns=self._types_namespace, include_extras=True
+                ).items():
+                    annotation = replace_types(annotation, typevars_map)
+                    required = field_name in required_keys
+
+                    if get_origin(annotation) == _typing_extra.Required:
+                        required = True
+                        annotation = self._get_args_resolving_forward_refs(
+                            annotation,
+                            required=True,
+                        )[0]
+                    elif get_origin(annotation) == _typing_extra.NotRequired:
+                        required = False
+                        annotation = self._get_args_resolving_forward_refs(
+                            annotation,
+                            required=True,
+                        )[0]
+
+                    field_info = FieldInfo.from_annotation(annotation)
+                    if (
+                        field_docstrings is not None
+                        and field_info.description is None
+                        and field_name in field_docstrings
+                    ):
+                        field_info.description = field_docstrings[field_name]
+                    self._apply_field_title_generator_to_field_info(self._config_wrapper, field_info, field_name)
+                    fields[field_name] = self._generate_td_field_schema(
+                        field_name, field_info, decorators, required=required
+                    )
+
+                title = self._get_model_title_from_config(typed_dict_cls, ConfigWrapper(config))
+                metadata = build_metadata_dict(
+                    js_functions=[partial(modify_model_json_schema, cls=typed_dict_cls, title=title)],
+                    typed_dict_cls=typed_dict_cls,
+                )
+                td_schema = core_schema.typed_dict_schema(
+                    fields,
+                    computed_fields=[
+                        self._computed_field_schema(d, decorators.field_serializers)
+                        for d in decorators.computed_fields.values()
+                    ],
+                    ref=typed_dict_ref,
+                    metadata=metadata,
+                    config=core_config,
+                )
+
+                schema = self._apply_model_serializers(td_schema, decorators.model_serializers.values())
+                schema = apply_model_validators(schema, decorators.model_validators.values(), 'all')
+                self.defs.definitions[typed_dict_ref] = schema
+                return core_schema.definition_reference_schema(typed_dict_ref)
+
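The docstring above explains why `typing_extensions.TypedDict` is required on Python < 3.12; a minimal sketch of the resulting behaviour (public API only, not code from this patch):

    from typing_extensions import NotRequired, TypedDict

    from pydantic import TypeAdapter

    class Movie(TypedDict):
        title: str
        year: NotRequired[int]

    ta = TypeAdapter(Movie)
    assert ta.validate_python({'title': 'Arrival'}) == {'title': 'Arrival'}  # 'year' is optional
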
+    def _namedtuple_schema(self, namedtuple_cls: Any, origin: Any) -> core_schema.CoreSchema:
         """Generate schema for a NamedTuple."""
-        pass
-
-    def _generate_parameter_schema(self, name: str, annotation: type[Any],
-        default: Any=Parameter.empty, mode: (Literal['positional_only',
-        'positional_or_keyword', 'keyword_only'] | None)=None
-        ) ->core_schema.ArgumentsParameter:
+        with self.model_type_stack.push(namedtuple_cls), self.defs.get_schema_or_ref(namedtuple_cls) as (
+            namedtuple_ref,
+            maybe_schema,
+        ):
+            if maybe_schema is not None:
+                return maybe_schema
+            typevars_map = get_standard_typevars_map(namedtuple_cls)
+            if origin is not None:
+                namedtuple_cls = origin
+
+            annotations: dict[str, Any] = get_type_hints_infer_globalns(
+                namedtuple_cls, include_extras=True, localns=self._types_namespace
+            )
+            if not annotations:
+                # annotations is empty, happens if namedtuple_cls defined via collections.namedtuple(...)
+                annotations = {k: Any for k in namedtuple_cls._fields}
+
+            if typevars_map:
+                annotations = {
+                    field_name: replace_types(annotation, typevars_map)
+                    for field_name, annotation in annotations.items()
+                }
+
+            arguments_schema = core_schema.arguments_schema(
+                [
+                    self._generate_parameter_schema(
+                        field_name, annotation, default=namedtuple_cls._field_defaults.get(field_name, Parameter.empty)
+                    )
+                    for field_name, annotation in annotations.items()
+                ],
+                metadata=build_metadata_dict(js_prefer_positional_arguments=True),
+            )
+            return core_schema.call_schema(arguments_schema, namedtuple_cls, ref=namedtuple_ref)
+
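A hedged sketch of what the arguments/call schema built above gives callers at runtime (public API only, illustrative class):

    from typing import NamedTuple

    from pydantic import TypeAdapter

    class Point(NamedTuple):
        x: int
        y: int = 0

    ta = TypeAdapter(Point)
    assert ta.validate_python(('1', 2)) == Point(1, 2)   # positional input, with coercion
    assert ta.validate_python({'x': 3}) == Point(3, 0)   # keyword input, default applied
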
+    def _generate_parameter_schema(
+        self,
+        name: str,
+        annotation: type[Any],
+        default: Any = Parameter.empty,
+        mode: Literal['positional_only', 'positional_or_keyword', 'keyword_only'] | None = None,
+    ) -> core_schema.ArgumentsParameter:
         """Prepare a ArgumentsParameter to represent a field in a namedtuple or function signature."""
-        pass
-
-    def _tuple_schema(self, tuple_type: Any) ->core_schema.CoreSchema:
+        from ..fields import FieldInfo
+
+        if default is Parameter.empty:
+            field = FieldInfo.from_annotation(annotation)
+        else:
+            field = FieldInfo.from_annotated_attribute(annotation, default)
+        assert field.annotation is not None, 'field.annotation should not be None when generating a schema'
+        source_type, annotations = field.annotation, field.metadata
+        with self.field_name_stack.push(name):
+            schema = self._apply_annotations(source_type, annotations)
+
+        if not field.is_required():
+            schema = wrap_default(field, schema)
+
+        parameter_schema = core_schema.arguments_parameter(name, schema)
+        if mode is not None:
+            parameter_schema['mode'] = mode
+        if field.alias is not None:
+            parameter_schema['alias'] = field.alias
+        else:
+            alias_generator = self._config_wrapper.alias_generator
+            if isinstance(alias_generator, AliasGenerator) and alias_generator.alias is not None:
+                parameter_schema['alias'] = alias_generator.alias(name)
+            elif isinstance(alias_generator, Callable):
+                parameter_schema['alias'] = alias_generator(name)
+        return parameter_schema
+
+    def _tuple_schema(self, tuple_type: Any) -> core_schema.CoreSchema:
         """Generate schema for a Tuple, e.g. `tuple[int, str]` or `tuple[int, ...]`."""
-        pass
-
-    def _union_is_subclass_schema(self, union_type: Any
-        ) ->core_schema.CoreSchema:
+        # TODO: do we really need to resolve type vars here?
+        typevars_map = get_standard_typevars_map(tuple_type)
+        params = self._get_args_resolving_forward_refs(tuple_type)
+
+        if typevars_map and params:
+            params = tuple(replace_types(param, typevars_map) for param in params)
+
+        # NOTE: subtle difference: `tuple[()]` gives `params=()`, whereas `typing.Tuple[()]` gives `params=((),)`
+        # This is only true for <3.11, on Python 3.11+ `typing.Tuple[()]` gives `params=()`
+        if not params:
+            if tuple_type in TUPLE_TYPES:
+                return core_schema.tuple_schema([core_schema.any_schema()], variadic_item_index=0)
+            else:
+                # special case for `tuple[()]` which means `tuple[]` - an empty tuple
+                return core_schema.tuple_schema([])
+        elif params[-1] is Ellipsis:
+            if len(params) == 2:
+                return core_schema.tuple_schema([self.generate_schema(params[0])], variadic_item_index=0)
+            else:
+                # TODO: something like https://github.com/pydantic/pydantic/issues/5952
+                raise ValueError('Variable tuples can only have one type')
+        elif len(params) == 1 and params[0] == ():
+            # special case for `Tuple[()]` which means `Tuple[]` - an empty tuple
+            # NOTE: This conditional can be removed when we drop support for Python 3.10.
+            return core_schema.tuple_schema([])
+        else:
+            return core_schema.tuple_schema([self.generate_schema(param) for param in params])
+
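For reference, the two tuple forms handled above behave like this at runtime (sketch, assuming Python 3.9+ builtin generics):

    from pydantic import TypeAdapter

    assert TypeAdapter(tuple[int, str]).validate_python([1, 'a']) == (1, 'a')   # fixed-length
    assert TypeAdapter(tuple[int, ...]).validate_python(['1', '2']) == (1, 2)   # variadic
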
+    def _type_schema(self) -> core_schema.CoreSchema:
+        return core_schema.custom_error_schema(
+            core_schema.is_instance_schema(type),
+            custom_error_type='is_type',
+            custom_error_message='Input should be a type',
+        )
+
+    def _union_is_subclass_schema(self, union_type: Any) -> core_schema.CoreSchema:
         """Generate schema for `Type[Union[X, ...]]`."""
-        pass
+        args = self._get_args_resolving_forward_refs(union_type, required=True)
+        return core_schema.union_schema([self.generate_schema(typing.Type[args]) for args in args])

-    def _subclass_schema(self, type_: Any) ->core_schema.CoreSchema:
+    def _subclass_schema(self, type_: Any) -> core_schema.CoreSchema:
         """Generate schema for a Type, e.g. `Type[int]`."""
-        pass
-
-    def _sequence_schema(self, sequence_type: Any) ->core_schema.CoreSchema:
+        type_param = self._get_first_arg_or_any(type_)
+        if type_param == Any:
+            return self._type_schema()
+        elif isinstance(type_param, typing.TypeVar):
+            if type_param.__bound__:
+                if _typing_extra.origin_is_union(get_origin(type_param.__bound__)):
+                    return self._union_is_subclass_schema(type_param.__bound__)
+                return core_schema.is_subclass_schema(type_param.__bound__)
+            elif type_param.__constraints__:
+                return core_schema.union_schema(
+                    [self.generate_schema(typing.Type[c]) for c in type_param.__constraints__]
+                )
+            else:
+                return self._type_schema()
+        elif _typing_extra.origin_is_union(get_origin(type_param)):
+            return self._union_is_subclass_schema(type_param)
+        else:
+            return core_schema.is_subclass_schema(type_param)
+
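A small sketch of the subclass check this schema performs (public API, not part of the patch):

    from typing import Type

    from pydantic import TypeAdapter, ValidationError

    ta = TypeAdapter(Type[int])
    assert ta.validate_python(bool) is bool  # bool is a subclass of int
    try:
        ta.validate_python(str)
    except ValidationError:
        pass  # str is rejected, as expected
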
+    def _sequence_schema(self, sequence_type: Any) -> core_schema.CoreSchema:
         """Generate schema for a Sequence, e.g. `Sequence[int]`."""
-        pass
+        from ._std_types_schema import serialize_sequence_via_list

-    def _iterable_schema(self, type_: Any) ->core_schema.GeneratorSchema:
-        """Generate a schema for an `Iterable`."""
-        pass
+        item_type = self._get_first_arg_or_any(sequence_type)
+        item_type_schema = self.generate_schema(item_type)
+        list_schema = core_schema.list_schema(item_type_schema)

-    def _dataclass_schema(self, dataclass: type[StandardDataclass], origin:
-        (type[StandardDataclass] | None)) ->core_schema.CoreSchema:
-        """Generate schema for a dataclass."""
-        pass
+        python_schema = core_schema.is_instance_schema(typing.Sequence, cls_repr='Sequence')
+        if item_type != Any:
+            from ._validators import sequence_validator

-    def _callable_schema(self, function: Callable[..., Any]
-        ) ->core_schema.CallSchema:
+            python_schema = core_schema.chain_schema(
+                [python_schema, core_schema.no_info_wrap_validator_function(sequence_validator, list_schema)],
+            )
+
+        serialization = core_schema.wrap_serializer_function_ser_schema(
+            serialize_sequence_via_list, schema=item_type_schema, info_arg=True
+        )
+        return core_schema.json_or_python_schema(
+            json_schema=list_schema, python_schema=python_schema, serialization=serialization
+        )
+
+    def _iterable_schema(self, type_: Any) -> core_schema.GeneratorSchema:
+        """Generate a schema for an `Iterable`."""
+        item_type = self._get_first_arg_or_any(type_)
+
+        return core_schema.generator_schema(self.generate_schema(item_type))
+
+    def _pattern_schema(self, pattern_type: Any) -> core_schema.CoreSchema:
+        from . import _validators
+
+        metadata = build_metadata_dict(js_functions=[lambda _1, _2: {'type': 'string', 'format': 'regex'}])
+        ser = core_schema.plain_serializer_function_ser_schema(
+            attrgetter('pattern'), when_used='json', return_schema=core_schema.str_schema()
+        )
+        if pattern_type == typing.Pattern or pattern_type == re.Pattern:
+            # bare type
+            return core_schema.no_info_plain_validator_function(
+                _validators.pattern_either_validator, serialization=ser, metadata=metadata
+            )
+
+        param = self._get_args_resolving_forward_refs(
+            pattern_type,
+            required=True,
+        )[0]
+        if param is str:
+            return core_schema.no_info_plain_validator_function(
+                _validators.pattern_str_validator, serialization=ser, metadata=metadata
+            )
+        elif param is bytes:
+            return core_schema.no_info_plain_validator_function(
+                _validators.pattern_bytes_validator, serialization=ser, metadata=metadata
+            )
+        else:
+            raise PydanticSchemaGenerationError(f'Unable to generate pydantic-core schema for {pattern_type!r}.')
+
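The pattern schema above validates strings into compiled patterns and serializes them back to their source text in JSON mode; a minimal sketch (assuming Python 3.9+ for `re.Pattern[str]`):

    import re

    from pydantic import TypeAdapter

    ta = TypeAdapter(re.Pattern[str])
    pattern = ta.validate_python('^a+$')
    assert isinstance(pattern, re.Pattern)
    assert ta.dump_json(pattern) == b'"^a+$"'  # serialized via the pattern's source string
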
+    def _hashable_schema(self) -> core_schema.CoreSchema:
+        return core_schema.custom_error_schema(
+            core_schema.is_instance_schema(collections.abc.Hashable),
+            custom_error_type='is_hashable',
+            custom_error_message='Input should be hashable',
+        )
+
+    def _dataclass_schema(
+        self, dataclass: type[StandardDataclass], origin: type[StandardDataclass] | None
+    ) -> core_schema.CoreSchema:
+        """Generate schema for a dataclass."""
+        with self.model_type_stack.push(dataclass), self.defs.get_schema_or_ref(dataclass) as (
+            dataclass_ref,
+            maybe_schema,
+        ):
+            if maybe_schema is not None:
+                return maybe_schema
+
+            typevars_map = get_standard_typevars_map(dataclass)
+            if origin is not None:
+                dataclass = origin
+
+            with ExitStack() as dataclass_bases_stack:
+                # Pushing a namespace prioritises items already in the stack, so iterate through the MRO forwards
+                for dataclass_base in dataclass.__mro__:
+                    if dataclasses.is_dataclass(dataclass_base):
+                        dataclass_bases_stack.enter_context(self._types_namespace_stack.push(dataclass_base))
+
+                # Pushing a config overwrites the previous config, so iterate through the MRO backwards
+                config = None
+                for dataclass_base in reversed(dataclass.__mro__):
+                    if dataclasses.is_dataclass(dataclass_base):
+                        config = getattr(dataclass_base, '__pydantic_config__', None)
+                        dataclass_bases_stack.enter_context(self._config_wrapper_stack.push(config))
+
+                core_config = self._config_wrapper.core_config(dataclass)
+
+                self = self._current_generate_schema
+
+                from ..dataclasses import is_pydantic_dataclass
+
+                if is_pydantic_dataclass(dataclass):
+                    fields = deepcopy(dataclass.__pydantic_fields__)
+                    if typevars_map:
+                        for field in fields.values():
+                            field.apply_typevars_map(typevars_map, self._types_namespace)
+                else:
+                    fields = collect_dataclass_fields(
+                        dataclass,
+                        self._types_namespace,
+                        typevars_map=typevars_map,
+                    )
+
+                if self._config_wrapper_stack.tail.extra == 'allow':
+                    # disallow combination of init=False on a dataclass field and extra='allow' on a dataclass
+                    for field_name, field in fields.items():
+                        if field.init is False:
+                            raise PydanticUserError(
+                                f'Field {field_name} has `init=False` and dataclass has config setting `extra="allow"`. '
+                                f'This combination is not allowed.',
+                                code='dataclass-init-false-extra-allow',
+                            )
+
+                decorators = dataclass.__dict__.get('__pydantic_decorators__') or DecoratorInfos.build(dataclass)
+                # Move kw_only=False args to the start of the list, as this is how vanilla dataclasses work.
+                # Note that when kw_only is missing or None, it is treated as equivalent to kw_only=True
+                args = sorted(
+                    (self._generate_dc_field_schema(k, v, decorators) for k, v in fields.items()),
+                    key=lambda a: a.get('kw_only') is not False,
+                )
+                has_post_init = hasattr(dataclass, '__post_init__')
+                has_slots = hasattr(dataclass, '__slots__')
+
+                args_schema = core_schema.dataclass_args_schema(
+                    dataclass.__name__,
+                    args,
+                    computed_fields=[
+                        self._computed_field_schema(d, decorators.field_serializers)
+                        for d in decorators.computed_fields.values()
+                    ],
+                    collect_init_only=has_post_init,
+                )
+
+                inner_schema = apply_validators(args_schema, decorators.root_validators.values(), None)
+
+                model_validators = decorators.model_validators.values()
+                inner_schema = apply_model_validators(inner_schema, model_validators, 'inner')
+
+                title = self._get_model_title_from_config(dataclass, ConfigWrapper(config))
+                metadata = build_metadata_dict(
+                    js_functions=[partial(modify_model_json_schema, cls=dataclass, title=title)]
+                )
+
+                dc_schema = core_schema.dataclass_schema(
+                    dataclass,
+                    inner_schema,
+                    post_init=has_post_init,
+                    ref=dataclass_ref,
+                    fields=[field.name for field in dataclasses.fields(dataclass)],
+                    slots=has_slots,
+                    config=core_config,
+                    metadata=metadata,
+                )
+                schema = self._apply_model_serializers(dc_schema, decorators.model_serializers.values())
+                schema = apply_model_validators(schema, model_validators, 'outer')
+                self.defs.definitions[dataclass_ref] = schema
+                return core_schema.definition_reference_schema(dataclass_ref)
+
+            # Type checkers seem to assume ExitStack may suppress exceptions and therefore
+            # control flow can exit the `with` block without returning.
+            assert False, 'Unreachable'
+
+    def _callable_schema(self, function: Callable[..., Any]) -> core_schema.CallSchema:
         """Generate schema for a Callable.

         TODO support functional validators once we support them in Config
         """
-        pass
-
-    def _annotated_schema(self, annotated_type: Any) ->core_schema.CoreSchema:
+        sig = signature(function)
+
+        type_hints = _typing_extra.get_function_type_hints(function, types_namespace=self._types_namespace)
+
+        mode_lookup: dict[_ParameterKind, Literal['positional_only', 'positional_or_keyword', 'keyword_only']] = {
+            Parameter.POSITIONAL_ONLY: 'positional_only',
+            Parameter.POSITIONAL_OR_KEYWORD: 'positional_or_keyword',
+            Parameter.KEYWORD_ONLY: 'keyword_only',
+        }
+
+        arguments_list: list[core_schema.ArgumentsParameter] = []
+        var_args_schema: core_schema.CoreSchema | None = None
+        var_kwargs_schema: core_schema.CoreSchema | None = None
+
+        for name, p in sig.parameters.items():
+            if p.annotation is sig.empty:
+                annotation = typing.cast(Any, Any)
+            else:
+                annotation = type_hints[name]
+
+            parameter_mode = mode_lookup.get(p.kind)
+            if parameter_mode is not None:
+                arg_schema = self._generate_parameter_schema(name, annotation, p.default, parameter_mode)
+                arguments_list.append(arg_schema)
+            elif p.kind == Parameter.VAR_POSITIONAL:
+                var_args_schema = self.generate_schema(annotation)
+            else:
+                assert p.kind == Parameter.VAR_KEYWORD, p.kind
+                var_kwargs_schema = self.generate_schema(annotation)
+
+        return_schema: core_schema.CoreSchema | None = None
+        config_wrapper = self._config_wrapper
+        if config_wrapper.validate_return:
+            return_hint = type_hints.get('return')
+            if return_hint is not None:
+                return_schema = self.generate_schema(return_hint)
+
+        return core_schema.call_schema(
+            core_schema.arguments_schema(
+                arguments_list,
+                var_args_schema=var_args_schema,
+                var_kwargs_schema=var_kwargs_schema,
+                populate_by_name=config_wrapper.populate_by_name,
+            ),
+            function,
+            return_schema=return_schema,
+        )
+
+    def _unsubstituted_typevar_schema(self, typevar: typing.TypeVar) -> core_schema.CoreSchema:
+        assert isinstance(typevar, typing.TypeVar)
+
+        bound = typevar.__bound__
+        constraints = typevar.__constraints__
+
+        try:
+            typevar_has_default = typevar.has_default()  # type: ignore
+        except AttributeError:
+            # could still have a default if it's an old version of typing_extensions.TypeVar
+            typevar_has_default = getattr(typevar, '__default__', None) is not None
+
+        if (bound is not None) + (len(constraints) != 0) + typevar_has_default > 1:
+            raise NotImplementedError(
+                'Pydantic does not support mixing more than one of TypeVar bounds, constraints and defaults'
+            )
+
+        if typevar_has_default:
+            return self.generate_schema(typevar.__default__)  # type: ignore
+        elif constraints:
+            return self._union_schema(typing.Union[constraints])  # type: ignore
+        elif bound:
+            schema = self.generate_schema(bound)
+            schema['serialization'] = core_schema.wrap_serializer_function_ser_schema(
+                lambda x, h: h(x), schema=core_schema.any_schema()
+            )
+            return schema
+        else:
+            return core_schema.any_schema()
+
+    def _computed_field_schema(
+        self,
+        d: Decorator[ComputedFieldInfo],
+        field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]],
+    ) -> core_schema.ComputedField:
+        try:
+            return_type = _decorators.get_function_return_type(d.func, d.info.return_type, self._types_namespace)
+        except NameError as e:
+            raise PydanticUndefinedAnnotation.from_name_error(e) from e
+        if return_type is PydanticUndefined:
+            raise PydanticUserError(
+                'Computed field is missing return type annotation or specifying `return_type`'
+                ' to the `@computed_field` decorator (e.g. `@computed_field(return_type=int|str)`)',
+                code='model-field-missing-annotation',
+            )
+
+        return_type = replace_types(return_type, self._typevars_map)
+        # Create a new ComputedFieldInfo so that different type parametrizations of the same
+        # generic model's computed field can have different return types.
+        d.info = dataclasses.replace(d.info, return_type=return_type)
+        return_type_schema = self.generate_schema(return_type)
+        # Apply serializers to the computed field, if any exist
+        return_type_schema = self._apply_field_serializers(
+            return_type_schema,
+            filter_field_decorator_info_by_field(field_serializers.values(), d.cls_var_name),
+            computed_field=True,
+        )
+
+        alias_generator = self._config_wrapper.alias_generator
+        if alias_generator is not None:
+            self._apply_alias_generator_to_computed_field_info(
+                alias_generator=alias_generator, computed_field_info=d.info, computed_field_name=d.cls_var_name
+            )
+        self._apply_field_title_generator_to_field_info(self._config_wrapper, d.info, d.cls_var_name)
+
+        def set_computed_field_metadata(schema: CoreSchemaOrField, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
+            json_schema = handler(schema)
+
+            json_schema['readOnly'] = True
+
+            title = d.info.title
+            if title is not None:
+                json_schema['title'] = title
+
+            description = d.info.description
+            if description is not None:
+                json_schema['description'] = description
+
+            if d.info.deprecated or d.info.deprecated == '':
+                json_schema['deprecated'] = True
+
+            examples = d.info.examples
+            if examples is not None:
+                json_schema['examples'] = to_jsonable_python(examples)
+
+            json_schema_extra = d.info.json_schema_extra
+            if json_schema_extra is not None:
+                add_json_schema_extra(json_schema, json_schema_extra)
+
+            return json_schema
+
+        metadata = build_metadata_dict(js_annotation_functions=[set_computed_field_metadata])
+        return core_schema.computed_field(
+            d.cls_var_name, return_schema=return_type_schema, alias=d.info.alias, metadata=metadata
+        )
+
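A minimal sketch of the decorator this schema backs (model and field names are illustrative, not from the patch):

    from pydantic import BaseModel, computed_field

    class Rect(BaseModel):
        width: int
        height: int

        @computed_field  # return type taken from the annotation; `return_type=` would override it
        @property
        def area(self) -> int:
            return self.width * self.height

    assert Rect(width=2, height=3).model_dump() == {'width': 2, 'height': 3, 'area': 6}
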
+    def _annotated_schema(self, annotated_type: Any) -> core_schema.CoreSchema:
         """Generate schema for an Annotated type, e.g. `Annotated[int, Field(...)]` or `Annotated[int, Gt(0)]`."""
-        pass
-
-    def _apply_annotations(self, source_type: Any, annotations: list[Any],
-        transform_inner_schema: Callable[[CoreSchema], CoreSchema]=lambda x: x
-        ) ->CoreSchema:
+        from ..fields import FieldInfo
+
+        source_type, *annotations = self._get_args_resolving_forward_refs(
+            annotated_type,
+            required=True,
+        )
+        schema = self._apply_annotations(source_type, annotations)
+        # put the default validator last so that TypeAdapter.get_default_value() works
+        # even if there are function validators involved
+        for annotation in annotations:
+            if isinstance(annotation, FieldInfo):
+                schema = wrap_default(annotation, schema)
+        return schema
+
+    def _get_prepare_pydantic_annotations_for_known_type(
+        self, obj: Any, annotations: tuple[Any, ...]
+    ) -> tuple[Any, list[Any]] | None:
+        from ._std_types_schema import PREPARE_METHODS
+
+        # Check for hashability
+        try:
+            hash(obj)
+        except TypeError:
+            # obj is definitely not a known type if this fails
+            return None
+
+        for gen in PREPARE_METHODS:
+            res = gen(obj, annotations, self._config_wrapper.config_dict)
+            if res is not None:
+                return res
+
+        return None
+
+    def _apply_annotations(
+        self,
+        source_type: Any,
+        annotations: list[Any],
+        transform_inner_schema: Callable[[CoreSchema], CoreSchema] = lambda x: x,
+    ) -> CoreSchema:
         """Apply arguments from `Annotated` or from `FieldInfo` to a schema.

         This gets called by `GenerateSchema._annotated_schema` but differs from it in that it does
         not expect `source_type` to be an `Annotated` object; it expects it to be the first argument of that
         (in other words, `GenerateSchema._annotated_schema` just unpacks `Annotated`, and this method processes it).
         """
-        pass
-
-    def _apply_field_serializers(self, schema: core_schema.CoreSchema,
+        annotations = list(_known_annotated_metadata.expand_grouped_metadata(annotations))
+        res = self._get_prepare_pydantic_annotations_for_known_type(source_type, tuple(annotations))
+        if res is not None:
+            source_type, annotations = res
+
+        pydantic_js_annotation_functions: list[GetJsonSchemaFunction] = []
+
+        def inner_handler(obj: Any) -> CoreSchema:
+            from_property = self._generate_schema_from_property(obj, source_type)
+            if from_property is None:
+                schema = self._generate_schema_inner(obj)
+            else:
+                schema = from_property
+            metadata_js_function = _extract_get_pydantic_json_schema(obj, schema)
+            if metadata_js_function is not None:
+                metadata_schema = resolve_original_schema(schema, self.defs.definitions)
+                if metadata_schema is not None:
+                    self._add_js_function(metadata_schema, metadata_js_function)
+            return transform_inner_schema(schema)
+
+        get_inner_schema = CallbackGetCoreSchemaHandler(inner_handler, self)
+
+        for annotation in annotations:
+            if annotation is None:
+                continue
+            get_inner_schema = self._get_wrapped_inner_schema(
+                get_inner_schema, annotation, pydantic_js_annotation_functions
+            )
+
+        schema = get_inner_schema(source_type)
+        if pydantic_js_annotation_functions:
+            metadata = CoreMetadataHandler(schema).metadata
+            metadata.setdefault('pydantic_js_annotation_functions', []).extend(pydantic_js_annotation_functions)
+        return _add_custom_serialization_from_json_encoders(self._config_wrapper.json_encoders, source_type, schema)
+
+    def _apply_single_annotation(self, schema: core_schema.CoreSchema, metadata: Any) -> core_schema.CoreSchema:
+        from ..fields import FieldInfo
+
+        if isinstance(metadata, FieldInfo):
+            for field_metadata in metadata.metadata:
+                schema = self._apply_single_annotation(schema, field_metadata)
+
+            if metadata.discriminator is not None:
+                schema = self._apply_discriminator_to_union(schema, metadata.discriminator)
+            return schema
+
+        if schema['type'] == 'nullable':
+            # for nullable schemas, metadata is automatically applied to the inner schema
+            inner = schema.get('schema', core_schema.any_schema())
+            inner = self._apply_single_annotation(inner, metadata)
+            if inner:
+                schema['schema'] = inner
+            return schema
+
+        original_schema = schema
+        ref = schema.get('ref', None)
+        if ref is not None:
+            schema = schema.copy()
+            new_ref = ref + f'_{repr(metadata)}'
+            if new_ref in self.defs.definitions:
+                return self.defs.definitions[new_ref]
+            schema['ref'] = new_ref  # type: ignore
+        elif schema['type'] == 'definition-ref':
+            ref = schema['schema_ref']
+            if ref in self.defs.definitions:
+                schema = self.defs.definitions[ref].copy()
+                new_ref = ref + f'_{repr(metadata)}'
+                if new_ref in self.defs.definitions:
+                    return self.defs.definitions[new_ref]
+                schema['ref'] = new_ref  # type: ignore
+
+        maybe_updated_schema = _known_annotated_metadata.apply_known_metadata(metadata, schema.copy())
+
+        if maybe_updated_schema is not None:
+            return maybe_updated_schema
+        return original_schema
+
+    def _apply_single_annotation_json_schema(
+        self, schema: core_schema.CoreSchema, metadata: Any
+    ) -> core_schema.CoreSchema:
+        from ..fields import FieldInfo
+
+        if isinstance(metadata, FieldInfo):
+            for field_metadata in metadata.metadata:
+                schema = self._apply_single_annotation_json_schema(schema, field_metadata)
+            json_schema_update: JsonSchemaValue = {}
+            if metadata.title:
+                json_schema_update['title'] = metadata.title
+            if metadata.description:
+                json_schema_update['description'] = metadata.description
+            if metadata.examples:
+                json_schema_update['examples'] = to_jsonable_python(metadata.examples)
+
+            json_schema_extra = metadata.json_schema_extra
+            if json_schema_update or json_schema_extra:
+                CoreMetadataHandler(schema).metadata.setdefault('pydantic_js_annotation_functions', []).append(
+                    get_json_schema_update_func(json_schema_update, json_schema_extra)
+                )
+        return schema
+
+    def _get_wrapped_inner_schema(
+        self,
+        get_inner_schema: GetCoreSchemaHandler,
+        annotation: Any,
+        pydantic_js_annotation_functions: list[GetJsonSchemaFunction],
+    ) -> CallbackGetCoreSchemaHandler:
+        metadata_get_schema: GetCoreSchemaFunction = getattr(annotation, '__get_pydantic_core_schema__', None) or (
+            lambda source, handler: handler(source)
+        )
+
+        def new_handler(source: Any) -> core_schema.CoreSchema:
+            schema = metadata_get_schema(source, get_inner_schema)
+            schema = self._apply_single_annotation(schema, annotation)
+            schema = self._apply_single_annotation_json_schema(schema, annotation)
+
+            metadata_js_function = _extract_get_pydantic_json_schema(annotation, schema)
+            if metadata_js_function is not None:
+                pydantic_js_annotation_functions.append(metadata_js_function)
+            return schema
+
+        return CallbackGetCoreSchemaHandler(new_handler, self)
+
+    def _apply_field_serializers(
+        self,
+        schema: core_schema.CoreSchema,
         serializers: list[Decorator[FieldSerializerDecoratorInfo]],
-        computed_field: bool=False) ->core_schema.CoreSchema:
+        computed_field: bool = False,
+    ) -> core_schema.CoreSchema:
         """Apply field serializers to a schema."""
-        pass
-
-    def _apply_model_serializers(self, schema: core_schema.CoreSchema,
-        serializers: Iterable[Decorator[ModelSerializerDecoratorInfo]]
-        ) ->core_schema.CoreSchema:
+        if serializers:
+            schema = copy(schema)
+            if schema['type'] == 'definitions':
+                inner_schema = schema['schema']
+                schema['schema'] = self._apply_field_serializers(inner_schema, serializers)
+                return schema
+            else:
+                ref = typing.cast('str|None', schema.get('ref', None))
+                if ref is not None:
+                    schema = core_schema.definition_reference_schema(ref)
+
+            # use the last serializer to make it easy to override a serializer set on a parent model
+            serializer = serializers[-1]
+            is_field_serializer, info_arg = inspect_field_serializer(
+                serializer.func, serializer.info.mode, computed_field=computed_field
+            )
+
+            try:
+                return_type = _decorators.get_function_return_type(
+                    serializer.func, serializer.info.return_type, self._types_namespace
+                )
+            except NameError as e:
+                raise PydanticUndefinedAnnotation.from_name_error(e) from e
+
+            if return_type is PydanticUndefined:
+                return_schema = None
+            else:
+                return_schema = self.generate_schema(return_type)
+
+            if serializer.info.mode == 'wrap':
+                schema['serialization'] = core_schema.wrap_serializer_function_ser_schema(
+                    serializer.func,
+                    is_field_serializer=is_field_serializer,
+                    info_arg=info_arg,
+                    return_schema=return_schema,
+                    when_used=serializer.info.when_used,
+                )
+            else:
+                assert serializer.info.mode == 'plain'
+                schema['serialization'] = core_schema.plain_serializer_function_ser_schema(
+                    serializer.func,
+                    is_field_serializer=is_field_serializer,
+                    info_arg=info_arg,
+                    return_schema=return_schema,
+                    when_used=serializer.info.when_used,
+                )
+        return schema
+
+    def _apply_model_serializers(
+        self, schema: core_schema.CoreSchema, serializers: Iterable[Decorator[ModelSerializerDecoratorInfo]]
+    ) -> core_schema.CoreSchema:
         """Apply model serializers to a schema."""
-        pass
-
-
-_VALIDATOR_F_MATCH: Mapping[tuple[FieldValidatorModes, Literal['no-info',
-    'with-info']], Callable[[Callable[..., Any], core_schema.CoreSchema, 
-    str | None], core_schema.CoreSchema]] = {('before', 'no-info'): lambda
-    f, schema, _: core_schema.no_info_before_validator_function(f, schema),
-    ('after', 'no-info'): lambda f, schema, _: core_schema.
-    no_info_after_validator_function(f, schema), ('plain', 'no-info'): lambda
-    f, _1, _2: core_schema.no_info_plain_validator_function(f), ('wrap',
-    'no-info'): lambda f, schema, _: core_schema.
-    no_info_wrap_validator_function(f, schema), ('before', 'with-info'): lambda
-    f, schema, field_name: core_schema.with_info_before_validator_function(
-    f, schema, field_name=field_name), ('after', 'with-info'): lambda f,
-    schema, field_name: core_schema.with_info_after_validator_function(f,
-    schema, field_name=field_name), ('plain', 'with-info'): lambda f, _,
-    field_name: core_schema.with_info_plain_validator_function(f,
-    field_name=field_name), ('wrap', 'with-info'): lambda f, schema,
-    field_name: core_schema.with_info_wrap_validator_function(f, schema,
-    field_name=field_name)}
-
-
-def apply_validators(schema: core_schema.CoreSchema, validators: (Iterable[
-    Decorator[RootValidatorDecoratorInfo]] | Iterable[Decorator[
-    ValidatorDecoratorInfo]] | Iterable[Decorator[
-    FieldValidatorDecoratorInfo]]), field_name: (str | None)
-    ) ->core_schema.CoreSchema:
+        ref: str | None = schema.pop('ref', None)  # type: ignore
+        if serializers:
+            serializer = list(serializers)[-1]
+            info_arg = inspect_model_serializer(serializer.func, serializer.info.mode)
+
+            try:
+                return_type = _decorators.get_function_return_type(
+                    serializer.func, serializer.info.return_type, self._types_namespace
+                )
+            except NameError as e:
+                raise PydanticUndefinedAnnotation.from_name_error(e) from e
+            if return_type is PydanticUndefined:
+                return_schema = None
+            else:
+                return_schema = self.generate_schema(return_type)
+
+            if serializer.info.mode == 'wrap':
+                ser_schema: core_schema.SerSchema = core_schema.wrap_serializer_function_ser_schema(
+                    serializer.func,
+                    info_arg=info_arg,
+                    return_schema=return_schema,
+                    when_used=serializer.info.when_used,
+                )
+            else:
+                # plain
+                ser_schema = core_schema.plain_serializer_function_ser_schema(
+                    serializer.func,
+                    info_arg=info_arg,
+                    return_schema=return_schema,
+                    when_used=serializer.info.when_used,
+                )
+            schema['serialization'] = ser_schema
+        if ref:
+            schema['ref'] = ref  # type: ignore
+        return schema
+
+
+_VALIDATOR_F_MATCH: Mapping[
+    tuple[FieldValidatorModes, Literal['no-info', 'with-info']],
+    Callable[[Callable[..., Any], core_schema.CoreSchema, str | None], core_schema.CoreSchema],
+] = {
+    ('before', 'no-info'): lambda f, schema, _: core_schema.no_info_before_validator_function(f, schema),
+    ('after', 'no-info'): lambda f, schema, _: core_schema.no_info_after_validator_function(f, schema),
+    ('plain', 'no-info'): lambda f, _1, _2: core_schema.no_info_plain_validator_function(f),
+    ('wrap', 'no-info'): lambda f, schema, _: core_schema.no_info_wrap_validator_function(f, schema),
+    ('before', 'with-info'): lambda f, schema, field_name: core_schema.with_info_before_validator_function(
+        f, schema, field_name=field_name
+    ),
+    ('after', 'with-info'): lambda f, schema, field_name: core_schema.with_info_after_validator_function(
+        f, schema, field_name=field_name
+    ),
+    ('plain', 'with-info'): lambda f, _, field_name: core_schema.with_info_plain_validator_function(
+        f, field_name=field_name
+    ),
+    ('wrap', 'with-info'): lambda f, schema, field_name: core_schema.with_info_wrap_validator_function(
+        f, schema, field_name=field_name
+    ),
+}
+
+
+def apply_validators(
+    schema: core_schema.CoreSchema,
+    validators: Iterable[Decorator[RootValidatorDecoratorInfo]]
+    | Iterable[Decorator[ValidatorDecoratorInfo]]
+    | Iterable[Decorator[FieldValidatorDecoratorInfo]],
+    field_name: str | None,
+) -> core_schema.CoreSchema:
     """Apply validators to a schema.

     Args:
@@ -427,11 +2116,15 @@ def apply_validators(schema: core_schema.CoreSchema, validators: (Iterable[
     Returns:
         The updated schema.
     """
-    pass
+    for validator in validators:
+        info_arg = inspect_validator(validator.func, validator.info.mode)
+        val_type = 'with-info' if info_arg else 'no-info'
+
+        schema = _VALIDATOR_F_MATCH[(validator.info.mode, val_type)](validator.func, schema, field_name)
+    return schema
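
A sketch of how a typical field validator reaches the dispatch table above: a validator without an `info` parameter takes the 'no-info' branch (the example model is illustrative):

    from pydantic import BaseModel, field_validator

    class User(BaseModel):
        name: str

        @field_validator('name')                   # mode='after' by default
        @classmethod
        def strip_name(cls, value: str) -> str:    # no `info` argument -> 'no-info' variant
            return value.strip()

    assert User(name='  ada ').name == 'ada'
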


-def _validators_require_validate_default(validators: Iterable[Decorator[
-    ValidatorDecoratorInfo]]) ->bool:
+def _validators_require_validate_default(validators: Iterable[Decorator[ValidatorDecoratorInfo]]) -> bool:
     """In v1, if any of the validators for a field had `always=True`, the default value would be validated.

     This serves as an auxiliary function for re-implementing that logic, by looping over a provided
@@ -441,12 +2134,17 @@ def _validators_require_validate_default(validators: Iterable[Decorator[
     for v1-style validator decorators. (Or we can extend it and keep it if we add something equivalent
     to the v1-validator `always` kwarg to `field_validator`.)
     """
-    pass
+    for validator in validators:
+        if validator.info.always:
+            return True
+    return False
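
A hedged sketch of the v1-style behaviour being re-implemented: `always=True` on a deprecated `@validator` forces the default to be validated (this emits a deprecation warning on Pydantic v2):

    from pydantic import BaseModel, validator

    class Model(BaseModel):
        x: int = 0

        @validator('x', always=True)
        @classmethod
        def bump(cls, v):
            return v + 1

    assert Model().x == 1  # the default 0 was passed through the validator
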


-def apply_model_validators(schema: core_schema.CoreSchema, validators:
-    Iterable[Decorator[ModelValidatorDecoratorInfo]], mode: Literal['inner',
-    'outer', 'all']) ->core_schema.CoreSchema:
+def apply_model_validators(
+    schema: core_schema.CoreSchema,
+    validators: Iterable[Decorator[ModelValidatorDecoratorInfo]],
+    mode: Literal['inner', 'outer', 'all'],
+) -> core_schema.CoreSchema:
     """Apply model validators to a schema.

     If mode == 'inner', only "before" validators are applied
@@ -461,11 +2159,35 @@ def apply_model_validators(schema: core_schema.CoreSchema, validators:
     Returns:
         The updated schema.
     """
-    pass
-
-
-def wrap_default(field_info: FieldInfo, schema: core_schema.CoreSchema
-    ) ->core_schema.CoreSchema:
+    ref: str | None = schema.pop('ref', None)  # type: ignore
+    for validator in validators:
+        if mode == 'inner' and validator.info.mode != 'before':
+            continue
+        if mode == 'outer' and validator.info.mode == 'before':
+            continue
+        info_arg = inspect_validator(validator.func, validator.info.mode)
+        if validator.info.mode == 'wrap':
+            if info_arg:
+                schema = core_schema.with_info_wrap_validator_function(function=validator.func, schema=schema)
+            else:
+                schema = core_schema.no_info_wrap_validator_function(function=validator.func, schema=schema)
+        elif validator.info.mode == 'before':
+            if info_arg:
+                schema = core_schema.with_info_before_validator_function(function=validator.func, schema=schema)
+            else:
+                schema = core_schema.no_info_before_validator_function(function=validator.func, schema=schema)
+        else:
+            assert validator.info.mode == 'after'
+            if info_arg:
+                schema = core_schema.with_info_after_validator_function(function=validator.func, schema=schema)
+            else:
+                schema = core_schema.no_info_after_validator_function(function=validator.func, schema=schema)
+    if ref:
+        schema['ref'] = ref  # type: ignore
+    return schema
+
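As a sketch of the mode split above, a `mode='before'` model validator runs inside (before field validation), while `mode='after'` runs outside (illustrative example, public API only):

    from typing import Any

    from pydantic import BaseModel, model_validator

    class Wrapper(BaseModel):
        a: int

        @model_validator(mode='before')
        @classmethod
        def unwrap(cls, data: Any) -> Any:
            return data.get('payload', data) if isinstance(data, dict) else data

    assert Wrapper.model_validate({'payload': {'a': 1}}).a == 1
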
+
+def wrap_default(field_info: FieldInfo, schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
     """Wrap schema with default schema if default value or `default_factory` are available.

     Args:
@@ -475,13 +2197,69 @@ def wrap_default(field_info: FieldInfo, schema: core_schema.CoreSchema
     Returns:
         Updated schema by default value or `default_factory`.
     """
-    pass
+    if field_info.default_factory:
+        return core_schema.with_default_schema(
+            schema, default_factory=field_info.default_factory, validate_default=field_info.validate_default
+        )
+    elif field_info.default is not PydanticUndefined:
+        return core_schema.with_default_schema(
+            schema, default=field_info.default, validate_default=field_info.validate_default
+        )
+    else:
+        return schema
+
+
+def _extract_get_pydantic_json_schema(tp: Any, schema: CoreSchema) -> GetJsonSchemaFunction | None:
+    """Extract `__get_pydantic_json_schema__` from a type, handling the deprecated `__modify_schema__`."""
+    js_modify_function = getattr(tp, '__get_pydantic_json_schema__', None)

+    if hasattr(tp, '__modify_schema__'):
+        from pydantic import BaseModel  # circular reference
+
+        has_custom_v2_modify_js_func = (
+            js_modify_function is not None
+            and BaseModel.__get_pydantic_json_schema__.__func__  # type: ignore
+            not in (js_modify_function, getattr(js_modify_function, '__func__', None))
+        )
+
+        if not has_custom_v2_modify_js_func:
+            cls_name = getattr(tp, '__name__', None)
+            raise PydanticUserError(
+                f'The `__modify_schema__` method is not supported in Pydantic v2. '
+                f'Use `__get_pydantic_json_schema__` instead{f" in class `{cls_name}`" if cls_name else ""}.',
+                code='custom-json-schema',
+            )
+
+    # handle GenericAliases but ignore Annotated, which "lies" about its origin (in this case it would be `int`)
+    if hasattr(tp, '__origin__') and not isinstance(tp, type(Annotated[int, 'placeholder'])):
+        return _extract_get_pydantic_json_schema(tp.__origin__, schema)
+
+    if js_modify_function is None:
+        return None
+
+    return js_modify_function

-def _extract_get_pydantic_json_schema(tp: Any, schema: CoreSchema) ->(
-    GetJsonSchemaFunction | None):
-    """Extract `__get_pydantic_json_schema__` from a type, handling the deprecated `__modify_schema__`."""
-    pass
+
+def get_json_schema_update_func(
+    json_schema_update: JsonSchemaValue, json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None
+) -> GetJsonSchemaFunction:
+    def json_schema_update_func(
+        core_schema_or_field: CoreSchemaOrField, handler: GetJsonSchemaHandler
+    ) -> JsonSchemaValue:
+        json_schema = {**handler(core_schema_or_field), **json_schema_update}
+        add_json_schema_extra(json_schema, json_schema_extra)
+        return json_schema
+
+    return json_schema_update_func
+
+
+def add_json_schema_extra(
+    json_schema: JsonSchemaValue, json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None
+):
+    if isinstance(json_schema_extra, dict):
+        json_schema.update(to_jsonable_python(json_schema_extra))
+    elif callable(json_schema_extra):
+        json_schema_extra(json_schema)


 class _CommonField(TypedDict):
@@ -493,16 +2271,34 @@ class _CommonField(TypedDict):
     metadata: dict[str, Any]


+def _common_field(
+    schema: core_schema.CoreSchema,
+    *,
+    validation_alias: str | list[str | int] | list[list[str | int]] | None = None,
+    serialization_alias: str | None = None,
+    serialization_exclude: bool | None = None,
+    frozen: bool | None = None,
+    metadata: Any = None,
+) -> _CommonField:
+    return {
+        'schema': schema,
+        'validation_alias': validation_alias,
+        'serialization_alias': serialization_alias,
+        'serialization_exclude': serialization_exclude,
+        'frozen': frozen,
+        'metadata': metadata,
+    }
+
+
 class _Definitions:
     """Keeps track of references and definitions."""

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         self.seen: set[str] = set()
         self.definitions: dict[str, core_schema.CoreSchema] = {}

     @contextmanager
-    def get_schema_or_ref(self, tp: Any) ->Iterator[tuple[str, None] |
-        tuple[str, CoreSchema]]:
+    def get_schema_or_ref(self, tp: Any) -> Iterator[tuple[str, None] | tuple[str, CoreSchema]]:
         """Get a definition for `tp` if one exists.

         If a definition exists, a tuple of `(ref_string, CoreSchema)` is returned.
@@ -521,18 +2317,60 @@ class _Definitions:
         - TypedDict
         - TypeAliasType
         """
-        pass
+        ref = get_type_ref(tp)
+        # return the reference if we're either (1) in a cycle or (2) it was already defined
+        if ref in self.seen or ref in self.definitions:
+            yield (ref, core_schema.definition_reference_schema(ref))
+        else:
+            self.seen.add(ref)
+            try:
+                yield (ref, None)
+            finally:
+                self.seen.discard(ref)
+
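This is the mechanism that lets recursive types terminate: re-entering a type yields a definition reference instead of recursing forever. A minimal sketch with a self-referencing model (illustrative, public API only):

    from pydantic import BaseModel

    class Node(BaseModel):
        value: int
        children: list['Node'] = []

    node = Node.model_validate({'value': 1, 'children': [{'value': 2}]})
    assert node.children[0].value == 2
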
+
+def resolve_original_schema(schema: CoreSchema, definitions: dict[str, CoreSchema]) -> CoreSchema | None:
+    if schema['type'] == 'definition-ref':
+        return definitions.get(schema['schema_ref'], None)
+    elif schema['type'] == 'definitions':
+        return schema['schema']
+    else:
+        return schema


 class _FieldNameStack:
-    __slots__ = '_stack',
+    __slots__ = ('_stack',)

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         self._stack: list[str] = []

+    @contextmanager
+    def push(self, field_name: str) -> Iterator[None]:
+        self._stack.append(field_name)
+        yield
+        self._stack.pop()
+
+    def get(self) -> str | None:
+        if self._stack:
+            return self._stack[-1]
+        else:
+            return None
+

 class _ModelTypeStack:
-    __slots__ = '_stack',
+    __slots__ = ('_stack',)

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         self._stack: list[type] = []
+
+    @contextmanager
+    def push(self, type_obj: type) -> Iterator[None]:
+        self._stack.append(type_obj)
+        yield
+        self._stack.pop()
+
+    def get(self) -> type | None:
+        if self._stack:
+            return self._stack[-1]
+        else:
+            return None
diff --git a/pydantic/_internal/_generics.py b/pydantic/_internal/_generics.py
index 6087db94e..0bd106b24 100644
--- a/pydantic/_internal/_generics.py
+++ b/pydantic/_internal/_generics.py
@@ -1,4 +1,5 @@
 from __future__ import annotations
+
 import sys
 import types
 import typing
@@ -8,58 +9,70 @@ from contextvars import ContextVar
 from types import prepare_class
 from typing import TYPE_CHECKING, Any, Iterator, List, Mapping, MutableMapping, Tuple, TypeVar
 from weakref import WeakValueDictionary
+
 import typing_extensions
+
 from ._core_utils import get_type_ref
 from ._forward_ref import PydanticRecursiveRef
 from ._typing_extra import TypeVarType, typing_base
 from ._utils import all_identical, is_model_class
+
 if sys.version_info >= (3, 10):
-    from typing import _UnionGenericAlias
+    from typing import _UnionGenericAlias  # type: ignore[attr-defined]
+
 if TYPE_CHECKING:
     from ..main import BaseModel
+
 GenericTypesCacheKey = Tuple[Any, Any, Tuple[Any, ...]]
+
+# Note: We want to remove LimitedDict, but to do this, we'd need to improve the handling of generics caching.
+#   Right now, to handle recursive generics, some types must remain cached for brief periods without references.
+#   By chaining the WeakValueDictionary with a LimitedDict, we have a way to retain caching for all types with references,
+#   while also retaining a limited number of types even without references. This is generally enough to build
+#   specific recursive generic models without losing required items out of the cache.
+
 KT = TypeVar('KT')
 VT = TypeVar('VT')
 _LIMITED_DICT_SIZE = 100
 if TYPE_CHECKING:

-
     class LimitedDict(dict, MutableMapping[KT, VT]):
+        def __init__(self, size_limit: int = _LIMITED_DICT_SIZE): ...

-        def __init__(self, size_limit: int=_LIMITED_DICT_SIZE):
-            ...
 else:

-
     class LimitedDict(dict):
         """Limit the size/length of a dict used for caching to avoid unlimited increase in memory usage.

         Since the dict is ordered, and we always remove elements from the beginning, this is effectively a FIFO cache.
         """

-        def __init__(self, size_limit: int=_LIMITED_DICT_SIZE):
+        def __init__(self, size_limit: int = _LIMITED_DICT_SIZE):
             self.size_limit = size_limit
             super().__init__()

-        def __setitem__(self, key: Any, value: Any, /) ->None:
+        def __setitem__(self, key: Any, value: Any, /) -> None:
             super().__setitem__(key, value)
             if len(self) > self.size_limit:
                 excess = len(self) - self.size_limit + self.size_limit // 10
                 to_remove = list(self.keys())[:excess]
                 for k in to_remove:
                     del self[k]
-if sys.version_info >= (3, 9):
-    GenericTypesCache = WeakValueDictionary[GenericTypesCacheKey,
-        'type[BaseModel]']
+
+
+# weak dictionaries allow the dynamically created parametrized versions of generic models to get collected
+# once they are no longer referenced by the caller.
+if sys.version_info >= (3, 9):  # Typing for weak dictionaries available at 3.9
+    GenericTypesCache = WeakValueDictionary[GenericTypesCacheKey, 'type[BaseModel]']
 else:
     GenericTypesCache = WeakValueDictionary
-if TYPE_CHECKING:

+if TYPE_CHECKING:

-    class DeepChainMap(ChainMap[KT, VT]):
+    class DeepChainMap(ChainMap[KT, VT]):  # type: ignore
         ...
-else:

+else:

     class DeepChainMap(ChainMap):
         """Variant of ChainMap that allows direct updates to inner scopes.
@@ -68,11 +81,15 @@ else:
         with some light modifications for this use case.
         """

-        def __setitem__(self, key: KT, value: VT) ->None:
+        def clear(self) -> None:
+            for mapping in self.maps:
+                mapping.clear()
+
+        def __setitem__(self, key: KT, value: VT) -> None:
             for mapping in self.maps:
                 mapping[key] = value

-        def __delitem__(self, key: KT) ->None:
+        def __delitem__(self, key: KT) -> None:
             hit = False
             for mapping in self.maps:
                 if key in mapping:
@@ -80,17 +97,24 @@ else:
                     hit = True
             if not hit:
                 raise KeyError(key)
+
+
+# Despite the fact that LimitedDict _seems_ no longer necessary, I'm very nervous to actually remove it
+# and discover later on that we need to re-add all this infrastructure...
+# _GENERIC_TYPES_CACHE = DeepChainMap(GenericTypesCache(), LimitedDict())
+
 _GENERIC_TYPES_CACHE = GenericTypesCache()
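
A quick illustration (assumed values, not part of the patch) of the FIFO eviction that `LimitedDict` implements: once the size limit is exceeded, the oldest entries are dropped first.

```python
cache = LimitedDict(size_limit=3)
for i in range(4):
    cache[i] = i
# inserting the 4th item evicts the oldest key (excess = 4 - 3 + 3 // 10 = 1)
assert 0 not in cache
assert list(cache) == [1, 2, 3]
```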


 class PydanticGenericMetadata(typing_extensions.TypedDict):
-    origin: type[BaseModel] | None
-    args: tuple[Any, ...]
-    parameters: tuple[type[Any], ...]
+    origin: type[BaseModel] | None  # analogous to typing._GenericAlias.__origin__
+    args: tuple[Any, ...]  # analogous to typing._GenericAlias.__args__
+    parameters: tuple[type[Any], ...]  # analogous to typing.Generic.__parameters__


-def create_generic_submodel(model_name: str, origin: type[BaseModel], args:
-    tuple[Any, ...], params: tuple[Any, ...]) ->type[BaseModel]:
+def create_generic_submodel(
+    model_name: str, origin: type[BaseModel], args: tuple[Any, ...], params: tuple[Any, ...]
+) -> type[BaseModel]:
     """Dynamically create a submodel of a provided (generic) BaseModel.

     This is used when producing concrete parametrizations of generic models. This function
@@ -106,10 +130,36 @@ def create_generic_submodel(model_name: str, origin: type[BaseModel], args:
     Returns:
         The created submodel.
     """
-    pass
-
-
-def _get_caller_frame_info(depth: int=2) ->tuple[str | None, bool]:
+    namespace: dict[str, Any] = {'__module__': origin.__module__}
+    bases = (origin,)
+    meta, ns, kwds = prepare_class(model_name, bases)
+    namespace.update(ns)
+    created_model = meta(
+        model_name,
+        bases,
+        namespace,
+        __pydantic_generic_metadata__={
+            'origin': origin,
+            'args': args,
+            'parameters': params,
+        },
+        __pydantic_reset_parent_namespace__=False,
+        **kwds,
+    )
+
+    model_module, called_globally = _get_caller_frame_info(depth=3)
+    if called_globally:  # create global reference and therefore allow pickling
+        object_by_reference = None
+        reference_name = model_name
+        reference_module_globals = sys.modules[created_model.__module__].__dict__
+        while object_by_reference is not created_model:
+            object_by_reference = reference_module_globals.setdefault(reference_name, created_model)
+            reference_name += '_'
+
+    return created_model
+
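A hedged end-to-end sketch of what `create_generic_submodel` enables (the model name is illustrative): parametrizing a generic model records origin, args and parameters on the dynamically created subclass.

```python
from typing import Generic, TypeVar
from pydantic import BaseModel

T = TypeVar('T')

class Box(BaseModel, Generic[T]):
    item: T

IntBox = Box[int]
meta = IntBox.__pydantic_generic_metadata__
assert meta['origin'] is Box
assert meta['args'] == (int,)
assert meta['parameters'] == ()
```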
+
+def _get_caller_frame_info(depth: int = 2) -> tuple[str | None, bool]:
     """Used inside a function to check whether it was called globally.

     Args:
@@ -121,41 +171,85 @@ def _get_caller_frame_info(depth: int=2) ->tuple[str | None, bool]:
     Raises:
         RuntimeError: If the function is not called inside a function.
     """
-    pass
+    try:
+        previous_caller_frame = sys._getframe(depth)
+    except ValueError as e:
+        raise RuntimeError('This function must be used inside another function') from e
+    except AttributeError:  # sys module does not have _getframe function, so there's nothing we can do about it
+        return None, False
+    frame_globals = previous_caller_frame.f_globals
+    return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals


 DictValues: type[Any] = {}.values().__class__


-def iter_contained_typevars(v: Any) ->Iterator[TypeVarType]:
+def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]:
     """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found.

     This is inspired as an alternative to directly accessing the `__parameters__` attribute of a GenericAlias,
     since __parameters__ of (nested) generic BaseModel subclasses won't show up in that list.
     """
-    pass
-
-
-def get_standard_typevars_map(cls: type[Any]) ->(dict[TypeVarType, Any] | None
-    ):
+    if isinstance(v, TypeVar):
+        yield v
+    elif is_model_class(v):
+        yield from v.__pydantic_generic_metadata__['parameters']
+    elif isinstance(v, (DictValues, list)):
+        for var in v:
+            yield from iter_contained_typevars(var)
+    else:
+        args = get_args(v)
+        for arg in args:
+            yield from iter_contained_typevars(arg)
+
+
+def get_args(v: Any) -> Any:
+    pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None)
+    if pydantic_generic_metadata:
+        return pydantic_generic_metadata.get('args')
+    return typing_extensions.get_args(v)
+
+
+def get_origin(v: Any) -> Any:
+    pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None)
+    if pydantic_generic_metadata:
+        return pydantic_generic_metadata.get('origin')
+    return typing_extensions.get_origin(v)
+
+
+def get_standard_typevars_map(cls: type[Any]) -> dict[TypeVarType, Any] | None:
     """Package a generic type's typevars and parametrization (if present) into a dictionary compatible with the
     `replace_types` function. Specifically, this works with standard typing generics and typing._GenericAlias.
     """
-    pass
+    origin = get_origin(cls)
+    if origin is None:
+        return None
+    if not hasattr(origin, '__parameters__'):
+        return None

+    # In this case, we know that cls is a _GenericAlias, and origin is the generic type
+    # So it is safe to access cls.__args__ and origin.__parameters__
+    args: tuple[Any, ...] = cls.__args__  # type: ignore
+    parameters: tuple[TypeVarType, ...] = origin.__parameters__
+    return dict(zip(parameters, args))

-def get_model_typevars_map(cls: type[BaseModel]) ->(dict[TypeVarType, Any] |
-    None):
+
+def get_model_typevars_map(cls: type[BaseModel]) -> dict[TypeVarType, Any] | None:
     """Package a generic BaseModel's typevars and concrete parametrization (if present) into a dictionary compatible
     with the `replace_types` function.

     Since BaseModel.__class_getitem__ does not produce a typing._GenericAlias, and the BaseModel generic info is
     stored in the __pydantic_generic_metadata__ attribute, we need special handling here.
     """
-    pass
+    # TODO: This could be unified with `get_standard_typevars_map` if we stored the generic metadata
+    #   in the __origin__, __args__, and __parameters__ attributes of the model.
+    generic_metadata = cls.__pydantic_generic_metadata__
+    origin = generic_metadata['origin']
+    args = generic_metadata['args']
+    return dict(zip(iter_contained_typevars(origin), args))


-def replace_types(type_: Any, type_map: (Mapping[Any, Any] | None)) ->Any:
+def replace_types(type_: Any, type_map: Mapping[Any, Any] | None) -> Any:
     """Return type with all occurrences of `type_map` keys recursively replaced with their values.

     Args:
@@ -176,18 +270,99 @@ def replace_types(type_: Any, type_map: (Mapping[Any, Any] | None)) ->Any:
         #> Tuple[int, Union[List[int], float]]
         ```
     """
-    pass
-
-
-def has_instance_in_type(type_: Any, isinstance_target: Any) ->bool:
+    if not type_map:
+        return type_
+
+    type_args = get_args(type_)
+    origin_type = get_origin(type_)
+
+    if origin_type is typing_extensions.Annotated:
+        annotated_type, *annotations = type_args
+        annotated = replace_types(annotated_type, type_map)
+        for annotation in annotations:
+            annotated = typing_extensions.Annotated[annotated, annotation]
+        return annotated
+
+    # Having type args is a good indicator that this is a typing module
+    # class instantiation or a generic alias of some sort.
+    if type_args:
+        resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args)
+        if all_identical(type_args, resolved_type_args):
+            # If all arguments are the same, there is no need to modify the
+            # type or create a new object at all
+            return type_
+        if (
+            origin_type is not None
+            and isinstance(type_, typing_base)
+            and not isinstance(origin_type, typing_base)
+            and getattr(type_, '_name', None) is not None
+        ):
+            # In python < 3.9 generic aliases don't exist so any of these like `list`,
+            # `type` or `collections.abc.Callable` need to be translated.
+            # See: https://www.python.org/dev/peps/pep-0585
+            origin_type = getattr(typing, type_._name)
+        assert origin_type is not None
+        # PEP-604 syntax (Ex.: list | str) is represented with a types.UnionType object that does not have __getitem__.
+        # We also cannot use isinstance() since we have to compare types.
+        if sys.version_info >= (3, 10) and origin_type is types.UnionType:
+            return _UnionGenericAlias(origin_type, resolved_type_args)
+        # NotRequired[T] and Required[T] don't support tuple type resolved_type_args, hence the condition below
+        return origin_type[resolved_type_args[0] if len(resolved_type_args) == 1 else resolved_type_args]
+
+    # We handle pydantic generic models separately as they don't have the same
+    # semantics as "typing" classes or generic aliases
+
+    if not origin_type and is_model_class(type_):
+        parameters = type_.__pydantic_generic_metadata__['parameters']
+        if not parameters:
+            return type_
+        resolved_type_args = tuple(replace_types(t, type_map) for t in parameters)
+        if all_identical(parameters, resolved_type_args):
+            return type_
+        return type_[resolved_type_args]
+
+    # Handle special case for typehints that can have lists as arguments.
+    # `typing.Callable[[int, str], int]` is an example for this.
+    if isinstance(type_, (List, list)):
+        resolved_list = list(replace_types(element, type_map) for element in type_)
+        if all_identical(type_, resolved_list):
+            return type_
+        return resolved_list
+
+    # If all else fails, we try to resolve the type directly and otherwise just
+    # return the input with no modifications.
+    return type_map.get(type_, type_)
+
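One more small check of `replace_types` beyond the docstring example (a sketch; the typevar name is arbitrary): plain typing generics are rebuilt with the substituted arguments.

```python
from typing import List, Optional, TypeVar

T = TypeVar('T')
assert replace_types(List[T], {T: int}) == List[int]
assert replace_types(Optional[T], {T: str}) == Optional[str]
```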
+
+def has_instance_in_type(type_: Any, isinstance_target: Any) -> bool:
     """Checks if the type, or any of its arbitrary nested args, satisfy
     `isinstance(<type>, isinstance_target)`.
     """
-    pass
+    if isinstance(type_, isinstance_target):
+        return True
+
+    type_args = get_args(type_)
+    origin_type = get_origin(type_)
+
+    if origin_type is typing_extensions.Annotated:
+        annotated_type, *annotations = type_args
+        return has_instance_in_type(annotated_type, isinstance_target)

+    # Having type args is a good indicator that this is a typing module
+    # class instantiation or a generic alias of some sort.
+    if any(has_instance_in_type(a, isinstance_target) for a in type_args):
+        return True

-def check_parameters_count(cls: type[BaseModel], parameters: tuple[Any, ...]
-    ) ->None:
+    # Handle special case for typehints that can have lists as arguments.
+    # `typing.Callable[[int, str], int]` is an example for this.
+    if isinstance(type_, (List, list)) and not isinstance(type_, typing_extensions.ParamSpec):
+        if any(has_instance_in_type(element, isinstance_target) for element in type_):
+            return True
+
+    return False
+
+
+def check_parameters_count(cls: type[BaseModel], parameters: tuple[Any, ...]) -> None:
     """Check the generic model parameters count is equal.

     Args:
@@ -197,16 +372,20 @@ def check_parameters_count(cls: type[BaseModel], parameters: tuple[Any, ...]
     Raises:
         TypeError: If the passed parameters count is not equal to generic model parameters count.
     """
-    pass
+    actual = len(parameters)
+    expected = len(cls.__pydantic_generic_metadata__['parameters'])
+    if actual != expected:
+        description = 'many' if actual > expected else 'few'
+        raise TypeError(f'Too {description} parameters for {cls}; actual {actual}, expected {expected}')


-_generic_recursion_cache: ContextVar[set[str] | None] = ContextVar(
-    '_generic_recursion_cache', default=None)
+_generic_recursion_cache: ContextVar[set[str] | None] = ContextVar('_generic_recursion_cache', default=None)


 @contextmanager
-def generic_recursion_self_type(origin: type[BaseModel], args: tuple[Any, ...]
-    ) ->Iterator[PydanticRecursiveRef | None]:
+def generic_recursion_self_type(
+    origin: type[BaseModel], args: tuple[Any, ...]
+) -> Iterator[PydanticRecursiveRef | None]:
     """This contextmanager should be placed around the recursive calls used to build a generic type,
     and accept as arguments the generic origin type and the type arguments being passed to it.

@@ -214,11 +393,35 @@ def generic_recursion_self_type(origin: type[BaseModel], args: tuple[Any, ...]
     can be used while building the core schema, and will produce a schema_ref that will be valid in the
     final parent schema.
     """
-    pass
-
-
-def get_cached_generic_type_early(parent: type[BaseModel], typevar_values: Any
-    ) ->(type[BaseModel] | None):
+    previously_seen_type_refs = _generic_recursion_cache.get()
+    if previously_seen_type_refs is None:
+        previously_seen_type_refs = set()
+        token = _generic_recursion_cache.set(previously_seen_type_refs)
+    else:
+        token = None
+
+    try:
+        type_ref = get_type_ref(origin, args_override=args)
+        if type_ref in previously_seen_type_refs:
+            self_type = PydanticRecursiveRef(type_ref=type_ref)
+            yield self_type
+        else:
+            previously_seen_type_refs.add(type_ref)
+            yield None
+    finally:
+        if token:
+            _generic_recursion_cache.reset(token)
+
+
+def recursively_defined_type_refs() -> set[str]:
+    visited = _generic_recursion_cache.get()
+    if not visited:
+        return set()  # not in a generic recursion, so there are no types
+
+    return visited.copy()  # don't allow modifications
+
+
+def get_cached_generic_type_early(parent: type[BaseModel], typevar_values: Any) -> type[BaseModel] | None:
     """The use of a two-stage cache lookup approach was necessary to have the highest performance possible for
     repeated calls to `__class_getitem__` on generic types (which may happen in tighter loops during runtime),
     while still ensuring that certain alternative parametrizations ultimately resolve to the same type.
@@ -235,26 +438,37 @@ def get_cached_generic_type_early(parent: type[BaseModel], typevar_values: Any
     during validation, I think it is worthwhile to ensure that types that are functionally equivalent are actually
     equal.
     """
-    pass
+    return _GENERIC_TYPES_CACHE.get(_early_cache_key(parent, typevar_values))


-def get_cached_generic_type_late(parent: type[BaseModel], typevar_values:
-    Any, origin: type[BaseModel], args: tuple[Any, ...]) ->(type[BaseModel] |
-    None):
+def get_cached_generic_type_late(
+    parent: type[BaseModel], typevar_values: Any, origin: type[BaseModel], args: tuple[Any, ...]
+) -> type[BaseModel] | None:
     """See the docstring of `get_cached_generic_type_early` for more information about the two-stage cache lookup."""
-    pass
-
-
-def set_cached_generic_type(parent: type[BaseModel], typevar_values: tuple[
-    Any, ...], type_: type[BaseModel], origin: (type[BaseModel] | None)=
-    None, args: (tuple[Any, ...] | None)=None) ->None:
+    cached = _GENERIC_TYPES_CACHE.get(_late_cache_key(origin, args, typevar_values))
+    if cached is not None:
+        set_cached_generic_type(parent, typevar_values, cached, origin, args)
+    return cached
+
+
+def set_cached_generic_type(
+    parent: type[BaseModel],
+    typevar_values: tuple[Any, ...],
+    type_: type[BaseModel],
+    origin: type[BaseModel] | None = None,
+    args: tuple[Any, ...] | None = None,
+) -> None:
     """See the docstring of `get_cached_generic_type_early` for more information about why items are cached with
     two different keys.
     """
-    pass
+    _GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values)] = type_
+    if len(typevar_values) == 1:
+        _GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values[0])] = type_
+    if origin and args:
+        _GENERIC_TYPES_CACHE[_late_cache_key(origin, args, typevar_values)] = type_


-def _union_orderings_key(typevar_values: Any) ->Any:
+def _union_orderings_key(typevar_values: Any) -> Any:
     """This is intended to help differentiate between Union types with the same arguments in different order.

     Thanks to caching internal to the `typing` module, it is not possible to distinguish between
@@ -267,11 +481,18 @@ def _union_orderings_key(typevar_values: Any) ->Any:
     get the exact-correct order of items in the union, but that would require a change to the `typing` module itself.
     (See https://github.com/python/cpython/issues/86483 for reference.)
     """
-    pass
-
-
-def _early_cache_key(cls: type[BaseModel], typevar_values: Any
-    ) ->GenericTypesCacheKey:
+    if isinstance(typevar_values, tuple):
+        args_data = []
+        for value in typevar_values:
+            args_data.append(_union_orderings_key(value))
+        return tuple(args_data)
+    elif typing_extensions.get_origin(typevar_values) is typing.Union:
+        return get_args(typevar_values)
+    else:
+        return ()
+
+
+def _early_cache_key(cls: type[BaseModel], typevar_values: Any) -> GenericTypesCacheKey:
     """This is intended for minimal computational overhead during lookups of cached types.

     Note that this is overly simplistic, and it's possible that two different cls/typevar_values
@@ -280,14 +501,16 @@ def _early_cache_key(cls: type[BaseModel], typevar_values: Any
     lookup fails, and should result in a cache hit _precisely_ when the inputs to __class_getitem__
     would result in the same type.
     """
-    pass
+    return cls, typevar_values, _union_orderings_key(typevar_values)


-def _late_cache_key(origin: type[BaseModel], args: tuple[Any, ...],
-    typevar_values: Any) ->GenericTypesCacheKey:
+def _late_cache_key(origin: type[BaseModel], args: tuple[Any, ...], typevar_values: Any) -> GenericTypesCacheKey:
     """This is intended for use later in the process of creating a new type, when we have more information
     about the exact args that will be passed. If it turns out that a different set of inputs to
     __class_getitem__ resulted in the same inputs to the generic type creation process, we can still
     return the cached type, and update the cache with the _early_cache_key as well.
     """
-    pass
+    # The _union_orderings_key is placed at the start here to ensure there cannot be a collision with an
+    # _early_cache_key, as that function will always produce a BaseModel subclass as the first item in the key,
+    # whereas this function will always produce a tuple as the first item in the key.
+    return _union_orderings_key(typevar_values), origin, args
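
The observable effect of the two-stage cache (a sketch with an illustrative model name): repeated parametrizations with equivalent arguments resolve to the identical class object.

```python
from typing import Generic, Optional, TypeVar, Union
from pydantic import BaseModel

T = TypeVar('T')

class Pair(BaseModel, Generic[T]):
    left: T
    right: T

# both lookups hit the cache and return the very same class object
assert Pair[int] is Pair[int]
# functionally equivalent parametrizations also collapse to one class
assert Pair[Optional[str]] is Pair[Union[str, None]]
```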
diff --git a/pydantic/_internal/_git.py b/pydantic/_internal/_git.py
index c2419c23d..bff0dca38 100644
--- a/pydantic/_internal/_git.py
+++ b/pydantic/_internal/_git.py
@@ -1,19 +1,27 @@
 """Git utilities, adopted from mypy's git utilities (https://github.com/python/mypy/blob/master/mypy/git.py)."""
+
 from __future__ import annotations
+
 import os
 import subprocess


-def is_git_repo(dir: str) ->bool:
+def is_git_repo(dir: str) -> bool:
     """Is the given directory version-controlled with git?"""
-    pass
+    return os.path.exists(os.path.join(dir, '.git'))


-def have_git() ->bool:
+def have_git() -> bool:
     """Can we run the git executable?"""
-    pass
+    try:
+        subprocess.check_output(['git', '--help'])
+        return True
+    except subprocess.CalledProcessError:
+        return False
+    except OSError:
+        return False


-def git_revision(dir: str) ->str:
+def git_revision(dir: str) -> str:
     """Get the SHA-1 of the HEAD of a git repository."""
-    pass
+    return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], cwd=dir).decode('utf-8').strip()
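
A hypothetical caller of these helpers (the path is illustrative):

```python
repo = '.'
if have_git() and is_git_repo(repo):
    print('checked out at revision', git_revision(repo))
else:
    print('not a git checkout, or git is unavailable')
```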
diff --git a/pydantic/_internal/_internal_dataclass.py b/pydantic/_internal/_internal_dataclass.py
index 3c25d53b8..33e152cc8 100644
--- a/pydantic/_internal/_internal_dataclass.py
+++ b/pydantic/_internal/_internal_dataclass.py
@@ -1,4 +1,6 @@
 import sys
+
+# `slots` is available on Python >= 3.10
 if sys.version_info >= (3, 10):
     slots_true = {'slots': True}
 else:
diff --git a/pydantic/_internal/_known_annotated_metadata.py b/pydantic/_internal/_known_annotated_metadata.py
index 4df2ee94b..971ab6856 100644
--- a/pydantic/_internal/_known_annotated_metadata.py
+++ b/pydantic/_internal/_known_annotated_metadata.py
@@ -1,65 +1,123 @@
 from __future__ import annotations
+
 from collections import defaultdict
 from copy import copy
 from functools import lru_cache, partial
 from typing import TYPE_CHECKING, Any, Callable, Iterable
+
 from pydantic_core import CoreSchema, PydanticCustomError, to_jsonable_python
 from pydantic_core import core_schema as cs
+
 from ._fields import PydanticMetadata
+
 if TYPE_CHECKING:
     from ..annotated_handlers import GetJsonSchemaHandler
+
 STRICT = {'strict'}
 FAIL_FAST = {'fail_fast'}
 LENGTH_CONSTRAINTS = {'min_length', 'max_length'}
 INEQUALITY = {'le', 'ge', 'lt', 'gt'}
 NUMERIC_CONSTRAINTS = {'multiple_of', *INEQUALITY}
 ALLOW_INF_NAN = {'allow_inf_nan'}
-STR_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT, 'strip_whitespace',
-    'to_lower', 'to_upper', 'pattern', 'coerce_numbers_to_str'}
+
+STR_CONSTRAINTS = {
+    *LENGTH_CONSTRAINTS,
+    *STRICT,
+    'strip_whitespace',
+    'to_lower',
+    'to_upper',
+    'pattern',
+    'coerce_numbers_to_str',
+}
 BYTES_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT}
+
 LIST_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT, *FAIL_FAST}
 TUPLE_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT, *FAIL_FAST}
 SET_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT, *FAIL_FAST}
 DICT_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT}
 GENERATOR_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT}
 SEQUENCE_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *FAIL_FAST}
+
 FLOAT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *ALLOW_INF_NAN, *STRICT}
 DECIMAL_CONSTRAINTS = {'max_digits', 'decimal_places', *FLOAT_CONSTRAINTS}
 INT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *ALLOW_INF_NAN, *STRICT}
 BOOL_CONSTRAINTS = STRICT
 UUID_CONSTRAINTS = STRICT
+
 DATE_TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
 TIMEDELTA_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
 TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT}
 LAX_OR_STRICT_CONSTRAINTS = STRICT
 ENUM_CONSTRAINTS = STRICT
+
 UNION_CONSTRAINTS = {'union_mode'}
-URL_CONSTRAINTS = {'max_length', 'allowed_schemes', 'host_required',
-    'default_host', 'default_port', 'default_path'}
-TEXT_SCHEMA_TYPES = 'str', 'bytes', 'url', 'multi-host-url'
-SEQUENCE_SCHEMA_TYPES = ('list', 'tuple', 'set', 'frozenset', 'generator',
-    *TEXT_SCHEMA_TYPES)
-NUMERIC_SCHEMA_TYPES = 'float', 'int', 'date', 'time', 'timedelta', 'datetime'
+URL_CONSTRAINTS = {
+    'max_length',
+    'allowed_schemes',
+    'host_required',
+    'default_host',
+    'default_port',
+    'default_path',
+}
+
+TEXT_SCHEMA_TYPES = ('str', 'bytes', 'url', 'multi-host-url')
+SEQUENCE_SCHEMA_TYPES = ('list', 'tuple', 'set', 'frozenset', 'generator', *TEXT_SCHEMA_TYPES)
+NUMERIC_SCHEMA_TYPES = ('float', 'int', 'date', 'time', 'timedelta', 'datetime')
+
 CONSTRAINTS_TO_ALLOWED_SCHEMAS: dict[str, set[str]] = defaultdict(set)
-constraint_schema_pairings: list[tuple[set[str], tuple[str, ...]]] = [(
-    STR_CONSTRAINTS, TEXT_SCHEMA_TYPES), (BYTES_CONSTRAINTS, ('bytes',)), (
-    LIST_CONSTRAINTS, ('list',)), (TUPLE_CONSTRAINTS, ('tuple',)), (
-    SET_CONSTRAINTS, ('set', 'frozenset')), (DICT_CONSTRAINTS, ('dict',)),
-    (GENERATOR_CONSTRAINTS, ('generator',)), (FLOAT_CONSTRAINTS, ('float',)
-    ), (INT_CONSTRAINTS, ('int',)), (DATE_TIME_CONSTRAINTS, ('date', 'time',
-    'datetime')), (TIMEDELTA_CONSTRAINTS, ('timedelta',)), (
-    TIME_CONSTRAINTS, ('time',)), (STRICT, (*TEXT_SCHEMA_TYPES, *
-    SEQUENCE_SCHEMA_TYPES, *NUMERIC_SCHEMA_TYPES, 'typed-dict', 'model')),
-    (UNION_CONSTRAINTS, ('union',)), (URL_CONSTRAINTS, ('url',
-    'multi-host-url')), (BOOL_CONSTRAINTS, ('bool',)), (UUID_CONSTRAINTS, (
-    'uuid',)), (LAX_OR_STRICT_CONSTRAINTS, ('lax-or-strict',)), (
-    ENUM_CONSTRAINTS, ('enum',)), (DECIMAL_CONSTRAINTS, ('decimal',))]
+
+constraint_schema_pairings: list[tuple[set[str], tuple[str, ...]]] = [
+    (STR_CONSTRAINTS, TEXT_SCHEMA_TYPES),
+    (BYTES_CONSTRAINTS, ('bytes',)),
+    (LIST_CONSTRAINTS, ('list',)),
+    (TUPLE_CONSTRAINTS, ('tuple',)),
+    (SET_CONSTRAINTS, ('set', 'frozenset')),
+    (DICT_CONSTRAINTS, ('dict',)),
+    (GENERATOR_CONSTRAINTS, ('generator',)),
+    (FLOAT_CONSTRAINTS, ('float',)),
+    (INT_CONSTRAINTS, ('int',)),
+    (DATE_TIME_CONSTRAINTS, ('date', 'time', 'datetime')),
+    (TIMEDELTA_CONSTRAINTS, ('timedelta',)),
+    (TIME_CONSTRAINTS, ('time',)),
+    # TODO: this is a bit redundant, we could probably avoid some of these
+    (STRICT, (*TEXT_SCHEMA_TYPES, *SEQUENCE_SCHEMA_TYPES, *NUMERIC_SCHEMA_TYPES, 'typed-dict', 'model')),
+    (UNION_CONSTRAINTS, ('union',)),
+    (URL_CONSTRAINTS, ('url', 'multi-host-url')),
+    (BOOL_CONSTRAINTS, ('bool',)),
+    (UUID_CONSTRAINTS, ('uuid',)),
+    (LAX_OR_STRICT_CONSTRAINTS, ('lax-or-strict',)),
+    (ENUM_CONSTRAINTS, ('enum',)),
+    (DECIMAL_CONSTRAINTS, ('decimal',)),
+]
+
 for constraints, schemas in constraint_schema_pairings:
     for c in constraints:
         CONSTRAINTS_TO_ALLOWED_SCHEMAS[c].update(schemas)


-def expand_grouped_metadata(annotations: Iterable[Any]) ->Iterable[Any]:
+def add_js_update_schema(s: cs.CoreSchema, f: Callable[[], dict[str, Any]]) -> None:
+    def update_js_schema(s: cs.CoreSchema, handler: GetJsonSchemaHandler) -> dict[str, Any]:
+        js_schema = handler(s)
+        js_schema.update(f())
+        return js_schema
+
+    if 'metadata' in s:
+        metadata = s['metadata']
+        if 'pydantic_js_functions' in metadata:
+            metadata['pydantic_js_functions'].append(update_js_schema)
+        else:
+            metadata['pydantic_js_functions'] = [update_js_schema]
+    else:
+        s['metadata'] = {'pydantic_js_functions': [update_js_schema]}
+
+
+def as_jsonable_value(v: Any) -> Any:
+    if type(v) not in (int, str, float, bytes, bool, type(None)):
+        return to_jsonable_python(v)
+    return v
+
+
+def expand_grouped_metadata(annotations: Iterable[Any]) -> Iterable[Any]:
     """Expand the annotations.

     Args:
@@ -78,11 +136,29 @@ def expand_grouped_metadata(annotations: Iterable[Any]) ->Iterable[Any]:
         #> [Ge(ge=4), MinLen(min_length=5)]
         ```
     """
-    pass
+    import annotated_types as at
+
+    from pydantic.fields import FieldInfo  # circular import
+
+    for annotation in annotations:
+        if isinstance(annotation, at.GroupedMetadata):
+            yield from annotation
+        elif isinstance(annotation, FieldInfo):
+            yield from annotation.metadata
+            # this is a bit problematic in that it results in duplicate metadata
+            # all of our "consumers" can handle it, but it is not ideal
+            # we probably should split up FieldInfo into:
+            # - annotated types metadata
+            # - individual metadata known only to Pydantic
+            annotation = copy(annotation)
+            annotation.metadata = []
+            yield annotation
+        else:
+            yield annotation


 @lru_cache
-def _get_at_to_constraint_map() ->dict[type, str]:
+def _get_at_to_constraint_map() -> dict[type, str]:
     """Return a mapping of annotated types to constraints.

     Normally, we would define a mapping like this in the module scope, but we can't do that
@@ -90,11 +166,20 @@ def _get_at_to_constraint_map() ->dict[type, str]:
     the import time of `pydantic`. We still only want to have this dictionary defined in one place,
     so we use this function to cache the result.
     """
-    pass
+    import annotated_types as at
+
+    return {
+        at.Gt: 'gt',
+        at.Ge: 'ge',
+        at.Lt: 'lt',
+        at.Le: 'le',
+        at.MultipleOf: 'multiple_of',
+        at.MinLen: 'min_length',
+        at.MaxLen: 'max_length',
+    }


-def apply_known_metadata(annotation: Any, schema: CoreSchema) ->(CoreSchema |
-    None):
+def apply_known_metadata(annotation: Any, schema: CoreSchema) -> CoreSchema | None:  # noqa: C901
     """Apply `annotation` to `schema` if it is an annotation we know about (Gt, Le, etc.).
     Otherwise return `None`.

@@ -113,11 +198,104 @@ def apply_known_metadata(annotation: Any, schema: CoreSchema) ->(CoreSchema |
     Raises:
         PydanticCustomError: If `Predicate` fails.
     """
-    pass
+    import annotated_types as at
+
+    from ._validators import forbid_inf_nan_check, get_constraint_validator
+
+    schema = schema.copy()
+    schema_update, other_metadata = collect_known_metadata([annotation])
+    schema_type = schema['type']
+
+    chain_schema_constraints: set[str] = {
+        'pattern',
+        'strip_whitespace',
+        'to_lower',
+        'to_upper',
+        'coerce_numbers_to_str',
+    }
+    chain_schema_steps: list[CoreSchema] = []
+
+    for constraint, value in schema_update.items():
+        if constraint not in CONSTRAINTS_TO_ALLOWED_SCHEMAS:
+            raise ValueError(f'Unknown constraint {constraint}')
+        allowed_schemas = CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint]
+
+        # if it becomes necessary to handle more than one constraint
+        # in this recursive case with function-after or function-wrap, we should refactor
+        # this is a bit challenging because we sometimes want to apply constraints to the inner schema,
+        # whereas other times we want to wrap the existing schema with a new one that enforces a new constraint.
+        if schema_type in {'function-before', 'function-wrap', 'function-after'} and constraint == 'strict':
+            schema['schema'] = apply_known_metadata(annotation, schema['schema'])  # type: ignore  # schema is function-after schema
+            return schema
+
+        if schema_type in allowed_schemas:
+            if constraint == 'union_mode' and schema_type == 'union':
+                schema['mode'] = value  # type: ignore  # schema is UnionSchema
+            else:
+                schema[constraint] = value
+            continue

+        if constraint in chain_schema_constraints:
+            chain_schema_steps.append(cs.str_schema(**{constraint: value}))
+        elif constraint in {*NUMERIC_CONSTRAINTS, *LENGTH_CONSTRAINTS}:
+            if constraint in NUMERIC_CONSTRAINTS:
+                json_schema_constraint = constraint
+            elif constraint in LENGTH_CONSTRAINTS:
+                inner_schema = schema
+                while inner_schema['type'] in {'function-before', 'function-wrap', 'function-after'}:
+                    inner_schema = inner_schema['schema']  # type: ignore
+                inner_schema_type = inner_schema['type']
+                if inner_schema_type == 'list' or (
+                    inner_schema_type == 'json-or-python' and inner_schema['json_schema']['type'] == 'list'  # type: ignore
+                ):
+                    json_schema_constraint = 'minItems' if constraint == 'min_length' else 'maxItems'
+                else:
+                    json_schema_constraint = 'minLength' if constraint == 'min_length' else 'maxLength'

-def collect_known_metadata(annotations: Iterable[Any]) ->tuple[dict[str,
-    Any], list[Any]]:
+            schema = cs.no_info_after_validator_function(
+                partial(get_constraint_validator(constraint), **{constraint: value}), schema
+            )
+            add_js_update_schema(schema, lambda: {json_schema_constraint: as_jsonable_value(value)})
+        elif constraint == 'allow_inf_nan' and value is False:
+            schema = cs.no_info_after_validator_function(
+                forbid_inf_nan_check,
+                schema,
+            )
+        else:
+            raise RuntimeError(f'Unable to apply constraint {constraint} to schema {schema_type}')
+
+    for annotation in other_metadata:
+        if (annotation_type := type(annotation)) in (at_to_constraint_map := _get_at_to_constraint_map()):
+            constraint = at_to_constraint_map[annotation_type]
+            schema = cs.no_info_after_validator_function(
+                partial(get_constraint_validator(constraint), **{constraint: getattr(annotation, constraint)}), schema
+            )
+            continue
+        elif isinstance(annotation, at.Predicate):
+            predicate_name = f'{annotation.func.__qualname__} ' if hasattr(annotation.func, '__qualname__') else ''
+
+            def val_func(v: Any) -> Any:
+                # annotation.func may also raise an exception, let it pass through
+                if not annotation.func(v):
+                    raise PydanticCustomError(
+                        'predicate_failed',
+                        f'Predicate {predicate_name}failed',  # type: ignore
+                    )
+                return v
+
+            schema = cs.no_info_after_validator_function(val_func, schema)
+        else:
+            # ignore any other unknown metadata
+            return None
+
+    if chain_schema_steps:
+        chain_schema_steps = [schema] + chain_schema_steps
+        return cs.chain_schema(chain_schema_steps)
+
+    return schema
+
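Observable effect of the constraint handling above through the public API (a hedged sketch): known `annotated_types` constraints end up enforced by the generated schema.

```python
import annotated_types as at
from typing_extensions import Annotated
from pydantic import TypeAdapter, ValidationError

ta = TypeAdapter(Annotated[int, at.Gt(0)])
assert ta.validate_python(3) == 3
try:
    ta.validate_python(-1)
except ValidationError as exc:
    assert exc.errors()[0]['type'] == 'greater_than'
```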
+
+def collect_known_metadata(annotations: Iterable[Any]) -> tuple[dict[str, Any], list[Any]]:
     """Split `annotations` into known metadata and unknown annotations.

     Args:
@@ -136,11 +314,33 @@ def collect_known_metadata(annotations: Iterable[Any]) ->tuple[dict[str,
         #> ({'gt': 1, 'min_length': 42}, [Ellipsis])
         ```
     """
-    pass
+    annotations = expand_grouped_metadata(annotations)
+
+    res: dict[str, Any] = {}
+    remaining: list[Any] = []
+
+    for annotation in annotations:
+        # isinstance(annotation, PydanticMetadata) also covers ._fields:_PydanticGeneralMetadata
+        if isinstance(annotation, PydanticMetadata):
+            res.update(annotation.__dict__)
+        # we don't use dataclasses.asdict because that recursively calls asdict on the field values
+        elif (annotation_type := type(annotation)) in (at_to_constraint_map := _get_at_to_constraint_map()):
+            constraint = at_to_constraint_map[annotation_type]
+            res[constraint] = getattr(annotation, constraint)
+        elif isinstance(annotation, type) and issubclass(annotation, PydanticMetadata):
+            # also support PydanticMetadata classes being used without initialisation,
+            # e.g. `Annotated[int, Strict]` as well as `Annotated[int, Strict()]`
+            res.update({k: v for k, v in vars(annotation).items() if not k.startswith('_')})
+        else:
+            remaining.append(annotation)
+    # Nones can sneak in but pydantic-core will reject them
+    # it'd be nice to clean things up so we don't put in None (we probably don't _need_ to, it was just easier)
+    # but this is simple enough to kick that can down the road
+    res = {k: v for k, v in res.items() if v is not None}
+    return res, remaining


-def check_metadata(metadata: dict[str, Any], allowed: Iterable[str],
-    source_type: Any) ->None:
+def check_metadata(metadata: dict[str, Any], allowed: Iterable[str], source_type: Any) -> None:
     """A small utility function to validate that the given metadata can be applied to the target.
     More than saving lines of code, this gives us a consistent error message for all of our internal implementations.

@@ -152,4 +352,8 @@ def check_metadata(metadata: dict[str, Any], allowed: Iterable[str],
     Raises:
         TypeError: If there is metadatas that can't be applied on source type.
     """
-    pass
+    unknown = metadata.keys() - set(allowed)
+    if unknown:
+        raise TypeError(
+            f'The following constraints cannot be applied to {source_type!r}: {", ".join([f"{k!r}" for k in unknown])}'
+        )
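
A small sketch of `check_metadata`'s contract (the arguments are illustrative):

```python
check_metadata({'gt': 1}, {'gt', 'ge'}, int)  # allowed: returns None silently
try:
    check_metadata({'min_length': 3}, {'gt', 'ge'}, int)
except TypeError as exc:
    print(exc)  # The following constraints cannot be applied to <class 'int'>: 'min_length'
```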
diff --git a/pydantic/_internal/_mock_val_ser.py b/pydantic/_internal/_mock_val_ser.py
index ff4c61394..3e55af70d 100644
--- a/pydantic/_internal/_mock_val_ser.py
+++ b/pydantic/_internal/_mock_val_ser.py
@@ -1,14 +1,19 @@
 from __future__ import annotations
+
 from typing import TYPE_CHECKING, Any, Callable, Generic, Iterator, Mapping, TypeVar, Union
+
 from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator
 from typing_extensions import Literal
+
 from ..errors import PydanticErrorCodes, PydanticUserError
 from ..plugin._schema_validator import PluggableSchemaValidator
+
 if TYPE_CHECKING:
     from ..dataclasses import PydanticDataclass
     from ..main import BaseModel
-ValSer = TypeVar('ValSer', bound=Union[SchemaValidator,
-    PluggableSchemaValidator, SchemaSerializer])
+
+
+ValSer = TypeVar('ValSer', bound=Union[SchemaValidator, PluggableSchemaValidator, SchemaSerializer])
 T = TypeVar('T')


@@ -16,52 +21,94 @@ class MockCoreSchema(Mapping[str, Any]):
     """Mocker for `pydantic_core.CoreSchema` which optionally attempts to
     rebuild the thing it's mocking when one of its methods is accessed and raises an error if that fails.
     """
+
     __slots__ = '_error_message', '_code', '_attempt_rebuild', '_built_memo'

-    def __init__(self, error_message: str, *, code: PydanticErrorCodes,
-        attempt_rebuild: (Callable[[], CoreSchema | None] | None)=None) ->None:
+    def __init__(
+        self,
+        error_message: str,
+        *,
+        code: PydanticErrorCodes,
+        attempt_rebuild: Callable[[], CoreSchema | None] | None = None,
+    ) -> None:
         self._error_message = error_message
         self._code: PydanticErrorCodes = code
         self._attempt_rebuild = attempt_rebuild
         self._built_memo: CoreSchema | None = None

-    def __getitem__(self, key: str) ->Any:
+    def __getitem__(self, key: str) -> Any:
         return self._get_built().__getitem__(key)

-    def __len__(self) ->int:
+    def __len__(self) -> int:
         return self._get_built().__len__()

-    def __iter__(self) ->Iterator[str]:
+    def __iter__(self) -> Iterator[str]:
         return self._get_built().__iter__()

+    def _get_built(self) -> CoreSchema:
+        if self._built_memo is not None:
+            return self._built_memo
+
+        if self._attempt_rebuild:
+            schema = self._attempt_rebuild()
+            if schema is not None:
+                self._built_memo = schema
+                return schema
+        raise PydanticUserError(self._error_message, code=self._code)
+
+    def rebuild(self) -> CoreSchema | None:
+        self._built_memo = None
+        if self._attempt_rebuild:
+            val_ser = self._attempt_rebuild()
+            if val_ser is not None:
+                return val_ser
+            else:
+                raise PydanticUserError(self._error_message, code=self._code)
+        return None
+

 class MockValSer(Generic[ValSer]):
     """Mocker for `pydantic_core.SchemaValidator` or `pydantic_core.SchemaSerializer` which optionally attempts to
     rebuild the thing it's mocking when one of its methods is accessed and raises an error if that fails.
     """
+
     __slots__ = '_error_message', '_code', '_val_or_ser', '_attempt_rebuild'

-    def __init__(self, error_message: str, *, code: PydanticErrorCodes,
-        val_or_ser: Literal['validator', 'serializer'], attempt_rebuild: (
-        Callable[[], ValSer | None] | None)=None) ->None:
+    def __init__(
+        self,
+        error_message: str,
+        *,
+        code: PydanticErrorCodes,
+        val_or_ser: Literal['validator', 'serializer'],
+        attempt_rebuild: Callable[[], ValSer | None] | None = None,
+    ) -> None:
         self._error_message = error_message
-        self._val_or_ser = (SchemaValidator if val_or_ser == 'validator' else
-            SchemaSerializer)
+        self._val_or_ser = SchemaValidator if val_or_ser == 'validator' else SchemaSerializer
         self._code: PydanticErrorCodes = code
         self._attempt_rebuild = attempt_rebuild

-    def __getattr__(self, item: str) ->None:
+    def __getattr__(self, item: str) -> None:
         __tracebackhide__ = True
         if self._attempt_rebuild:
             val_ser = self._attempt_rebuild()
             if val_ser is not None:
                 return getattr(val_ser, item)
+
+        # raise an AttributeError if `item` doesn't exist
         getattr(self._val_or_ser, item)
         raise PydanticUserError(self._error_message, code=self._code)

+    def rebuild(self) -> ValSer | None:
+        if self._attempt_rebuild:
+            val_ser = self._attempt_rebuild()
+            if val_ser is not None:
+                return val_ser
+            else:
+                raise PydanticUserError(self._error_message, code=self._code)
+        return None
+

-def set_model_mocks(cls: type[BaseModel], cls_name: str, undefined_name:
-    str='all referenced types') ->None:
+def set_model_mocks(cls: type[BaseModel], cls_name: str, undefined_name: str = 'all referenced types') -> None:
     """Set `__pydantic_validator__` and `__pydantic_serializer__` to `MockValSer`s on a model.

     Args:
@@ -69,11 +116,42 @@ def set_model_mocks(cls: type[BaseModel], cls_name: str, undefined_name:
         cls_name: Name of the model class, used in error messages
         undefined_name: Name of the undefined thing, used in error messages
     """
-    pass
-
-
-def set_dataclass_mocks(cls: type[PydanticDataclass], cls_name: str,
-    undefined_name: str='all referenced types') ->None:
+    undefined_type_error_message = (
+        f'`{cls_name}` is not fully defined; you should define {undefined_name},'
+        f' then call `{cls_name}.model_rebuild()`.'
+    )
+
+    def attempt_rebuild_fn(attr_fn: Callable[[type[BaseModel]], T]) -> Callable[[], T | None]:
+        def handler() -> T | None:
+            if cls.model_rebuild(raise_errors=False, _parent_namespace_depth=5) is not False:
+                return attr_fn(cls)
+            else:
+                return None
+
+        return handler
+
+    cls.__pydantic_core_schema__ = MockCoreSchema(  # type: ignore[assignment]
+        undefined_type_error_message,
+        code='class-not-fully-defined',
+        attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_core_schema__),
+    )
+    cls.__pydantic_validator__ = MockValSer(  # type: ignore[assignment]
+        undefined_type_error_message,
+        code='class-not-fully-defined',
+        val_or_ser='validator',
+        attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_validator__),
+    )
+    cls.__pydantic_serializer__ = MockValSer(  # type: ignore[assignment]
+        undefined_type_error_message,
+        code='class-not-fully-defined',
+        val_or_ser='serializer',
+        attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_serializer__),
+    )
+
+
+def set_dataclass_mocks(
+    cls: type[PydanticDataclass], cls_name: str, undefined_name: str = 'all referenced types'
+) -> None:
     """Set `__pydantic_validator__` and `__pydantic_serializer__` to `MockValSer`s on a dataclass.

     Args:
@@ -81,4 +159,36 @@ def set_dataclass_mocks(cls: type[PydanticDataclass], cls_name: str,
         cls_name: Name of the model class, used in error messages
         undefined_name: Name of the undefined thing, used in error messages
     """
-    pass
+    from ..dataclasses import rebuild_dataclass
+
+    undefined_type_error_message = (
+        f'`{cls_name}` is not fully defined; you should define {undefined_name},'
+        f' then call `pydantic.dataclasses.rebuild_dataclass({cls_name})`.'
+    )
+
+    def attempt_rebuild_fn(attr_fn: Callable[[type[PydanticDataclass]], T]) -> Callable[[], T | None]:
+        def handler() -> T | None:
+            if rebuild_dataclass(cls, raise_errors=False, _parent_namespace_depth=5) is not False:
+                return attr_fn(cls)
+            else:
+                return None
+
+        return handler
+
+    cls.__pydantic_core_schema__ = MockCoreSchema(  # type: ignore[assignment]
+        undefined_type_error_message,
+        code='class-not-fully-defined',
+        attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_core_schema__),
+    )
+    cls.__pydantic_validator__ = MockValSer(  # type: ignore[assignment]
+        undefined_type_error_message,
+        code='class-not-fully-defined',
+        val_or_ser='validator',
+        attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_validator__),
+    )
+    cls.__pydantic_serializer__ = MockValSer(  # type: ignore[assignment]
+        undefined_type_error_message,
+        code='class-not-fully-defined',
+        val_or_ser='serializer',
+        attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_serializer__),
+    )
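
The user-visible behaviour these mocks provide (a hedged sketch; `Missing` is intentionally left undefined): until the forward reference can be resolved and `model_rebuild()` succeeds, using the class raises a descriptive error.

```python
from pydantic import BaseModel, PydanticUserError

class Node(BaseModel):
    child: 'Missing' = None  # 'Missing' is not defined anywhere

try:
    Node(child=None)
except PydanticUserError as err:
    assert err.code == 'class-not-fully-defined'
```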
diff --git a/pydantic/_internal/_model_construction.py b/pydantic/_internal/_model_construction.py
index f8876857b..3212476cf 100644
--- a/pydantic/_internal/_model_construction.py
+++ b/pydantic/_internal/_model_construction.py
@@ -1,5 +1,7 @@
 """Private logic for creating models."""
+
 from __future__ import annotations as _annotations
+
 import builtins
 import operator
 import typing
@@ -9,9 +11,11 @@ from abc import ABCMeta
 from functools import partial
 from types import FunctionType
 from typing import Any, Callable, Generic, NoReturn
+
 import typing_extensions
 from pydantic_core import PydanticUndefined, SchemaSerializer
 from typing_extensions import dataclass_transform, deprecated
+
 from ..errors import PydanticUndefinedAnnotation, PydanticUserError
 from ..plugin._schema_validator import create_schema_validator
 from ..warnings import GenericBeforeBaseModelWarning, PydanticDeprecatedSince20
@@ -26,15 +30,19 @@ from ._signature import generate_pydantic_signature
 from ._typing_extra import get_cls_types_namespace, is_annotated, is_classvar, parent_frame_namespace
 from ._utils import ClassAttribute, SafeGetItemProxy
 from ._validate_call import ValidateCallWrapper
+
 if typing.TYPE_CHECKING:
     from ..fields import Field as PydanticModelField
     from ..fields import FieldInfo, ModelPrivateAttr
     from ..fields import PrivateAttr as PydanticModelPrivateAttr
     from ..main import BaseModel
 else:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
     DeprecationWarning = PydanticDeprecatedSince20
     PydanticModelField = object()
     PydanticModelPrivateAttr = object()
+
 object_setattr = object.__setattr__


@@ -43,25 +51,26 @@ class _ModelNamespaceDict(dict):
     warns about overriding of decorators.
     """

-    def __setitem__(self, k: str, v: object) ->None:
+    def __setitem__(self, k: str, v: object) -> None:
         existing: Any = self.get(k, None)
-        if existing and v is not existing and isinstance(existing,
-            PydanticDescriptorProxy):
-            warnings.warn(
-                f'`{k}` overrides an existing Pydantic `{existing.decorator_info.decorator_repr}` decorator'
-                )
+        if existing and v is not existing and isinstance(existing, PydanticDescriptorProxy):
+            warnings.warn(f'`{k}` overrides an existing Pydantic `{existing.decorator_info.decorator_repr}` decorator')
+
         return super().__setitem__(k, v)


-@dataclass_transform(kw_only_default=True, field_specifiers=(
-    PydanticModelField, PydanticModelPrivateAttr))
+@dataclass_transform(kw_only_default=True, field_specifiers=(PydanticModelField, PydanticModelPrivateAttr))
 class ModelMetaclass(ABCMeta):
-
-    def __new__(mcs, cls_name: str, bases: tuple[type[Any], ...], namespace:
-        dict[str, Any], __pydantic_generic_metadata__: (
-        PydanticGenericMetadata | None)=None,
-        __pydantic_reset_parent_namespace__: bool=True,
-        _create_model_module: (str | None)=None, **kwargs: Any) ->type:
+    def __new__(
+        mcs,
+        cls_name: str,
+        bases: tuple[type[Any], ...],
+        namespace: dict[str, Any],
+        __pydantic_generic_metadata__: PydanticGenericMetadata | None = None,
+        __pydantic_reset_parent_namespace__: bool = True,
+        _create_model_module: str | None = None,
+        **kwargs: Any,
+    ) -> type:
         """Metaclass for creating Pydantic models.

         Args:
@@ -76,111 +85,151 @@ class ModelMetaclass(ABCMeta):
         Returns:
             The new class created by the metaclass.
         """
+        # Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we rely on the fact
+        # that `BaseModel` itself won't have any bases, but any subclass of it will, to determine whether the `__new__`
+        # call we're in the middle of is for the `BaseModel` class.
         if bases:
-            base_field_names, class_vars, base_private_attributes = (mcs.
-                _collect_bases_data(bases))
+            base_field_names, class_vars, base_private_attributes = mcs._collect_bases_data(bases)
+
             config_wrapper = ConfigWrapper.for_model(bases, namespace, kwargs)
             namespace['model_config'] = config_wrapper.config_dict
-            private_attributes = inspect_namespace(namespace,
-                config_wrapper.ignored_types, class_vars, base_field_names)
+            private_attributes = inspect_namespace(
+                namespace, config_wrapper.ignored_types, class_vars, base_field_names
+            )
             if private_attributes or base_private_attributes:
-                original_model_post_init = get_model_post_init(namespace, bases
-                    )
+                original_model_post_init = get_model_post_init(namespace, bases)
                 if original_model_post_init is not None:
+                    # if there are private_attributes and a model_post_init function, we handle both

-                    def wrapped_model_post_init(self: BaseModel, context:
-                        Any, /) ->None:
+                    def wrapped_model_post_init(self: BaseModel, context: Any, /) -> None:
                         """We need to both initialize private attributes and call the user-defined model_post_init
                         method.
                         """
                         init_private_attributes(self, context)
                         original_model_post_init(self, context)
+
                     namespace['model_post_init'] = wrapped_model_post_init
                 else:
                     namespace['model_post_init'] = init_private_attributes
+
             namespace['__class_vars__'] = class_vars
-            namespace['__private_attributes__'] = {**
-                base_private_attributes, **private_attributes}
-            cls: type[BaseModel] = super().__new__(mcs, cls_name, bases,
-                namespace, **kwargs)
+            namespace['__private_attributes__'] = {**base_private_attributes, **private_attributes}
+
+            cls: type[BaseModel] = super().__new__(mcs, cls_name, bases, namespace, **kwargs)  # type: ignore
+
             from ..main import BaseModel
+
             mro = cls.__mro__
             if Generic in mro and mro.index(Generic) < mro.index(BaseModel):
-                warnings.warn(GenericBeforeBaseModelWarning(
-                    'Classes should inherit from `BaseModel` before generic classes (e.g. `typing.Generic[T]`) for pydantic generics to work properly.'
-                    ), stacklevel=2)
-            cls.__pydantic_custom_init__ = not getattr(cls.__init__,
-                '__pydantic_base_init__', False)
-            cls.__pydantic_post_init__ = (None if cls.model_post_init is
-                BaseModel.model_post_init else 'model_post_init')
+                warnings.warn(
+                    GenericBeforeBaseModelWarning(
+                        'Classes should inherit from `BaseModel` before generic classes (e.g. `typing.Generic[T]`) '
+                        'for pydantic generics to work properly.'
+                    ),
+                    stacklevel=2,
+                )
+
+            cls.__pydantic_custom_init__ = not getattr(cls.__init__, '__pydantic_base_init__', False)
+            cls.__pydantic_post_init__ = None if cls.model_post_init is BaseModel.model_post_init else 'model_post_init'
+
             cls.__pydantic_decorators__ = DecoratorInfos.build(cls)
+
+            # Use the getattr below to grab the __parameters__ from the `typing.Generic` parent class
             if __pydantic_generic_metadata__:
-                cls.__pydantic_generic_metadata__ = (
-                    __pydantic_generic_metadata__)
+                cls.__pydantic_generic_metadata__ = __pydantic_generic_metadata__
             else:
-                parent_parameters = getattr(cls,
-                    '__pydantic_generic_metadata__', {}).get('parameters', ())
-                parameters = getattr(cls, '__parameters__', None
-                    ) or parent_parameters
-                if parameters and parent_parameters and not all(x in
-                    parameters for x in parent_parameters):
+                parent_parameters = getattr(cls, '__pydantic_generic_metadata__', {}).get('parameters', ())
+                parameters = getattr(cls, '__parameters__', None) or parent_parameters
+                if parameters and parent_parameters and not all(x in parameters for x in parent_parameters):
                     from ..root_model import RootModelRootType
-                    missing_parameters = tuple(x for x in parameters if x
-                         not in parent_parameters)
-                    if (RootModelRootType in parent_parameters and 
-                        RootModelRootType not in parameters):
-                        parameters_str = ', '.join([x.__name__ for x in
-                            missing_parameters])
+
+                    missing_parameters = tuple(x for x in parameters if x not in parent_parameters)
+                    if RootModelRootType in parent_parameters and RootModelRootType not in parameters:
+                        # This is a special case where the user has subclassed `RootModel`, but has not parametrized
+                        # RootModel with the generic type identifiers being used. Ex:
+                        # class MyModel(RootModel, Generic[T]):
+                        #    root: T
+                        # Should instead just be:
+                        # class MyModel(RootModel[T]):
+                        #   root: T
+                        parameters_str = ', '.join([x.__name__ for x in missing_parameters])
                         error_message = (
-                            f'{cls.__name__} is a subclass of `RootModel`, but does not include the generic type identifier(s) {parameters_str} in its parameters. You should parametrize RootModel directly, e.g., `class {cls.__name__}(RootModel[{parameters_str}]): ...`.'
-                            )
+                            f'{cls.__name__} is a subclass of `RootModel`, but does not include the generic type identifier(s) '
+                            f'{parameters_str} in its parameters. '
+                            f'You should parametrize RootModel directly, e.g., `class {cls.__name__}(RootModel[{parameters_str}]): ...`.'
+                        )
                     else:
-                        combined_parameters = (parent_parameters +
-                            missing_parameters)
-                        parameters_str = ', '.join([str(x) for x in
-                            combined_parameters])
-                        generic_type_label = (
-                            f'typing.Generic[{parameters_str}]')
+                        combined_parameters = parent_parameters + missing_parameters
+                        parameters_str = ', '.join([str(x) for x in combined_parameters])
+                        generic_type_label = f'typing.Generic[{parameters_str}]'
                         error_message = (
-                            f'All parameters must be present on typing.Generic; you should inherit from {generic_type_label}.'
-                            )
-                        if Generic not in bases:
-                            bases_str = ', '.join([x.__name__ for x in
-                                bases] + [generic_type_label])
+                            f'All parameters must be present on typing.Generic;'
+                            f' you should inherit from {generic_type_label}.'
+                        )
+                        if Generic not in bases:  # pragma: no cover
+                            # We raise an error here not because it is desirable, but because some cases are mishandled.
+                            # It would be nice to remove this error and still have things behave as expected, it's just
+                            # challenging because we are using a custom `__class_getitem__` to parametrize generic models,
+                            # and not returning a typing._GenericAlias from it.
+                            bases_str = ', '.join([x.__name__ for x in bases] + [generic_type_label])
                             error_message += (
                                 f' Note: `typing.Generic` must go last: `class {cls.__name__}({bases_str}): ...`)'
-                                )
+                            )
                     raise TypeError(error_message)
-                cls.__pydantic_generic_metadata__ = {'origin': None, 'args':
-                    (), 'parameters': parameters}
-            cls.__pydantic_complete__ = False
+
+                cls.__pydantic_generic_metadata__ = {
+                    'origin': None,
+                    'args': (),
+                    'parameters': parameters,
+                }
+
+            cls.__pydantic_complete__ = False  # Ensure this specific class gets completed
+
+            # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487
+            # for attributes not in `new_namespace` (e.g. private attributes)
             for name, obj in private_attributes.items():
                 obj.__set_name__(cls, name)
+
             if __pydantic_reset_parent_namespace__:
-                cls.__pydantic_parent_namespace__ = (
-                    build_lenient_weakvaluedict(parent_frame_namespace()))
-            parent_namespace = getattr(cls, '__pydantic_parent_namespace__',
-                None)
+                cls.__pydantic_parent_namespace__ = build_lenient_weakvaluedict(parent_frame_namespace())
+            parent_namespace = getattr(cls, '__pydantic_parent_namespace__', None)
             if isinstance(parent_namespace, dict):
-                parent_namespace = unpack_lenient_weakvaluedict(
-                    parent_namespace)
+                parent_namespace = unpack_lenient_weakvaluedict(parent_namespace)
+
             types_namespace = get_cls_types_namespace(cls, parent_namespace)
             set_model_fields(cls, bases, config_wrapper, types_namespace)
+
             if config_wrapper.frozen and '__hash__' not in namespace:
                 set_default_hash_func(cls, bases)
-            complete_model_class(cls, cls_name, config_wrapper,
-                raise_errors=False, types_namespace=types_namespace,
-                create_model_module=_create_model_module)
-            cls.model_computed_fields = {k: v.info for k, v in cls.
-                __pydantic_decorators__.computed_fields.items()}
+
+            complete_model_class(
+                cls,
+                cls_name,
+                config_wrapper,
+                raise_errors=False,
+                types_namespace=types_namespace,
+                create_model_module=_create_model_module,
+            )
+
+            # If this is placed before the complete_model_class call above,
+            # the generic computed fields return type is set to PydanticUndefined
+            cls.model_computed_fields = {k: v.info for k, v in cls.__pydantic_decorators__.computed_fields.items()}
+
             set_deprecated_descriptors(cls)
-            super(cls, cls).__pydantic_init_subclass__(**kwargs)
+
+            # using super(cls, cls) on the next line ensures we only call the parent class's __pydantic_init_subclass__
+            # I believe the `type: ignore` is only necessary because mypy doesn't realize that this code branch is
+            # only hit for _proper_ subclasses of BaseModel
+            super(cls, cls).__pydantic_init_subclass__(**kwargs)  # type: ignore[misc]
             return cls
         else:
+            # this is the BaseModel class itself being created, no logic required
             return super().__new__(mcs, cls_name, bases, namespace, **kwargs)
-    if not typing.TYPE_CHECKING:

-        def __getattr__(self, item: str) ->Any:
+    if not typing.TYPE_CHECKING:  # pragma: no branch
+        # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access
+
+        def __getattr__(self, item: str) -> Any:
             """This is necessary to keep attribute access working for class attribute access."""
             private_attributes = self.__dict__.get('__private_attributes__')
             if private_attributes and item in private_attributes:
@@ -188,35 +237,47 @@ class ModelMetaclass(ABCMeta):
             raise AttributeError(item)

     @classmethod
-    def __prepare__(cls, *args: Any, **kwargs: Any) ->dict[str, object]:
+    def __prepare__(cls, *args: Any, **kwargs: Any) -> dict[str, object]:
         return _ModelNamespaceDict()

-    def __instancecheck__(self, instance: Any) ->bool:
+    def __instancecheck__(self, instance: Any) -> bool:
         """Avoid calling ABC _abc_subclasscheck unless we're pretty sure.

         See #3829 and python/cpython#92810
         """
-        return hasattr(instance, '__pydantic_validator__') and super(
-            ).__instancecheck__(instance)
+        return hasattr(instance, '__pydantic_validator__') and super().__instancecheck__(instance)
+
+    @staticmethod
+    def _collect_bases_data(bases: tuple[type[Any], ...]) -> tuple[set[str], set[str], dict[str, ModelPrivateAttr]]:
+        from ..main import BaseModel
+
+        field_names: set[str] = set()
+        class_vars: set[str] = set()
+        private_attributes: dict[str, ModelPrivateAttr] = {}
+        for base in bases:
+            if issubclass(base, BaseModel) and base is not BaseModel:
+                # model_fields might not be defined yet in the case of generics, so we use getattr here:
+                field_names.update(getattr(base, 'model_fields', {}).keys())
+                class_vars.update(base.__class_vars__)
+                private_attributes.update(base.__private_attributes__)
+        return field_names, class_vars, private_attributes

     @property
-    @deprecated(
-        'The `__fields__` attribute is deprecated, use `model_fields` instead.'
-        , category=None)
-    def __fields__(self) ->dict[str, FieldInfo]:
+    @deprecated('The `__fields__` attribute is deprecated, use `model_fields` instead.', category=None)
+    def __fields__(self) -> dict[str, FieldInfo]:
         warnings.warn(
-            'The `__fields__` attribute is deprecated, use `model_fields` instead.'
-            , PydanticDeprecatedSince20)
-        return self.model_fields
+            'The `__fields__` attribute is deprecated, use `model_fields` instead.', PydanticDeprecatedSince20
+        )
+        return self.model_fields  # type: ignore

-    def __dir__(self) ->list[str]:
+    def __dir__(self) -> list[str]:
         attributes = list(super().__dir__())
         if '__fields__' in attributes:
             attributes.remove('__fields__')
         return attributes


-def init_private_attributes(self: BaseModel, context: Any, /) ->None:
+def init_private_attributes(self: BaseModel, context: Any, /) -> None:
     """This function is meant to behave like a BaseModel method to initialise private attributes.

     It takes context as an argument since that's what pydantic-core passes when calling it.
@@ -225,18 +286,33 @@ def init_private_attributes(self: BaseModel, context: Any, /) ->None:
         self: The BaseModel instance.
         context: The context.
     """
-    pass
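+    # Only populate `__pydantic_private__` if it hasn't been set already; attributes whose default
+    # is `PydanticUndefined` (i.e. no default) are left out of the dict.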
+    if getattr(self, '__pydantic_private__', None) is None:
+        pydantic_private = {}
+        for name, private_attr in self.__private_attributes__.items():
+            default = private_attr.get_default()
+            if default is not PydanticUndefined:
+                pydantic_private[name] = default
+        object_setattr(self, '__pydantic_private__', pydantic_private)


-def get_model_post_init(namespace: dict[str, Any], bases: tuple[type[Any], ...]
-    ) ->(Callable[..., Any] | None):
+def get_model_post_init(namespace: dict[str, Any], bases: tuple[type[Any], ...]) -> Callable[..., Any] | None:
     """Get the `model_post_init` method from the namespace or the class bases, or `None` if not defined."""
-    pass
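+    # A `model_post_init` defined directly in the class namespace takes precedence over any base class definition.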
+    if 'model_post_init' in namespace:
+        return namespace['model_post_init']

+    from ..main import BaseModel
+
+    model_post_init = get_attribute_from_bases(bases, 'model_post_init')
+    if model_post_init is not BaseModel.model_post_init:
+        return model_post_init

-def inspect_namespace(namespace: dict[str, Any], ignored_types: tuple[type[
-    Any], ...], base_class_vars: set[str], base_class_fields: set[str]) ->dict[
-    str, ModelPrivateAttr]:
+
+def inspect_namespace(  # noqa C901
+    namespace: dict[str, Any],
+    ignored_types: tuple[type[Any], ...],
+    base_class_vars: set[str],
+    base_class_fields: set[str],
+) -> dict[str, ModelPrivateAttr]:
     """Iterate over the namespace and:
     * gather private attributes
     * check for items which look like fields but are not (e.g. have no annotation) and warn.
@@ -257,11 +333,130 @@ def inspect_namespace(namespace: dict[str, Any], ignored_types: tuple[type[
             - If a field does not have a type annotation.
             - If a field on base class was overridden by a non-annotated attribute.
     """
-    pass
-
+    from ..fields import FieldInfo, ModelPrivateAttr, PrivateAttr
+
+    all_ignored_types = ignored_types + default_ignored_types()
+
+    private_attributes: dict[str, ModelPrivateAttr] = {}
+    raw_annotations = namespace.get('__annotations__', {})
+
+    if '__root__' in raw_annotations or '__root__' in namespace:
+        raise TypeError("To define root models, use `pydantic.RootModel` rather than a field called '__root__'")
+
+    ignored_names: set[str] = set()
+    for var_name, value in list(namespace.items()):
+        if var_name == 'model_config' or var_name == '__pydantic_extra__':
+            continue
+        elif (
+            isinstance(value, type)
+            and value.__module__ == namespace['__module__']
+            and '__qualname__' in namespace
+            and value.__qualname__.startswith(namespace['__qualname__'])
+        ):
+            # `value` is a nested type defined in this namespace; don't error
+            continue
+        elif isinstance(value, all_ignored_types) or value.__class__.__module__ == 'functools':
+            ignored_names.add(var_name)
+            continue
+        elif isinstance(value, ModelPrivateAttr):
+            if var_name.startswith('__'):
+                raise NameError(
+                    'Private attributes must not use dunder names;'
+                    f' use a single underscore prefix instead of {var_name!r}.'
+                )
+            elif is_valid_field_name(var_name):
+                raise NameError(
+                    'Private attributes must not use valid field names;'
+                    f' use sunder names, e.g. {"_" + var_name!r} instead of {var_name!r}.'
+                )
+            private_attributes[var_name] = value
+            del namespace[var_name]
+        elif isinstance(value, FieldInfo) and not is_valid_field_name(var_name):
+            suggested_name = var_name.lstrip('_') or 'my_field'  # don't suggest '' for all-underscore name
+            raise NameError(
+                f'Fields must not use names with leading underscores;'
+                f' e.g., use {suggested_name!r} instead of {var_name!r}.'
+            )
+
+        elif var_name.startswith('__'):
+            continue
+        elif is_valid_privateattr_name(var_name):
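+            # a bare assignment to a sunder name becomes a private attribute with that value as its default,
+            # unless its annotation marks it as a ClassVar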
+            if var_name not in raw_annotations or not is_classvar(raw_annotations[var_name]):
+                private_attributes[var_name] = PrivateAttr(default=value)
+                del namespace[var_name]
+        elif var_name in base_class_vars:
+            continue
+        elif var_name not in raw_annotations:
+            if var_name in base_class_fields:
+                raise PydanticUserError(
+                    f'Field {var_name!r} defined on a base class was overridden by a non-annotated attribute. '
+                    f'All field definitions, including overrides, require a type annotation.',
+                    code='model-field-overridden',
+                )
+            elif isinstance(value, FieldInfo):
+                raise PydanticUserError(
+                    f'Field {var_name!r} requires a type annotation', code='model-field-missing-annotation'
+                )
+            else:
+                raise PydanticUserError(
+                    f'A non-annotated attribute was detected: `{var_name} = {value!r}`. All model fields require a '
+                    f'type annotation; if `{var_name}` is not meant to be a field, you may be able to resolve this '
+                    f"error by annotating it as a `ClassVar` or updating `model_config['ignored_types']`.",
+                    code='model-field-missing-annotation',
+                )

-def set_model_fields(cls: type[BaseModel], bases: tuple[type[Any], ...],
-    config_wrapper: ConfigWrapper, types_namespace: dict[str, Any]) ->None:
+    for ann_name, ann_type in raw_annotations.items():
+        if (
+            is_valid_privateattr_name(ann_name)
+            and ann_name not in private_attributes
+            and ann_name not in ignored_names
+            and not is_classvar(ann_type)
+            and ann_type not in all_ignored_types
+            and getattr(ann_type, '__module__', None) != 'functools'
+        ):
+            if is_annotated(ann_type):
+                _, *metadata = typing_extensions.get_args(ann_type)
+                private_attr = next((v for v in metadata if isinstance(v, ModelPrivateAttr)), None)
+                if private_attr is not None:
+                    private_attributes[ann_name] = private_attr
+                    continue
+            private_attributes[ann_name] = PrivateAttr()
+
+    return private_attributes
+
+
+def set_default_hash_func(cls: type[BaseModel], bases: tuple[type[Any], ...]) -> None:
+    base_hash_func = get_attribute_from_bases(bases, '__hash__')
+    new_hash_func = make_hash_func(cls)
+    if base_hash_func in {None, object.__hash__} or getattr(base_hash_func, '__code__', None) == new_hash_func.__code__:
+        # If `__hash__` is some default, we generate a hash function.
+        # It will be `None` if not overridden from BaseModel.
+        # It may be `object.__hash__` if there is another
+        # parent class earlier in the bases which doesn't override `__hash__` (e.g. `typing.Generic`).
+        # It may be a value set by `set_default_hash_func` if `cls` is a subclass of another frozen model.
+        # In the last case we still need a new hash function to account for new `model_fields`.
+        cls.__hash__ = new_hash_func
+
+
+def make_hash_func(cls: type[BaseModel]) -> Any:
+    getter = operator.itemgetter(*cls.model_fields.keys()) if cls.model_fields else lambda _: 0
+
+    def hash_func(self: Any) -> int:
+        try:
+            return hash(getter(self.__dict__))
+        except KeyError:
+            # In rare cases (such as when using the deprecated copy method), the __dict__ may not contain
+            # all model fields, which is how we can get here.
+            # getter(self.__dict__) is much faster than any 'safe' method that accounts for missing keys,
+            # and wrapping it in a `try` doesn't slow things down much in the common case.
+            return hash(getter(SafeGetItemProxy(self.__dict__)))
+
+    return hash_func
+
+
+def set_model_fields(
+    cls: type[BaseModel], bases: tuple[type[Any], ...], config_wrapper: ConfigWrapper, types_namespace: dict[str, Any]
+) -> None:
     """Collect and set `cls.model_fields` and `cls.__class_vars__`.

     Args:
@@ -270,13 +465,34 @@ def set_model_fields(cls: type[BaseModel], bases: tuple[type[Any], ...],
         config_wrapper: The config wrapper instance.
         types_namespace: Optional extra namespace to look for types in.
     """
-    pass
-
-
-def complete_model_class(cls: type[BaseModel], cls_name: str,
-    config_wrapper: ConfigWrapper, *, raise_errors: bool=True,
-    types_namespace: (dict[str, Any] | None), create_model_module: (str |
-    None)=None) ->bool:
+    typevars_map = get_model_typevars_map(cls)
+    fields, class_vars = collect_model_fields(cls, bases, config_wrapper, types_namespace, typevars_map=typevars_map)
+
+    cls.model_fields = fields
+    cls.__class_vars__.update(class_vars)
+
+    for k in class_vars:
+        # Class vars should not be private attributes
+        #     We remove them _here_ and not earlier because we rely on inspecting the class to determine its classvars,
+        #     but private attributes are determined by inspecting the namespace _prior_ to class creation.
+        #     In the case that a classvar with a leading-'_' is defined via a ForwardRef (e.g., when using
+        #     `__future__.annotations`), we want to remove the private attribute which was detected _before_ we knew it
+        #     evaluated to a classvar
+
+        value = cls.__private_attributes__.pop(k, None)
+        if value is not None and value.default is not PydanticUndefined:
+            setattr(cls, k, value.default)
+
+
+def complete_model_class(
+    cls: type[BaseModel],
+    cls_name: str,
+    config_wrapper: ConfigWrapper,
+    *,
+    raise_errors: bool = True,
+    types_namespace: dict[str, Any] | None,
+    create_model_module: str | None = None,
+) -> bool:
     """Finish building a model class.

     This logic must be called after class has been created since validation functions must be bound
@@ -297,12 +513,79 @@ def complete_model_class(cls: type[BaseModel], cls_name: str,
         PydanticUndefinedAnnotation: If `PydanticUndefinedAnnotation` occurs in `__get_pydantic_core_schema__`
             and `raise_errors=True`.
     """
-    pass
-
-
-def set_deprecated_descriptors(cls: type[BaseModel]) ->None:
+    typevars_map = get_model_typevars_map(cls)
+    gen_schema = GenerateSchema(
+        config_wrapper,
+        types_namespace,
+        typevars_map,
+    )
+
+    handler = CallbackGetCoreSchemaHandler(
+        partial(gen_schema.generate_schema, from_dunder_get_core_schema=False),
+        gen_schema,
+        ref_mode='unpack',
+    )
+
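+    # With deferred builds enabled for models, install mock validator/serializer instead of building the schema now.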
+    if config_wrapper.defer_build and 'model' in config_wrapper.experimental_defer_build_mode:
+        set_model_mocks(cls, cls_name)
+        return False
+
+    try:
+        schema = cls.__get_pydantic_core_schema__(cls, handler)
+    except PydanticUndefinedAnnotation as e:
+        if raise_errors:
+            raise
+        set_model_mocks(cls, cls_name, f'`{e.name}`')
+        return False
+
+    core_config = config_wrapper.core_config(cls)
+
+    try:
+        schema = gen_schema.clean_schema(schema)
+    except gen_schema.CollectedInvalid:
+        set_model_mocks(cls, cls_name)
+        return False
+
+    # debug(schema)
+    cls.__pydantic_core_schema__ = schema
+
+    cls.__pydantic_validator__ = create_schema_validator(
+        schema,
+        cls,
+        create_model_module or cls.__module__,
+        cls.__qualname__,
+        'create_model' if create_model_module else 'BaseModel',
+        core_config,
+        config_wrapper.plugin_settings,
+    )
+    cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config)
+    cls.__pydantic_complete__ = True
+
+    # set __signature__ attr only for model class, but not for its instances
+    cls.__signature__ = ClassAttribute(
+        '__signature__',
+        generate_pydantic_signature(init=cls.__init__, fields=cls.model_fields, config_wrapper=config_wrapper),
+    )
+    return True
+
+
+def set_deprecated_descriptors(cls: type[BaseModel]) -> None:
     """Set data descriptors on the class for deprecated fields."""
-    pass
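+    # For regular fields, install a data descriptor that emits a DeprecationWarning when the field is accessed.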
+    for field, field_info in cls.model_fields.items():
+        if (msg := field_info.deprecation_message) is not None:
+            desc = _DeprecatedFieldDescriptor(msg)
+            desc.__set_name__(cls, field)
+            setattr(cls, field, desc)
+
+    for field, computed_field_info in cls.model_computed_fields.items():
+        if (
+            (msg := computed_field_info.deprecation_message) is not None
+            # Avoid having two warnings emitted:
+            and not hasattr(unwrap_wrapped_function(computed_field_info.wrapped_property), '__deprecated__')
+        ):
+            desc = _DeprecatedFieldDescriptor(msg, computed_field_info.wrapped_property)
+            desc.__set_name__(cls, field)
+            setattr(cls, field, desc)


 class _DeprecatedFieldDescriptor:
@@ -313,26 +596,30 @@ class _DeprecatedFieldDescriptor:
         wrapped_property: The property instance if the deprecated field is a computed field, or `None`.
         field_name: The name of the field being deprecated.
     """
+
     field_name: str

-    def __init__(self, msg: str, wrapped_property: (property | None)=None
-        ) ->None:
+    def __init__(self, msg: str, wrapped_property: property | None = None) -> None:
         self.msg = msg
         self.wrapped_property = wrapped_property

-    def __set_name__(self, cls: type[BaseModel], name: str) ->None:
+    def __set_name__(self, cls: type[BaseModel], name: str) -> None:
         self.field_name = name

-    def __get__(self, obj: (BaseModel | None), obj_type: (type[BaseModel] |
-        None)=None) ->Any:
+    def __get__(self, obj: BaseModel | None, obj_type: type[BaseModel] | None = None) -> Any:
         if obj is None:
             raise AttributeError(self.field_name)
+
         warnings.warn(self.msg, builtins.DeprecationWarning, stacklevel=2)
+
         if self.wrapped_property is not None:
             return self.wrapped_property.__get__(obj, obj_type)
         return obj.__dict__[self.field_name]

-    def __set__(self, obj: Any, value: Any) ->NoReturn:
+    # Defined to take precedence over the instance's dictionary
+    # Note that it will not be called when setting a value on a model instance
+    # as `BaseModel.__setattr__` is defined and takes priority.
+    def __set__(self, obj: Any, value: Any) -> NoReturn:
         raise AttributeError(self.field_name)


@@ -355,23 +642,23 @@ class _PydanticWeakRef:

     def __init__(self, obj: Any):
         if obj is None:
+            # The object will be `None` upon deserialization if the serialized weakref
+            # had lost its underlying object.
             self._wr = None
         else:
             self._wr = weakref.ref(obj)

-    def __call__(self) ->Any:
+    def __call__(self) -> Any:
         if self._wr is None:
             return None
         else:
             return self._wr()

-    def __reduce__(self) ->tuple[Callable, tuple[weakref.ReferenceType | None]
-        ]:
+    def __reduce__(self) -> tuple[Callable, tuple[weakref.ReferenceType | None]]:
         return _PydanticWeakRef, (self(),)


-def build_lenient_weakvaluedict(d: (dict[str, Any] | None)) ->(dict[str,
-    Any] | None):
+def build_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None:
     """Takes an input dictionary, and produces a new value that (invertibly) replaces the values with weakrefs.

     We can't just use a WeakValueDictionary because many types (including int, str, etc.) can't be stored as values
@@ -379,10 +666,43 @@ def build_lenient_weakvaluedict(d: (dict[str, Any] | None)) ->(dict[str,

     The `unpack_lenient_weakvaluedict` function can be used to reverse this operation.
     """
-    pass
-
-
-def unpack_lenient_weakvaluedict(d: (dict[str, Any] | None)) ->(dict[str,
-    Any] | None):
+    if d is None:
+        return None
+    result = {}
+    for k, v in d.items():
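+        # Values that can't be weakly referenced (e.g. int, str, None) raise TypeError; store them as-is.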
+        try:
+            proxy = _PydanticWeakRef(v)
+        except TypeError:
+            proxy = v
+        result[k] = proxy
+    return result
+
+
+def unpack_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None:
     """Inverts the transform performed by `build_lenient_weakvaluedict`."""
-    pass
+    if d is None:
+        return None
+
+    result = {}
+    for k, v in d.items():
+        if isinstance(v, _PydanticWeakRef):
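+            # drop entries whose referent has been garbage collected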
+            v = v()
+            if v is not None:
+                result[k] = v
+        else:
+            result[k] = v
+    return result
+
+
+def default_ignored_types() -> tuple[type[Any], ...]:
+    from ..fields import ComputedFieldInfo
+
+    return (
+        FunctionType,
+        property,
+        classmethod,
+        staticmethod,
+        PydanticDescriptorProxy,
+        ComputedFieldInfo,
+        ValidateCallWrapper,
+    )
diff --git a/pydantic/_internal/_repr.py b/pydantic/_internal/_repr.py
index ad15026c1..83cb4cc7d 100644
--- a/pydantic/_internal/_repr.py
+++ b/pydantic/_internal/_repr.py
@@ -1,16 +1,20 @@
 """Tools to provide pretty/human-readable display of objects."""
+
 from __future__ import annotations as _annotations
+
 import types
 import typing
 from typing import Any
+
 import typing_extensions
+
 from . import _typing_extra
+
 if typing.TYPE_CHECKING:
-    ReprArgs: typing_extensions.TypeAlias = (
-        'typing.Iterable[tuple[str | None, Any]]')
+    ReprArgs: typing_extensions.TypeAlias = 'typing.Iterable[tuple[str | None, Any]]'
     RichReprResult: typing_extensions.TypeAlias = (
         'typing.Iterable[Any | tuple[Any] | tuple[str, Any] | tuple[str, Any, Any]]'
-        )
+    )


 class PlainRepr(str):
@@ -18,14 +22,20 @@ class PlainRepr(str):
     representation of something that is valid (or pseudo-valid) python.
     """

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return str(self)


 class Representation:
-    __slots__ = tuple()
+    # Mixin to provide `__str__`, `__repr__`, `__pretty__`, and `__rich_repr__` methods.
+    # `__pretty__` is used by [devtools](https://python-devtools.helpmanual.io/).
+    # `__rich_repr__` is used by [rich](https://rich.readthedocs.io/en/stable/pretty.html).
+    # (this is not a docstring to avoid adding a docstring to classes which inherit from Representation)
+
+    # we don't want to use a type annotation here as it can break get_type_hints
+    __slots__ = tuple()  # type: typing.Collection[str]

-    def __repr_args__(self) ->ReprArgs:
+    def __repr_args__(self) -> ReprArgs:
         """Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden.

         Can either return:
@@ -38,16 +48,14 @@ class Representation:
         attrs = ((s, getattr(self, s)) for s in attrs_names)
         return [(a, v) for a, v in attrs if v is not None]

-    def __repr_name__(self) ->str:
+    def __repr_name__(self) -> str:
         """Name of the instance's class, used in __repr__."""
         return self.__class__.__name__

-    def __repr_str__(self, join_str: str) ->str:
-        return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a,
-            v in self.__repr_args__())
+    def __repr_str__(self, join_str: str) -> str:
+        return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__())

-    def __pretty__(self, fmt: typing.Callable[[Any], Any], **kwargs: Any
-        ) ->typing.Generator[Any, None, None]:
+    def __pretty__(self, fmt: typing.Callable[[Any], Any], **kwargs: Any) -> typing.Generator[Any, None, None]:
         """Used by devtools (https://python-devtools.helpmanual.io/) to pretty print objects."""
         yield self.__repr_name__() + '('
         yield 1
@@ -60,7 +68,7 @@ class Representation:
         yield -1
         yield ')'

-    def __rich_repr__(self) ->RichReprResult:
+    def __rich_repr__(self) -> RichReprResult:
         """Used by Rich (https://rich.readthedocs.io/en/stable/pretty.html) to pretty print objects."""
         for name, field_repr in self.__repr_args__():
             if name is None:
@@ -68,16 +76,43 @@ class Representation:
             else:
                 yield name, field_repr

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return self.__repr_str__(' ')

-    def __repr__(self) ->str:
-        return f"{self.__repr_name__()}({self.__repr_str__(', ')})"
+    def __repr__(self) -> str:
+        return f'{self.__repr_name__()}({self.__repr_str__(", ")})'


-def display_as_type(obj: Any) ->str:
+def display_as_type(obj: Any) -> str:
     """Pretty representation of a type, should be as close as possible to the original type definition string.

     Takes some logic from `typing._type_repr`.
     """
-    pass
+    if isinstance(obj, types.FunctionType):
+        return obj.__name__
+    elif obj is ...:
+        return '...'
+    elif isinstance(obj, Representation):
+        return repr(obj)
+    elif isinstance(obj, typing_extensions.TypeAliasType):
+        return str(obj)
+
+    if not isinstance(obj, (_typing_extra.typing_base, _typing_extra.WithArgsTypes, type)):
+        obj = obj.__class__
+
+    if _typing_extra.origin_is_union(typing_extensions.get_origin(obj)):
+        args = ', '.join(map(display_as_type, typing_extensions.get_args(obj)))
+        return f'Union[{args}]'
+    elif isinstance(obj, _typing_extra.WithArgsTypes):
+        if typing_extensions.get_origin(obj) == typing_extensions.Literal:
+            args = ', '.join(map(repr, typing_extensions.get_args(obj)))
+        else:
+            args = ', '.join(map(display_as_type, typing_extensions.get_args(obj)))
+        try:
+            return f'{obj.__qualname__}[{args}]'
+        except AttributeError:
+            return str(obj)  # handles TypeAliasType in 3.12
+    elif isinstance(obj, type):
+        return obj.__qualname__
+    else:
+        return repr(obj).replace('typing.', '').replace('typing_extensions.', '')
diff --git a/pydantic/_internal/_schema_generation_shared.py b/pydantic/_internal/_schema_generation_shared.py
index 34203092c..f35c665d2 100644
--- a/pydantic/_internal/_schema_generation_shared.py
+++ b/pydantic/_internal/_schema_generation_shared.py
@@ -1,15 +1,20 @@
 """Types and utility functions used by various other internal tools."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING, Any, Callable
+
 from pydantic_core import core_schema
 from typing_extensions import Literal
+
 from ..annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
+
 if TYPE_CHECKING:
     from ..json_schema import GenerateJsonSchema, JsonSchemaValue
     from ._core_utils import CoreSchemaOrField
     from ._generate_schema import GenerateSchema
-    GetJsonSchemaFunction = Callable[[CoreSchemaOrField,
-        GetJsonSchemaHandler], JsonSchemaValue]
+
+    GetJsonSchemaFunction = Callable[[CoreSchemaOrField, GetJsonSchemaHandler], JsonSchemaValue]
     HandlerOverride = Callable[[CoreSchemaOrField], JsonSchemaValue]


@@ -23,17 +28,15 @@ class GenerateJsonSchemaHandler(GetJsonSchemaHandler):
     See `GetJsonSchemaHandler` for the handler API.
     """

-    def __init__(self, generate_json_schema: GenerateJsonSchema,
-        handler_override: (HandlerOverride | None)) ->None:
+    def __init__(self, generate_json_schema: GenerateJsonSchema, handler_override: HandlerOverride | None) -> None:
         self.generate_json_schema = generate_json_schema
         self.handler = handler_override or generate_json_schema.generate_inner
         self.mode = generate_json_schema.mode

-    def __call__(self, core_schema: CoreSchemaOrField, /) ->JsonSchemaValue:
+    def __call__(self, core_schema: CoreSchemaOrField, /) -> JsonSchemaValue:
         return self.handler(core_schema)

-    def resolve_ref_schema(self, maybe_ref_json_schema: JsonSchemaValue
-        ) ->JsonSchemaValue:
+    def resolve_ref_schema(self, maybe_ref_json_schema: JsonSchemaValue) -> JsonSchemaValue:
         """Resolves `$ref` in the json schema.

         This returns the input json schema if there is no `$ref` in json schema.
@@ -47,7 +50,16 @@ class GenerateJsonSchemaHandler(GetJsonSchemaHandler):
         Raises:
             LookupError: If it can't find the definition for `$ref`.
         """
-        pass
+        if '$ref' not in maybe_ref_json_schema:
+            return maybe_ref_json_schema
+        ref = maybe_ref_json_schema['$ref']
+        json_schema = self.generate_json_schema.get_schema_from_definitions(ref)
+        if json_schema is None:
+            raise LookupError(
+                f'Could not find a ref for {ref}.'
+                ' Maybe you tried to call resolve_ref_schema from within a recursive model?'
+            )
+        return json_schema


 class CallbackGetCoreSchemaHandler(GetCoreSchemaHandler):
@@ -57,14 +69,17 @@ class CallbackGetCoreSchemaHandler(GetCoreSchemaHandler):
     See `GetCoreSchemaHandler` for the handler API.
     """

-    def __init__(self, handler: Callable[[Any], core_schema.CoreSchema],
-        generate_schema: GenerateSchema, ref_mode: Literal['to-def',
-        'unpack']='to-def') ->None:
+    def __init__(
+        self,
+        handler: Callable[[Any], core_schema.CoreSchema],
+        generate_schema: GenerateSchema,
+        ref_mode: Literal['to-def', 'unpack'] = 'to-def',
+    ) -> None:
         self._handler = handler
         self._generate_schema = generate_schema
         self._ref_mode = ref_mode

-    def __call__(self, source_type: Any, /) ->core_schema.CoreSchema:
+    def __call__(self, source_type: Any, /) -> core_schema.CoreSchema:
         schema = self._handler(source_type)
         ref = schema.get('ref')
         if self._ref_mode == 'to-def':
@@ -72,11 +87,20 @@ class CallbackGetCoreSchemaHandler(GetCoreSchemaHandler):
                 self._generate_schema.defs.definitions[ref] = schema
                 return core_schema.definition_reference_schema(ref)
             return schema
-        else:
+        else:  # ref_mode = 'unpack'
             return self.resolve_ref_schema(schema)

-    def resolve_ref_schema(self, maybe_ref_schema: core_schema.CoreSchema
-        ) ->core_schema.CoreSchema:
+    def _get_types_namespace(self) -> dict[str, Any] | None:
+        return self._generate_schema._types_namespace
+
+    def generate_schema(self, source_type: Any, /) -> core_schema.CoreSchema:
+        return self._generate_schema.generate_schema(source_type)
+
+    @property
+    def field_name(self) -> str | None:
+        return self._generate_schema.field_name_stack.get()
+
+    def resolve_ref_schema(self, maybe_ref_schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
         """Resolves reference in the core schema.

         Args:
@@ -88,4 +112,14 @@ class CallbackGetCoreSchemaHandler(GetCoreSchemaHandler):
         Raises:
             LookupError: If it can't find the definition for reference.
         """
-        pass
+        if maybe_ref_schema['type'] == 'definition-ref':
+            ref = maybe_ref_schema['schema_ref']
+            if ref not in self._generate_schema.defs.definitions:
+                raise LookupError(
+                    f'Could not find a ref for {ref}.'
+                    ' Maybe you tried to call resolve_ref_schema from within a recursive model?'
+                )
+            return self._generate_schema.defs.definitions[ref]
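+        # a 'definitions' schema wraps the target schema under its 'schema' key; unwrap it recursively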
+        elif maybe_ref_schema['type'] == 'definitions':
+            return self.resolve_ref_schema(maybe_ref_schema['schema'])
+        return maybe_ref_schema
diff --git a/pydantic/_internal/_signature.py b/pydantic/_internal/_signature.py
index f5663caa3..816a1651b 100644
--- a/pydantic/_internal/_signature.py
+++ b/pydantic/_internal/_signature.py
@@ -1,15 +1,19 @@
 from __future__ import annotations
+
 import dataclasses
 from inspect import Parameter, Signature, signature
 from typing import TYPE_CHECKING, Any, Callable
+
 from pydantic_core import PydanticUndefined
+
 from ._config import ConfigWrapper
 from ._utils import is_valid_identifier
+
 if TYPE_CHECKING:
     from ..fields import FieldInfo


-def _field_name_for_signature(field_name: str, field_info: FieldInfo) ->str:
+def _field_name_for_signature(field_name: str, field_info: FieldInfo) -> str:
     """Extract the correct name to use for the field when generating a signature.

     Assuming the field has a valid alias, this will return the alias. Otherwise, it will return the field name.
@@ -22,10 +26,15 @@ def _field_name_for_signature(field_name: str, field_info: FieldInfo) ->str:
     Returns:
         The correct name to use when generating a signature.
     """
-    pass
+
+    def _alias_if_valid(x: Any) -> str | None:
+        """Return the alias if it is a valid alias and identifier, else None."""
+        return x if isinstance(x, str) and is_valid_identifier(x) else None
+
+    return _alias_if_valid(field_info.alias) or _alias_if_valid(field_info.validation_alias) or field_name


-def _process_param_defaults(param: Parameter) ->Parameter:
+def _process_param_defaults(param: Parameter) -> Parameter:
     """Modify the signature for a parameter in a dataclass where the default value is a FieldInfo instance.

     Args:
@@ -34,18 +43,108 @@ def _process_param_defaults(param: Parameter) ->Parameter:
     Returns:
         Parameter: The custom processed parameter
     """
-    pass
+    from ..fields import FieldInfo
+
+    param_default = param.default
+    if isinstance(param_default, FieldInfo):
+        annotation = param.annotation
+        # Replace the annotation if appropriate
+        # inspect does "clever" things to show annotations as strings because we have
+        # `from __future__ import annotations` in main, we don't want that
+        if annotation == 'Any':
+            annotation = Any
+
+        # Replace the field default
+        default = param_default.default
+        if default is PydanticUndefined:
+            if param_default.default_factory is PydanticUndefined:
+                default = Signature.empty
+            else:
+                # this is used by dataclasses to indicate a factory exists:
+                default = dataclasses._HAS_DEFAULT_FACTORY  # type: ignore
+        return param.replace(
+            annotation=annotation, name=_field_name_for_signature(param.name, param_default), default=default
+        )
+    return param


-def _generate_signature_parameters(init: Callable[..., None], fields: dict[
-    str, FieldInfo], config_wrapper: ConfigWrapper) ->dict[str, Parameter]:
+def _generate_signature_parameters(  # noqa: C901 (ignore complexity, could use a refactor)
+    init: Callable[..., None],
+    fields: dict[str, FieldInfo],
+    config_wrapper: ConfigWrapper,
+) -> dict[str, Parameter]:
     """Generate a mapping of parameter names to Parameter objects for a pydantic BaseModel or dataclass."""
-    pass
+    from itertools import islice
+
+    present_params = signature(init).parameters.values()
+    merged_params: dict[str, Parameter] = {}
+    var_kw = None
+    use_var_kw = False
+
+    for param in islice(present_params, 1, None):  # skip self arg
+        # inspect does "clever" things to show annotations as strings because we have
+        # `from __future__ import annotations` in main, we don't want that
+        if fields.get(param.name):
+            # exclude params with init=False
+            if getattr(fields[param.name], 'init', True) is False:
+                continue
+            param = param.replace(name=_field_name_for_signature(param.name, fields[param.name]))
+        if param.annotation == 'Any':
+            param = param.replace(annotation=Any)
+        if param.kind is param.VAR_KEYWORD:
+            var_kw = param
+            continue
+        merged_params[param.name] = param

+    if var_kw:  # if custom init has no var_kw, fields which are not declared in it cannot be passed through
+        allow_names = config_wrapper.populate_by_name
+        for field_name, field in fields.items():
+            # when alias is a str it should be used for signature generation
+            param_name = _field_name_for_signature(field_name, field)

-def generate_pydantic_signature(init: Callable[..., None], fields: dict[str,
-    FieldInfo], config_wrapper: ConfigWrapper, is_dataclass: bool=False
-    ) ->Signature:
+            if field_name in merged_params or param_name in merged_params:
+                continue
+
+            if not is_valid_identifier(param_name):
+                if allow_names:
+                    param_name = field_name
+                else:
+                    use_var_kw = True
+                    continue
+
+            kwargs = {} if field.is_required() else {'default': field.get_default(call_default_factory=False)}
+            merged_params[param_name] = Parameter(
+                param_name, Parameter.KEYWORD_ONLY, annotation=field.rebuild_annotation(), **kwargs
+            )
+
+    if config_wrapper.extra == 'allow':
+        use_var_kw = True
+
+    if var_kw and use_var_kw:
+        # Make sure the parameter for extra kwargs
+        # does not have the same name as a field
+        default_model_signature = [
+            ('self', Parameter.POSITIONAL_ONLY),
+            ('data', Parameter.VAR_KEYWORD),
+        ]
+        if [(p.name, p.kind) for p in present_params] == default_model_signature:
+            # if this is the standard model signature, use extra_data as the extra args name
+            var_kw_name = 'extra_data'
+        else:
+            # else start from var_kw
+            var_kw_name = var_kw.name
+
+        # generate a name that's definitely unique
+        while var_kw_name in fields:
+            var_kw_name += '_'
+        merged_params[var_kw_name] = var_kw.replace(name=var_kw_name)
+
+    return merged_params
+
+
+def generate_pydantic_signature(
+    init: Callable[..., None], fields: dict[str, FieldInfo], config_wrapper: ConfigWrapper, is_dataclass: bool = False
+) -> Signature:
     """Generate signature for a pydantic BaseModel or dataclass.

     Args:
@@ -57,4 +156,9 @@ def generate_pydantic_signature(init: Callable[..., None], fields: dict[str,
     Returns:
         The dataclass/BaseModel subclass signature.
     """
-    pass
+    merged_params = _generate_signature_parameters(init, fields, config_wrapper)
+
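+    # For dataclasses, parameter defaults may be `FieldInfo` instances; replace them with the actual default
+    # (or the dataclass default-factory marker).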
+    if is_dataclass:
+        merged_params = {k: _process_param_defaults(v) for k, v in merged_params.items()}
+
+    return Signature(parameters=list(merged_params.values()), return_annotation=None)
diff --git a/pydantic/_internal/_std_types_schema.py b/pydantic/_internal/_std_types_schema.py
index 9dc4c792a..9f8b95890 100644
--- a/pydantic/_internal/_std_types_schema.py
+++ b/pydantic/_internal/_std_types_schema.py
@@ -2,7 +2,9 @@

 Import of this module is deferred since it contains imports of many standard library modules.
 """
+
 from __future__ import annotations as _annotations
+
 import collections
 import collections.abc
 import dataclasses
@@ -15,49 +17,128 @@ from functools import partial
 from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
 from operator import attrgetter
 from typing import Any, Callable, Iterable, Literal, Tuple, TypeVar
+
 import typing_extensions
-from pydantic_core import CoreSchema, MultiHostUrl, PydanticCustomError, PydanticOmit, Url, core_schema
+from pydantic_core import (
+    CoreSchema,
+    MultiHostUrl,
+    PydanticCustomError,
+    PydanticOmit,
+    Url,
+    core_schema,
+)
 from typing_extensions import get_args, get_origin
+
 from pydantic.errors import PydanticSchemaGenerationError
 from pydantic.fields import FieldInfo
 from pydantic.types import Strict
+
 from ..config import ConfigDict
 from ..json_schema import JsonSchemaValue
 from . import _known_annotated_metadata, _typing_extra, _validators
 from ._core_utils import get_type_ref
 from ._internal_dataclass import slots_true
 from ._schema_generation_shared import GetCoreSchemaHandler, GetJsonSchemaHandler
+
 if typing.TYPE_CHECKING:
     from ._generate_schema import GenerateSchema
-    StdSchemaFunction = Callable[[GenerateSchema, type[Any]], core_schema.
-        CoreSchema]
+
+    StdSchemaFunction = Callable[[GenerateSchema, type[Any]], core_schema.CoreSchema]


 @dataclasses.dataclass(**slots_true)
 class SchemaTransformer:
     get_core_schema: Callable[[Any, GetCoreSchemaHandler], CoreSchema]
-    get_json_schema: Callable[[CoreSchema, GetJsonSchemaHandler],
-        JsonSchemaValue]
+    get_json_schema: Callable[[CoreSchema, GetJsonSchemaHandler], JsonSchemaValue]

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        GetCoreSchemaHandler) ->CoreSchema:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
         return self.get_core_schema(source_type, handler)

-    def __get_pydantic_json_schema__(self, schema: CoreSchema, handler:
-        GetJsonSchemaHandler) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(self, schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
         return self.get_json_schema(schema, handler)


+def get_enum_core_schema(enum_type: type[Enum], config: ConfigDict) -> CoreSchema:
+    cases: list[Any] = list(enum_type.__members__.values())
+
+    enum_ref = get_type_ref(enum_type)
+    description = None if not enum_type.__doc__ else inspect.cleandoc(enum_type.__doc__)
+    if description == 'An enumeration.':  # This is the default value provided by enum.EnumMeta.__new__; don't use it
+        description = None
+    js_updates = {'title': enum_type.__name__, 'description': description}
+    js_updates = {k: v for k, v in js_updates.items() if v is not None}
+
+    sub_type: Literal['str', 'int', 'float'] | None = None
+    if issubclass(enum_type, int):
+        sub_type = 'int'
+        value_ser_type: core_schema.SerSchema = core_schema.simple_ser_schema('int')
+    elif issubclass(enum_type, str):
+        # this handles `StrEnum` (3.11 only), and also `Foobar(str, Enum)`
+        sub_type = 'str'
+        value_ser_type = core_schema.simple_ser_schema('str')
+    elif issubclass(enum_type, float):
+        sub_type = 'float'
+        value_ser_type = core_schema.simple_ser_schema('float')
+    else:
+        # TODO this is an ugly hack, how do we trigger an Any schema for serialization?
+        value_ser_type = core_schema.plain_serializer_function_ser_schema(lambda x: x)
+
+    if cases:
+
+        def get_json_schema(schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
+            json_schema = handler(schema)
+            original_schema = handler.resolve_ref_schema(json_schema)
+            original_schema.update(js_updates)
+            return json_schema
+
+        # we don't want to add the missing to the schema if it's the default one
+        default_missing = getattr(enum_type._missing_, '__func__', None) == Enum._missing_.__func__  # type: ignore
+        enum_schema = core_schema.enum_schema(
+            enum_type,
+            cases,
+            sub_type=sub_type,
+            missing=None if default_missing else enum_type._missing_,
+            ref=enum_ref,
+            metadata={'pydantic_js_functions': [get_json_schema]},
+        )
+
+        if config.get('use_enum_values', False):
+            enum_schema = core_schema.no_info_after_validator_function(
+                attrgetter('value'), enum_schema, serialization=value_ser_type
+            )
+
+        return enum_schema
+
+    else:
+
+        def get_json_schema_no_cases(_, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
+            json_schema = handler(core_schema.enum_schema(enum_type, cases, sub_type=sub_type, ref=enum_ref))
+            original_schema = handler.resolve_ref_schema(json_schema)
+            original_schema.update(js_updates)
+            return json_schema
+
+        # Use an isinstance check for enums with no cases.
+        # The most important use case for this is creating TypeVar bounds for generics that should
+        # be restricted to enums. This is more consistent than it might seem at first, since you can only
+        # subclass enum.Enum (or subclasses of enum.Enum) if all parent classes have no cases.
+        # We use the get_json_schema function when an Enum subclass has been declared with no cases
+        # so that we can still generate a valid json schema.
+        return core_schema.is_instance_schema(
+            enum_type,
+            metadata={'pydantic_js_functions': [get_json_schema_no_cases]},
+        )
+
+
 @dataclasses.dataclass(**slots_true)
 class InnerSchemaValidator:
     """Use a fixed CoreSchema, avoiding interference from outward annotations."""
+
     core_schema: CoreSchema
     js_schema: JsonSchemaValue | None = None
     js_core_schema: CoreSchema | None = None
     js_schema_update: JsonSchemaValue | None = None

-    def __get_pydantic_json_schema__(self, _schema: CoreSchema, handler:
-        GetJsonSchemaHandler) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(self, _schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
         if self.js_schema is not None:
             return self.js_schema
         js_schema = handler(self.js_core_schema or self.core_schema)
@@ -65,11 +146,160 @@ class InnerSchemaValidator:
             js_schema.update(self.js_schema_update)
         return js_schema

-    def __get_pydantic_core_schema__(self, _source_type: Any, _handler:
-        GetCoreSchemaHandler) ->CoreSchema:
+    def __get_pydantic_core_schema__(self, _source_type: Any, _handler: GetCoreSchemaHandler) -> CoreSchema:
         return self.core_schema


+def decimal_prepare_pydantic_annotations(
+    source: Any, annotations: Iterable[Any], config: ConfigDict
+) -> tuple[Any, list[Any]] | None:
+    if source is not decimal.Decimal:
+        return None
+
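+    # Separate the constraint metadata pydantic knows how to apply from any remaining annotations.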
+    metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
+
+    config_allow_inf_nan = config.get('allow_inf_nan')
+    if config_allow_inf_nan is not None:
+        metadata.setdefault('allow_inf_nan', config_allow_inf_nan)
+
+    _known_annotated_metadata.check_metadata(
+        metadata, {*_known_annotated_metadata.FLOAT_CONSTRAINTS, 'max_digits', 'decimal_places'}, decimal.Decimal
+    )
+    return source, [InnerSchemaValidator(core_schema.decimal_schema(**metadata)), *remaining_annotations]
+
+
+def datetime_prepare_pydantic_annotations(
+    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
+) -> tuple[Any, list[Any]] | None:
+    import datetime
+
+    metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
+    if source_type is datetime.date:
+        sv = InnerSchemaValidator(core_schema.date_schema(**metadata))
+    elif source_type is datetime.datetime:
+        sv = InnerSchemaValidator(core_schema.datetime_schema(**metadata))
+    elif source_type is datetime.time:
+        sv = InnerSchemaValidator(core_schema.time_schema(**metadata))
+    elif source_type is datetime.timedelta:
+        sv = InnerSchemaValidator(core_schema.timedelta_schema(**metadata))
+    else:
+        return None
+    # check now that we know the source type is correct
+    _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.DATE_TIME_CONSTRAINTS, source_type)
+    return (source_type, [sv, *remaining_annotations])
+
+
+def uuid_prepare_pydantic_annotations(
+    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
+) -> tuple[Any, list[Any]] | None:
+    # UUIDs have no constraints - they are fixed length, constructing a UUID instance checks the length
+
+    from uuid import UUID
+
+    if source_type is not UUID:
+        return None
+
+    return (source_type, [InnerSchemaValidator(core_schema.uuid_schema()), *annotations])
+
+
+def path_schema_prepare_pydantic_annotations(
+    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
+) -> tuple[Any, list[Any]] | None:
+    import pathlib
+
+    if source_type not in {
+        os.PathLike,
+        pathlib.Path,
+        pathlib.PurePath,
+        pathlib.PosixPath,
+        pathlib.PurePosixPath,
+        pathlib.PureWindowsPath,
+    }:
+        return None
+
+    metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
+    _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.STR_CONSTRAINTS, source_type)
+
+    construct_path = pathlib.PurePath if source_type is os.PathLike else source_type
+
+    def path_validator(input_value: str) -> os.PathLike[Any]:
+        try:
+            return construct_path(input_value)
+        except TypeError as e:
+            raise PydanticCustomError('path_type', 'Input is not a valid path') from e
+
+    constrained_str_schema = core_schema.str_schema(**metadata)
+
+    instance_schema = core_schema.json_or_python_schema(
+        json_schema=core_schema.no_info_after_validator_function(path_validator, constrained_str_schema),
+        python_schema=core_schema.is_instance_schema(source_type),
+    )
+
+    strict: bool | None = None
+    for annotation in annotations:
+        if isinstance(annotation, Strict):
+            strict = annotation.strict
+
+    schema = core_schema.lax_or_strict_schema(
+        lax_schema=core_schema.union_schema(
+            [
+                instance_schema,
+                core_schema.no_info_after_validator_function(path_validator, constrained_str_schema),
+            ],
+            custom_error_type='path_type',
+            custom_error_message='Input is not a valid path',
+            strict=True,
+        ),
+        strict_schema=instance_schema,
+        serialization=core_schema.to_string_ser_schema(),
+        strict=strict,
+    )
+
+    return (
+        source_type,
+        [
+            InnerSchemaValidator(schema, js_core_schema=constrained_str_schema, js_schema_update={'format': 'path'}),
+            *remaining_annotations,
+        ],
+    )
+
+
+def dequeue_validator(
+    input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, maxlen: None | int
+) -> collections.deque[Any]:
+    if isinstance(input_value, collections.deque):
+        maxlens = [v for v in (input_value.maxlen, maxlen) if v is not None]
+        if maxlens:
+            maxlen = min(maxlens)
+        return collections.deque(handler(input_value), maxlen=maxlen)
+    else:
+        return collections.deque(handler(input_value), maxlen=maxlen)
+
+
+def serialize_sequence_via_list(
+    v: Any, handler: core_schema.SerializerFunctionWrapHandler, info: core_schema.SerializationInfo
+) -> Any:
+    items: list[Any] = []
+
+    mapped_origin = SEQUENCE_ORIGIN_MAP.get(type(v), None)
+    if mapped_origin is None:
+        # we shouldn't hit this branch, should probably add a serialization error or something
+        return v
+
+    for index, item in enumerate(v):
+        try:
+            v = handler(item, index)
+        except PydanticOmit:
+            pass
+        else:
+            items.append(v)
+
+    if info.mode_is_json():
+        return items
+    else:
+        return mapped_origin(items)
+
+
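
Editor's note: the maxlen handling in `dequeue_validator` above is easiest to see from the user side. Below is a minimal sketch (not part of the diff) of the intended behaviour, assuming pydantic v2 with `annotated-types` installed; the printed reprs are indicative only.

```python
from collections import deque
from typing import Deque

from annotated_types import MaxLen
from typing_extensions import Annotated

from pydantic import TypeAdapter

ta = TypeAdapter(Annotated[Deque[int], MaxLen(3)])

# a plain list is validated against the inner list schema and coerced to a deque;
# the MaxLen constraint doubles as the deque's maxlen
print(ta.validate_python([1, 2, 3]))                 # deque([1, 2, 3], maxlen=3)

# an existing deque keeps the smaller of its own maxlen and the annotated one
print(ta.validate_python(deque([1, 2], maxlen=2)))   # deque([1, 2], maxlen=2)
```
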
 @dataclasses.dataclass(**slots_true)
 class SequenceValidator:
     mapped_origin: type[Any]
@@ -79,71 +309,201 @@ class SequenceValidator:
     strict: bool | None = None
     fail_fast: bool | None = None

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        GetCoreSchemaHandler) ->CoreSchema:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
         if self.item_source_type is Any:
             items_schema = None
         else:
             items_schema = handler.generate_schema(self.item_source_type)
-        metadata = {'min_length': self.min_length, 'max_length': self.
-            max_length, 'strict': self.strict, 'fail_fast': self.fail_fast}
+
+        metadata = {
+            'min_length': self.min_length,
+            'max_length': self.max_length,
+            'strict': self.strict,
+            'fail_fast': self.fail_fast,
+        }
+
         if self.mapped_origin in (list, set, frozenset):
             if self.mapped_origin is list:
-                constrained_schema = core_schema.list_schema(items_schema,
-                    **metadata)
+                constrained_schema = core_schema.list_schema(items_schema, **metadata)
             elif self.mapped_origin is set:
-                constrained_schema = core_schema.set_schema(items_schema,
-                    **metadata)
+                constrained_schema = core_schema.set_schema(items_schema, **metadata)
             else:
-                assert self.mapped_origin is frozenset
-                constrained_schema = core_schema.frozenset_schema(items_schema,
-                    **metadata)
+                assert self.mapped_origin is frozenset  # safety check in case we forget to add a case
+                constrained_schema = core_schema.frozenset_schema(items_schema, **metadata)
+
             schema = constrained_schema
         else:
-            assert self.mapped_origin in (collections.deque, collections.
-                Counter)
+            # safety check in case we forget to add a case
+            assert self.mapped_origin in (collections.deque, collections.Counter)
+
             if self.mapped_origin is collections.deque:
-                coerce_instance_wrap = partial(core_schema.
-                    no_info_wrap_validator_function, partial(
-                    dequeue_validator, maxlen=metadata.get('max_length', None))
-                    )
+                # if we have a MaxLen annotation we might as well set it as the default maxlen on the deque;
+                # this lets us re-use existing metadata annotations to let users set the maxlen on a deque
+                # that e.g. comes from JSON
+                coerce_instance_wrap = partial(
+                    core_schema.no_info_wrap_validator_function,
+                    partial(dequeue_validator, maxlen=metadata.get('max_length', None)),
+                )
             else:
-                coerce_instance_wrap = partial(core_schema.
-                    no_info_after_validator_function, self.mapped_origin)
+                coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, self.mapped_origin)
+
+            # we have to use a lax list schema here, because we need to validate the deque's
+            # items via a list schema, but it's ok if the deque itself is not a list (same for Counter)
             metadata_with_strict_override = {**metadata, 'strict': False}
-            constrained_schema = core_schema.list_schema(items_schema, **
-                metadata_with_strict_override)
-            check_instance = core_schema.json_or_python_schema(json_schema=
-                core_schema.list_schema(), python_schema=core_schema.
-                is_instance_schema(self.mapped_origin))
+            constrained_schema = core_schema.list_schema(items_schema, **metadata_with_strict_override)
+
+            check_instance = core_schema.json_or_python_schema(
+                json_schema=core_schema.list_schema(),
+                python_schema=core_schema.is_instance_schema(self.mapped_origin),
+            )
+
             serialization = core_schema.wrap_serializer_function_ser_schema(
-                serialize_sequence_via_list, schema=items_schema or
-                core_schema.any_schema(), info_arg=True)
-            strict = core_schema.chain_schema([check_instance,
-                coerce_instance_wrap(constrained_schema)])
+                serialize_sequence_via_list, schema=items_schema or core_schema.any_schema(), info_arg=True
+            )
+
+            strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)])
+
             if metadata.get('strict', False):
                 schema = strict
             else:
                 lax = coerce_instance_wrap(constrained_schema)
-                schema = core_schema.lax_or_strict_schema(lax_schema=lax,
-                    strict_schema=strict)
+                schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict)
             schema['serialization'] = serialization
+
         return schema


-SEQUENCE_ORIGIN_MAP: dict[Any, Any] = {typing.Deque: collections.deque,
-    collections.deque: collections.deque, list: list, typing.List: list,
-    set: set, typing.AbstractSet: set, typing.Set: set, frozenset:
-    frozenset, typing.FrozenSet: frozenset, typing.Sequence: list, typing.
-    MutableSequence: list, typing.MutableSet: set, collections.abc.
-    MutableSet: set, collections.abc.Set: frozenset}
-MAPPING_ORIGIN_MAP: dict[Any, Any] = {typing.DefaultDict: collections.
-    defaultdict, collections.defaultdict: collections.defaultdict,
-    collections.OrderedDict: collections.OrderedDict, typing_extensions.
-    OrderedDict: collections.OrderedDict, dict: dict, typing.Dict: dict,
-    collections.Counter: collections.Counter, typing.Counter: collections.
-    Counter, typing.Mapping: dict, typing.MutableMapping: dict, collections
-    .abc.MutableMapping: dict, collections.abc.Mapping: dict}
+SEQUENCE_ORIGIN_MAP: dict[Any, Any] = {
+    typing.Deque: collections.deque,
+    collections.deque: collections.deque,
+    list: list,
+    typing.List: list,
+    set: set,
+    typing.AbstractSet: set,
+    typing.Set: set,
+    frozenset: frozenset,
+    typing.FrozenSet: frozenset,
+    typing.Sequence: list,
+    typing.MutableSequence: list,
+    typing.MutableSet: set,
+    # this doesn't handle subclasses of these
+    # parametrized typing.Set creates one of these
+    collections.abc.MutableSet: set,
+    collections.abc.Set: frozenset,
+}
+
+
+def identity(s: CoreSchema) -> CoreSchema:
+    return s
+
+
+def sequence_like_prepare_pydantic_annotations(
+    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
+) -> tuple[Any, list[Any]] | None:
+    origin: Any = get_origin(source_type)
+
+    mapped_origin = SEQUENCE_ORIGIN_MAP.get(origin, None) if origin else SEQUENCE_ORIGIN_MAP.get(source_type, None)
+    if mapped_origin is None:
+        return None
+
+    args = get_args(source_type)
+
+    if not args:
+        args = typing.cast(Tuple[Any], (Any,))
+    elif len(args) != 1:
+        raise ValueError('Expected sequence to have exactly 1 generic parameter')
+
+    item_source_type = args[0]
+
+    metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
+    _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type)
+
+    return (source_type, [SequenceValidator(mapped_origin, item_source_type, **metadata), *remaining_annotations])
+
+
+MAPPING_ORIGIN_MAP: dict[Any, Any] = {
+    typing.DefaultDict: collections.defaultdict,
+    collections.defaultdict: collections.defaultdict,
+    collections.OrderedDict: collections.OrderedDict,
+    typing_extensions.OrderedDict: collections.OrderedDict,
+    dict: dict,
+    typing.Dict: dict,
+    collections.Counter: collections.Counter,
+    typing.Counter: collections.Counter,
+    # this doesn't handle subclasses of these
+    typing.Mapping: dict,
+    typing.MutableMapping: dict,
+    # parametrized typing.{Mutable}Mapping creates one of these
+    collections.abc.MutableMapping: dict,
+    collections.abc.Mapping: dict,
+}
+
+
+def defaultdict_validator(
+    input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, default_default_factory: Callable[[], Any]
+) -> collections.defaultdict[Any, Any]:
+    if isinstance(input_value, collections.defaultdict):
+        default_factory = input_value.default_factory
+        return collections.defaultdict(default_factory, handler(input_value))
+    else:
+        return collections.defaultdict(default_default_factory, handler(input_value))
+
+
+def get_defaultdict_default_default_factory(values_source_type: Any) -> Callable[[], Any]:
+    def infer_default() -> Callable[[], Any]:
+        allowed_default_types: dict[Any, Any] = {
+            typing.Tuple: tuple,
+            tuple: tuple,
+            collections.abc.Sequence: tuple,
+            collections.abc.MutableSequence: list,
+            typing.List: list,
+            list: list,
+            typing.Sequence: list,
+            typing.Set: set,
+            set: set,
+            typing.MutableSet: set,
+            collections.abc.MutableSet: set,
+            collections.abc.Set: frozenset,
+            typing.MutableMapping: dict,
+            typing.Mapping: dict,
+            collections.abc.Mapping: dict,
+            collections.abc.MutableMapping: dict,
+            float: float,
+            int: int,
+            str: str,
+            bool: bool,
+        }
+        values_type_origin = get_origin(values_source_type) or values_source_type
+        instructions = 'set using `DefaultDict[..., Annotated[..., Field(default_factory=...)]]`'
+        if isinstance(values_type_origin, TypeVar):
+
+            def type_var_default_factory() -> None:
+                raise RuntimeError(
+                    'Generic defaultdict cannot be used without a concrete value type or an'
+                    ' explicit default factory, ' + instructions
+                )
+
+            return type_var_default_factory
+        elif values_type_origin not in allowed_default_types:
+            # a somewhat subjective set of types that have reasonable default values
+            allowed_msg = ', '.join([t.__name__ for t in set(allowed_default_types.values())])
+            raise PydanticSchemaGenerationError(
+                f'Unable to infer a default factory for keys of type {values_source_type}.'
+                f' Only {allowed_msg} are supported, other types require an explicit default factory'
+                ' ' + instructions
+            )
+        return allowed_default_types[values_type_origin]
+
+    # Assume Annotated[..., Field(...)]
+    if _typing_extra.is_annotated(values_source_type):
+        field_info = next((v for v in get_args(values_source_type) if isinstance(v, FieldInfo)), None)
+    else:
+        field_info = None
+    if field_info and field_info.default_factory:
+        default_default_factory = field_info.default_factory
+    else:
+        default_default_factory = infer_default()
+    return default_default_factory
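
Editor's note: a short usage sketch (not part of the diff) of how the default-factory inference above behaves through the public API, assuming pydantic v2; the outputs shown are indicative.

```python
from typing import DefaultDict, List

from typing_extensions import Annotated

from pydantic import Field, TypeAdapter

# the factory is inferred from the value type: List[int] maps to `list`
ta = TypeAdapter(DefaultDict[str, List[int]])
dd = ta.validate_python({'a': [1, 2]})
print(type(dd), dd['missing'])   # <class 'collections.defaultdict'> []

# an explicit factory attached via Annotated[..., Field(default_factory=...)] takes precedence
ta_explicit = TypeAdapter(DefaultDict[str, Annotated[int, Field(default_factory=lambda: -1)]])
print(ta_explicit.validate_python({})['missing'])  # -1
```
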


 @dataclasses.dataclass(**slots_true)
@@ -155,8 +515,10 @@ class MappingValidator:
     max_length: int | None = None
     strict: bool = False

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        GetCoreSchemaHandler) ->CoreSchema:
+    def serialize_mapping_via_dict(self, v: Any, handler: core_schema.SerializerFunctionWrapHandler) -> Any:
+        return handler(v)
+
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
         if self.keys_source_type is Any:
             keys_schema = None
         else:
@@ -165,49 +527,196 @@ class MappingValidator:
             values_schema = None
         else:
             values_schema = handler.generate_schema(self.values_source_type)
-        metadata = {'min_length': self.min_length, 'max_length': self.
-            max_length, 'strict': self.strict}
+
+        metadata = {'min_length': self.min_length, 'max_length': self.max_length, 'strict': self.strict}
+
         if self.mapped_origin is dict:
-            schema = core_schema.dict_schema(keys_schema, values_schema, **
-                metadata)
+            schema = core_schema.dict_schema(keys_schema, values_schema, **metadata)
         else:
-            constrained_schema = core_schema.dict_schema(keys_schema,
-                values_schema, **metadata)
-            check_instance = core_schema.json_or_python_schema(json_schema=
-                core_schema.dict_schema(), python_schema=core_schema.
-                is_instance_schema(self.mapped_origin))
+            constrained_schema = core_schema.dict_schema(keys_schema, values_schema, **metadata)
+            check_instance = core_schema.json_or_python_schema(
+                json_schema=core_schema.dict_schema(),
+                python_schema=core_schema.is_instance_schema(self.mapped_origin),
+            )
+
             if self.mapped_origin is collections.defaultdict:
-                default_default_factory = (
-                    get_defaultdict_default_default_factory(self.
-                    values_source_type))
-                coerce_instance_wrap = partial(core_schema.
-                    no_info_wrap_validator_function, partial(
-                    defaultdict_validator, default_default_factory=
-                    default_default_factory))
+                default_default_factory = get_defaultdict_default_default_factory(self.values_source_type)
+                coerce_instance_wrap = partial(
+                    core_schema.no_info_wrap_validator_function,
+                    partial(defaultdict_validator, default_default_factory=default_default_factory),
+                )
             else:
-                coerce_instance_wrap = partial(core_schema.
-                    no_info_after_validator_function, self.mapped_origin)
+                coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, self.mapped_origin)
+
             serialization = core_schema.wrap_serializer_function_ser_schema(
-                self.serialize_mapping_via_dict, schema=core_schema.
-                dict_schema(keys_schema or core_schema.any_schema(), 
-                values_schema or core_schema.any_schema()), info_arg=False)
-            strict = core_schema.chain_schema([check_instance,
-                coerce_instance_wrap(constrained_schema)])
+                self.serialize_mapping_via_dict,
+                schema=core_schema.dict_schema(
+                    keys_schema or core_schema.any_schema(), values_schema or core_schema.any_schema()
+                ),
+                info_arg=False,
+            )
+
+            strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)])
+
             if metadata.get('strict', False):
                 schema = strict
             else:
                 lax = coerce_instance_wrap(constrained_schema)
-                schema = core_schema.lax_or_strict_schema(lax_schema=lax,
-                    strict_schema=strict)
+                schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict)
                 schema['serialization'] = serialization
+
         return schema


-PREPARE_METHODS: tuple[Callable[[Any, Iterable[Any], ConfigDict], tuple[Any,
-    list[Any]] | None], ...] = (decimal_prepare_pydantic_annotations,
+def mapping_like_prepare_pydantic_annotations(
+    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
+) -> tuple[Any, list[Any]] | None:
+    origin: Any = get_origin(source_type)
+
+    mapped_origin = MAPPING_ORIGIN_MAP.get(origin, None) if origin else MAPPING_ORIGIN_MAP.get(source_type, None)
+    if mapped_origin is None:
+        return None
+
+    args = get_args(source_type)
+
+    if not args:
+        args = typing.cast(Tuple[Any, Any], (Any, Any))
+    elif mapped_origin is collections.Counter:
+        # a single generic
+        if len(args) != 1:
+            raise ValueError('Expected Counter to have exactly 1 generic parameter')
+        args = (args[0], int)  # the value type of a Counter is always int
+    elif len(args) != 2:
+        raise ValueError('Expected mapping to have exactly 2 generic parameters')
+
+    keys_source_type, values_source_type = args
+
+    metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
+    _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type)
+
+    return (
+        source_type,
+        [
+            MappingValidator(mapped_origin, keys_source_type, values_source_type, **metadata),
+            *remaining_annotations,
+        ],
+    )
+
+
+def ip_prepare_pydantic_annotations(
+    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
+) -> tuple[Any, list[Any]] | None:
+    def make_strict_ip_schema(tp: type[Any]) -> CoreSchema:
+        return core_schema.json_or_python_schema(
+            json_schema=core_schema.no_info_after_validator_function(tp, core_schema.str_schema()),
+            python_schema=core_schema.is_instance_schema(tp),
+        )
+
+    if source_type is IPv4Address:
+        return source_type, [
+            SchemaTransformer(
+                lambda _1, _2: core_schema.lax_or_strict_schema(
+                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_address_validator),
+                    strict_schema=make_strict_ip_schema(IPv4Address),
+                    serialization=core_schema.to_string_ser_schema(),
+                ),
+                lambda _1, _2: {'type': 'string', 'format': 'ipv4'},
+            ),
+            *annotations,
+        ]
+    if source_type is IPv4Network:
+        return source_type, [
+            SchemaTransformer(
+                lambda _1, _2: core_schema.lax_or_strict_schema(
+                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_network_validator),
+                    strict_schema=make_strict_ip_schema(IPv4Network),
+                    serialization=core_schema.to_string_ser_schema(),
+                ),
+                lambda _1, _2: {'type': 'string', 'format': 'ipv4network'},
+            ),
+            *annotations,
+        ]
+    if source_type is IPv4Interface:
+        return source_type, [
+            SchemaTransformer(
+                lambda _1, _2: core_schema.lax_or_strict_schema(
+                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_interface_validator),
+                    strict_schema=make_strict_ip_schema(IPv4Interface),
+                    serialization=core_schema.to_string_ser_schema(),
+                ),
+                lambda _1, _2: {'type': 'string', 'format': 'ipv4interface'},
+            ),
+            *annotations,
+        ]
+
+    if source_type is IPv6Address:
+        return source_type, [
+            SchemaTransformer(
+                lambda _1, _2: core_schema.lax_or_strict_schema(
+                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_address_validator),
+                    strict_schema=make_strict_ip_schema(IPv6Address),
+                    serialization=core_schema.to_string_ser_schema(),
+                ),
+                lambda _1, _2: {'type': 'string', 'format': 'ipv6'},
+            ),
+            *annotations,
+        ]
+    if source_type is IPv6Network:
+        return source_type, [
+            SchemaTransformer(
+                lambda _1, _2: core_schema.lax_or_strict_schema(
+                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_network_validator),
+                    strict_schema=make_strict_ip_schema(IPv6Network),
+                    serialization=core_schema.to_string_ser_schema(),
+                ),
+                lambda _1, _2: {'type': 'string', 'format': 'ipv6network'},
+            ),
+            *annotations,
+        ]
+    if source_type is IPv6Interface:
+        return source_type, [
+            SchemaTransformer(
+                lambda _1, _2: core_schema.lax_or_strict_schema(
+                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_interface_validator),
+                    strict_schema=make_strict_ip_schema(IPv6Interface),
+                    serialization=core_schema.to_string_ser_schema(),
+                ),
+                lambda _1, _2: {'type': 'string', 'format': 'ipv6interface'},
+            ),
+            *annotations,
+        ]
+
+    return None
+
+
+def url_prepare_pydantic_annotations(
+    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
+) -> tuple[Any, list[Any]] | None:
+    if source_type is Url:
+        return source_type, [
+            SchemaTransformer(
+                lambda _1, _2: core_schema.url_schema(),
+                lambda cs, handler: handler(cs),
+            ),
+            *annotations,
+        ]
+    if source_type is MultiHostUrl:
+        return source_type, [
+            SchemaTransformer(
+                lambda _1, _2: core_schema.multi_host_url_schema(),
+                lambda cs, handler: handler(cs),
+            ),
+            *annotations,
+        ]
+
+
+PREPARE_METHODS: tuple[Callable[[Any, Iterable[Any], ConfigDict], tuple[Any, list[Any]] | None], ...] = (
+    decimal_prepare_pydantic_annotations,
     sequence_like_prepare_pydantic_annotations,
     datetime_prepare_pydantic_annotations,
     uuid_prepare_pydantic_annotations,
     path_schema_prepare_pydantic_annotations,
     mapping_like_prepare_pydantic_annotations,
-    ip_prepare_pydantic_annotations, url_prepare_pydantic_annotations)
+    ip_prepare_pydantic_annotations,
+    url_prepare_pydantic_annotations,
+)
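
Editor's note: the `PREPARE_METHODS` tuple is what routes these standard-library types through the schemas built above. The following is a minimal sketch of the user-visible effect (not part of the diff; assumes pydantic v2, outputs indicative).

```python
import decimal
import pathlib

from typing_extensions import Annotated

from pydantic import Field, TypeAdapter, ValidationError

# Decimal constraints are collected as known metadata and passed to core_schema.decimal_schema()
price = TypeAdapter(Annotated[decimal.Decimal, Field(max_digits=5, decimal_places=2)])
print(price.validate_python('123.45'))        # Decimal('123.45')
try:
    price.validate_python('123.456')          # violates max_digits=5 / decimal_places=2
except ValidationError as exc:
    print('rejected:', exc.error_count(), 'error')

# paths are validated via the constrained str schema and constructed with the source type
print(TypeAdapter(pathlib.Path).validate_python('pyproject.toml'))  # PosixPath('pyproject.toml') on POSIX
```
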
diff --git a/pydantic/_internal/_typing_extra.py b/pydantic/_internal/_typing_extra.py
index 1caf85c1b..71be20b37 100644
--- a/pydantic/_internal/_typing_extra.py
+++ b/pydantic/_internal/_typing_extra.py
@@ -1,5 +1,7 @@
 """Logic for interacting with type annotations, mostly extensions, shims and hacks to wrap python's typing module."""
+
 from __future__ import annotations as _annotations
+
 import dataclasses
 import re
 import sys
@@ -10,75 +12,165 @@ from collections.abc import Callable
 from functools import partial
 from types import GetSetDescriptorType
 from typing import TYPE_CHECKING, Any, Final
+
 from typing_extensions import Annotated, Literal, TypeAliasType, TypeGuard, deprecated, get_args, get_origin
+
 if TYPE_CHECKING:
     from ._dataclasses import StandardDataclass
+
 try:
-    from typing import _TypingBase
+    from typing import _TypingBase  # type: ignore[attr-defined]
 except ImportError:
-    from typing import _Final as _TypingBase
+    from typing import _Final as _TypingBase  # type: ignore[attr-defined]
+
 typing_base = _TypingBase
+
+
 if sys.version_info < (3, 9):
+    # python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] and so on)
     TypingGenericAlias = ()
 else:
-    from typing import GenericAlias as TypingGenericAlias
+    from typing import GenericAlias as TypingGenericAlias  # type: ignore
+
+
 if sys.version_info < (3, 11):
     from typing_extensions import NotRequired, Required
 else:
-    from typing import NotRequired, Required
+    from typing import NotRequired, Required  # noqa: F401
+
+
 if sys.version_info < (3, 10):
-    WithArgsTypes = TypingGenericAlias,
+
+    def origin_is_union(tp: type[Any] | None) -> bool:
+        return tp is typing.Union
+
+    WithArgsTypes = (TypingGenericAlias,)
+
 else:
-    WithArgsTypes = typing._GenericAlias, types.GenericAlias, types.UnionType
+
+    def origin_is_union(tp: type[Any] | None) -> bool:
+        return tp is typing.Union or tp is types.UnionType
+
+    WithArgsTypes = typing._GenericAlias, types.GenericAlias, types.UnionType  # type: ignore[attr-defined]
+
+
 if sys.version_info < (3, 10):
     NoneType = type(None)
     EllipsisType = type(Ellipsis)
 else:
     from types import NoneType as NoneType
+
+
 LITERAL_TYPES: set[Any] = {Literal}
 if hasattr(typing, 'Literal'):
-    LITERAL_TYPES.add(typing.Literal)
-DEPRECATED_TYPES: tuple[Any, ...] = (deprecated,) if isinstance(deprecated,
-    type) else ()
+    LITERAL_TYPES.add(typing.Literal)  # type: ignore
+
+# Check if `deprecated` is a type to prevent errors when using typing_extensions < 4.9.0
+DEPRECATED_TYPES: tuple[Any, ...] = (deprecated,) if isinstance(deprecated, type) else ()
 if hasattr(warnings, 'deprecated'):
-    DEPRECATED_TYPES = *DEPRECATED_TYPES, warnings.deprecated
-NONE_TYPES: tuple[Any, ...] = (None, NoneType, *(tp[None] for tp in
-    LITERAL_TYPES))
-TypeVarType = Any
+    DEPRECATED_TYPES = (*DEPRECATED_TYPES, warnings.deprecated)  # type: ignore
+
+NONE_TYPES: tuple[Any, ...] = (None, NoneType, *(tp[None] for tp in LITERAL_TYPES))
+
+
+TypeVarType = Any  # since mypy doesn't allow the use of TypeVar as a type
+
+
+def is_none_type(type_: Any) -> bool:
+    return type_ in NONE_TYPES
+
+
+def is_callable_type(type_: type[Any]) -> bool:
+    return type_ is Callable or get_origin(type_) is Callable
+
+
+def is_literal_type(type_: type[Any]) -> bool:
+    return Literal is not None and get_origin(type_) in LITERAL_TYPES
+
+
+def is_deprecated_instance(instance: Any) -> TypeGuard[deprecated]:
+    return isinstance(instance, DEPRECATED_TYPES)
+

+def literal_values(type_: type[Any]) -> tuple[Any, ...]:
+    return get_args(type_)

-def all_literal_values(type_: type[Any]) ->list[Any]:
+
+def all_literal_values(type_: type[Any]) -> list[Any]:
     """This method is used to retrieve all Literal values as
     Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586)
     e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]`.
     """
-    pass
+    if not is_literal_type(type_):
+        return [type_]
+
+    values = literal_values(type_)
+    return list(x for value in values for x in all_literal_values(value))
+
+
+def is_annotated(ann_type: Any) -> bool:
+    return get_origin(ann_type) is Annotated
+

+def annotated_type(type_: Any) -> Any | None:
+    return get_args(type_)[0] if is_annotated(type_) else None

-def is_namedtuple(type_: type[Any]) ->bool:
+
+def is_namedtuple(type_: type[Any]) -> bool:
     """Check if a given class is a named tuple.
     It can be either a `typing.NamedTuple` or `collections.namedtuple`.
     """
-    pass
+    from ._utils import lenient_issubclass
+
+    return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields')


 test_new_type = typing.NewType('test_new_type', str)


-def is_new_type(type_: type[Any]) ->bool:
+def is_new_type(type_: type[Any]) -> bool:
     """Check whether type_ was created using typing.NewType.

     Can't use isinstance because it fails <3.10.
     """
-    pass
+    return isinstance(type_, test_new_type.__class__) and hasattr(type_, '__supertype__')  # type: ignore[arg-type]
+
+
+def _check_classvar(v: type[Any] | None) -> bool:
+    if v is None:
+        return False

+    return v.__class__ == typing.ClassVar.__class__ and getattr(v, '_name', None) == 'ClassVar'

-def _check_finalvar(v: (type[Any] | None)) ->bool:
+
+def is_classvar(ann_type: type[Any]) -> bool:
+    if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)):
+        return True
+
+    # this is an ugly workaround for class vars that contain forward references and are therefore themselves
+    # forward references, see #3679
+    if ann_type.__class__ == typing.ForwardRef and re.match(
+        r'(\w+\.)?ClassVar\[',
+        ann_type.__forward_arg__,  # type: ignore
+    ):
+        return True
+
+    return False
+
+
+def _check_finalvar(v: type[Any] | None) -> bool:
     """Check if a given type is a `typing.Final` type."""
-    pass
+    if v is None:
+        return False
+
+    return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, '_name', None) == 'Final')
+
+
+def is_finalvar(ann_type: Any) -> bool:
+    return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type))


-def parent_frame_namespace(*, parent_depth: int=2) ->(dict[str, Any] | None):
+def parent_frame_namespace(*, parent_depth: int = 2) -> dict[str, Any] | None:
     """We allow use of items in parent namespace to get around the issue with `get_type_hints` only looking in the
     global module namespace. See https://github.com/pydantic/pydantic/issues/2678#issuecomment-1008139014 -> Scope
     and suggestion at the end of the next comment by @gvanrossum.
@@ -90,49 +182,153 @@ def parent_frame_namespace(*, parent_depth: int=2) ->(dict[str, Any] | None):
     dict of exactly what's in scope. Using `f_back` would work sometimes but would be very wrong and confusing in many
     other cases. See https://discuss.python.org/t/is-there-a-way-to-access-parent-nested-namespaces/20659.
     """
-    pass
-
-
-def get_cls_type_hints_lenient(obj: Any, globalns: (dict[str, Any] | None)=None
-    ) ->dict[str, Any]:
+    frame = sys._getframe(parent_depth)
+    # if f_back is None, it's the global module namespace and we don't need to include it here
+    if frame.f_back is None:
+        return None
+    else:
+        return frame.f_locals
+
+
+def add_module_globals(obj: Any, globalns: dict[str, Any] | None = None) -> dict[str, Any]:
+    module_name = getattr(obj, '__module__', None)
+    if module_name:
+        try:
+            module_globalns = sys.modules[module_name].__dict__
+        except KeyError:
+            # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363
+            pass
+        else:
+            if globalns:
+                return {**module_globalns, **globalns}
+            else:
+                # copy module globals to make sure it can't be updated later
+                return module_globalns.copy()
+
+    return globalns or {}
+
+
+def get_cls_types_namespace(cls: type[Any], parent_namespace: dict[str, Any] | None = None) -> dict[str, Any]:
+    ns = add_module_globals(cls, parent_namespace)
+    ns[cls.__name__] = cls
+    return ns
+
+
+def get_cls_type_hints_lenient(obj: Any, globalns: dict[str, Any] | None = None) -> dict[str, Any]:
     """Collect annotations from a class, including those from parent classes.

     Unlike `typing.get_type_hints`, this function will not error if a forward reference is not resolvable.
     """
-    pass
+    hints = {}
+    for base in reversed(obj.__mro__):
+        ann = base.__dict__.get('__annotations__')
+        localns = dict(vars(base))
+        if ann is not None and ann is not GetSetDescriptorType:
+            for name, value in ann.items():
+                hints[name] = eval_type_lenient(value, globalns, localns)
+    return hints


-def eval_type_lenient(value: Any, globalns: (dict[str, Any] | None)=None,
-    localns: (dict[str, Any] | None)=None) ->Any:
+def eval_type_lenient(value: Any, globalns: dict[str, Any] | None = None, localns: dict[str, Any] | None = None) -> Any:
     """Behaves like typing._eval_type, except it won't raise an error if a forward reference can't be resolved."""
-    pass
-
-
-def eval_type_backport(value: Any, globalns: (dict[str, Any] | None)=None,
-    localns: (dict[str, Any] | None)=None, type_params: (tuple[Any] | None)
-    =None) ->Any:
+    if value is None:
+        value = NoneType
+    elif isinstance(value, str):
+        value = _make_forward_ref(value, is_argument=False, is_class=True)
+
+    try:
+        return eval_type_backport(value, globalns, localns)
+    except NameError:
+        # the point of this function is to be tolerant to this case
+        return value
+
+
+def eval_type_backport(
+    value: Any,
+    globalns: dict[str, Any] | None = None,
+    localns: dict[str, Any] | None = None,
+    type_params: tuple[Any] | None = None,
+) -> Any:
     """Like `typing._eval_type`, but falls back to the `eval_type_backport` package if it's
     installed to let older Python versions use newer typing features.
     Specifically, this transforms `X | Y` into `typing.Union[X, Y]`
     and `list[X]` into `typing.List[X]` etc. (for all the types made generic in PEP 585)
     if the original syntax is not supported in the current Python version.
     """
-    pass
-
-
-def get_function_type_hints(function: Callable[..., Any], *, include_keys:
-    (set[str] | None)=None, types_namespace: (dict[str, Any] | None)=None
-    ) ->dict[str, Any]:
+    try:
+        if sys.version_info >= (3, 13):
+            return typing._eval_type(  # type: ignore
+                value, globalns, localns, type_params=type_params
+            )
+        else:
+            return typing._eval_type(  # type: ignore
+                value, globalns, localns
+            )
+    except TypeError as e:
+        if not (isinstance(value, typing.ForwardRef) and is_backport_fixable_error(e)):
+            raise
+        try:
+            from eval_type_backport import eval_type_backport
+        except ImportError:
+            raise TypeError(
+                f'You have a type annotation {value.__forward_arg__!r} '
+                f'which makes use of newer typing features than are supported in your version of Python. '
+                f'To handle this error, you should either remove the use of new syntax '
+                f'or install the `eval_type_backport` package.'
+            ) from e
+
+        return eval_type_backport(value, globalns, localns, try_default=False)
+
+
+def is_backport_fixable_error(e: TypeError) -> bool:
+    msg = str(e)
+    return msg.startswith('unsupported operand type(s) for |: ') or "' object is not subscriptable" in msg
+
+
+def get_function_type_hints(
+    function: Callable[..., Any], *, include_keys: set[str] | None = None, types_namespace: dict[str, Any] | None = None
+) -> dict[str, Any]:
     """Like `typing.get_type_hints`, but doesn't convert `X` to `Optional[X]` if the default value is `None`, also
     copes with `partial`.
     """
-    pass
+    try:
+        if isinstance(function, partial):
+            annotations = function.func.__annotations__
+        else:
+            annotations = function.__annotations__
+    except AttributeError:
+        type_hints = get_type_hints(function)
+        if isinstance(function, type):
+            # `type[...]` is a callable, which returns an instance of itself.
+            # At some point, we might even look into the return type of `__new__`
+            # if it returns something else.
+            type_hints.setdefault('return', function)
+        return type_hints
+
+    globalns = add_module_globals(function)
+    type_hints = {}
+    type_params: tuple[Any] = getattr(function, '__type_params__', ())  # type: ignore
+    for name, value in annotations.items():
+        if include_keys is not None and name not in include_keys:
+            continue
+        if value is None:
+            value = NoneType
+        elif isinstance(value, str):
+            value = _make_forward_ref(value)
+
+        type_hints[name] = eval_type_backport(value, globalns, types_namespace, type_params)
+
+    return type_hints


 if sys.version_info < (3, 9, 8) or (3, 10) <= sys.version_info < (3, 10, 1):

-    def _make_forward_ref(arg: Any, is_argument: bool=True, *, is_class:
-        bool=False) ->typing.ForwardRef:
+    def _make_forward_ref(
+        arg: Any,
+        is_argument: bool = True,
+        *,
+        is_class: bool = False,
+    ) -> typing.ForwardRef:
         """Wrapper for ForwardRef that accounts for the `is_class` argument missing in older versions.
         The `module` argument is omitted as it breaks <3.9.8, =3.10.0 and isn't used in the calls below.

@@ -144,11 +340,15 @@ if sys.version_info < (3, 9, 8) or (3, 10) <= sys.version_info < (3, 10, 1):

         Implemented as EAFP with memory.
         """
-        pass
+        return typing.ForwardRef(arg, is_argument)
+
 else:
     _make_forward_ref = typing.ForwardRef
+
+
 if sys.version_info >= (3, 10):
     get_type_hints = typing.get_type_hints
+
 else:
     """
     For older versions of python, we have a custom implementation of `get_type_hints` which is as close as possible to
@@ -156,9 +356,12 @@ else:
     """

     @typing.no_type_check
-    def get_type_hints(obj: Any, globalns: (dict[str, Any] | None)=None,
-        localns: (dict[str, Any] | None)=None, include_extras: bool=False
-        ) ->dict[str, Any]:
+    def get_type_hints(  # noqa: C901
+        obj: Any,
+        globalns: dict[str, Any] | None = None,
+        localns: dict[str, Any] | None = None,
+        include_extras: bool = False,
+    ) -> dict[str, Any]:  # pragma: no cover
         """Taken verbatim from python 3.10.8 unchanged, except:
         * type annotations of the function definition above.
         * prefixing `typing.` where appropriate
@@ -200,10 +403,106 @@ else:
         - If two dict arguments are passed, they specify globals and
           locals, respectively.
         """
-        pass
+        if getattr(obj, '__no_type_check__', None):
+            return {}
+        # Classes require a special treatment.
+        if isinstance(obj, type):
+            hints = {}
+            for base in reversed(obj.__mro__):
+                if globalns is None:
+                    base_globals = getattr(sys.modules.get(base.__module__, None), '__dict__', {})
+                else:
+                    base_globals = globalns
+                ann = base.__dict__.get('__annotations__', {})
+                if isinstance(ann, types.GetSetDescriptorType):
+                    ann = {}
+                base_locals = dict(vars(base)) if localns is None else localns
+                if localns is None and globalns is None:
+                    # This is surprising, but required.  Before Python 3.10,
+                    # get_type_hints only evaluated the globalns of
+                    # a class.  To maintain backwards compatibility, we reverse
+                    # the globalns and localns order so that eval() looks into
+                    # *base_globals* first rather than *base_locals*.
+                    # This only affects ForwardRefs.
+                    base_globals, base_locals = base_locals, base_globals
+                for name, value in ann.items():
+                    if value is None:
+                        value = type(None)
+                    if isinstance(value, str):
+                        value = _make_forward_ref(value, is_argument=False, is_class=True)
+
+                    value = eval_type_backport(value, base_globals, base_locals)
+                    hints[name] = value
+            if not include_extras and hasattr(typing, '_strip_annotations'):
+                return {
+                    k: typing._strip_annotations(t)  # type: ignore
+                    for k, t in hints.items()
+                }
+            else:
+                return hints
+
+        if globalns is None:
+            if isinstance(obj, types.ModuleType):
+                globalns = obj.__dict__
+            else:
+                nsobj = obj
+                # Find globalns for the unwrapped object.
+                while hasattr(nsobj, '__wrapped__'):
+                    nsobj = nsobj.__wrapped__
+                globalns = getattr(nsobj, '__globals__', {})
+            if localns is None:
+                localns = globalns
+        elif localns is None:
+            localns = globalns
+        hints = getattr(obj, '__annotations__', None)
+        if hints is None:
+            # Return empty annotations for something that _could_ have them.
+            if isinstance(obj, typing._allowed_types):  # type: ignore
+                return {}
+            else:
+                raise TypeError(f'{obj!r} is not a module, class, method, ' 'or function.')
+        defaults = typing._get_defaults(obj)  # type: ignore
+        hints = dict(hints)
+        for name, value in hints.items():
+            if value is None:
+                value = type(None)
+            if isinstance(value, str):
+                # class-level forward refs were handled above, this must be either
+                # a module-level annotation or a function argument annotation
+
+                value = _make_forward_ref(
+                    value,
+                    is_argument=not isinstance(obj, types.ModuleType),
+                    is_class=False,
+                )
+            value = eval_type_backport(value, globalns, localns)
+            if name in defaults and defaults[name] is None:
+                value = typing.Optional[value]
+            hints[name] = value
+        return hints if include_extras else {k: typing._strip_annotations(t) for k, t in hints.items()}  # type: ignore
+
+
+def is_dataclass(_cls: type[Any]) -> TypeGuard[type[StandardDataclass]]:
+    # The dataclasses.is_dataclass function doesn't seem to provide TypeGuard functionality,
+    # so I created this convenience function
+    return dataclasses.is_dataclass(_cls)
+
+
+def origin_is_type_alias_type(origin: Any) -> TypeGuard[TypeAliasType]:
+    return isinstance(origin, TypeAliasType)
+
+
 if sys.version_info >= (3, 10):

+    def is_generic_alias(type_: type[Any]) -> bool:
+        return isinstance(type_, (types.GenericAlias, typing._GenericAlias))  # type: ignore[attr-defined]
+
+else:
+
+    def is_generic_alias(type_: type[Any]) -> bool:
+        return isinstance(type_, typing._GenericAlias)  # type: ignore
+

-def is_self_type(tp: Any) ->bool:
+def is_self_type(tp: Any) -> bool:
     """Check if a given class is a Self type (from `typing` or `typing_extensions`)"""
-    pass
+    return isinstance(tp, typing_base) and getattr(tp, '_name', None) == 'Self'
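
Editor's note: a behavioural sketch (not part of the diff) of a few of the `_typing_extra` helpers implemented above. The module is private, so this is illustrative only; the outputs are what the implementation above suggests.

```python
from typing import Literal

from pydantic._internal import _typing_extra

# nested Literals are flattened by typing itself; all_literal_values returns them as a list
Nested = Literal[Literal[Literal[1, 2, 3], 'foo'], 5, None]
print(_typing_extra.all_literal_values(Nested))        # [1, 2, 3, 'foo', 5, None]

# eval_type_lenient swallows unresolvable forward references instead of raising NameError
print(_typing_extra.eval_type_lenient('NotDefinedAnywhere'))  # ForwardRef('NotDefinedAnywhere')

# is_none_type accepts None, NoneType and Literal[None]
print(_typing_extra.is_none_type(type(None)), _typing_extra.is_none_type(Literal[None]))  # True True
```
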
diff --git a/pydantic/_internal/_utils.py b/pydantic/_internal/_utils.py
index 5353dcfc4..de19243ae 100644
--- a/pydantic/_internal/_utils.py
+++ b/pydantic/_internal/_utils.py
@@ -2,7 +2,9 @@

 This should be reduced as much as possible with functions only used in one place, moved to that place.
 """
+
 from __future__ import annotations as _annotations
+
 import dataclasses
 import keyword
 import typing
@@ -12,84 +14,168 @@ from copy import deepcopy
 from itertools import zip_longest
 from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType
 from typing import Any, Mapping, TypeVar
+
 from typing_extensions import TypeAlias, TypeGuard
+
 from . import _repr, _typing_extra
+
 if typing.TYPE_CHECKING:
-    MappingIntStrAny: TypeAlias = (
-        'typing.Mapping[int, Any] | typing.Mapping[str, Any]')
-    AbstractSetIntStr: TypeAlias = (
-        'typing.AbstractSet[int] | typing.AbstractSet[str]')
+    MappingIntStrAny: TypeAlias = 'typing.Mapping[int, Any] | typing.Mapping[str, Any]'
+    AbstractSetIntStr: TypeAlias = 'typing.AbstractSet[int] | typing.AbstractSet[str]'
     from ..main import BaseModel
-IMMUTABLE_NON_COLLECTIONS_TYPES: set[type[Any]] = {int, float, complex, str,
-    bool, bytes, type, _typing_extra.NoneType, FunctionType,
-    BuiltinFunctionType, LambdaType, weakref.ref, CodeType, ModuleType,
-    NotImplemented.__class__, Ellipsis.__class__}
-BUILTIN_COLLECTIONS: set[type[Any]] = {list, set, tuple, frozenset, dict,
-    OrderedDict, defaultdict, deque}


-def is_model_class(cls: Any) ->TypeGuard[type[BaseModel]]:
+# these are types that are returned unchanged by deepcopy
+IMMUTABLE_NON_COLLECTIONS_TYPES: set[type[Any]] = {
+    int,
+    float,
+    complex,
+    str,
+    bool,
+    bytes,
+    type,
+    _typing_extra.NoneType,
+    FunctionType,
+    BuiltinFunctionType,
+    LambdaType,
+    weakref.ref,
+    CodeType,
+    # note: including ModuleType here differs from deepcopy's behaviour by not producing an error.
+    # It might not be a good idea in general, but since this function is only used internally
+    # against default values of fields, it allows a field to have a module as its default value
+    ModuleType,
+    NotImplemented.__class__,
+    Ellipsis.__class__,
+}
+
+# these are types that if empty, might be copied with simple copy() instead of deepcopy()
+BUILTIN_COLLECTIONS: set[type[Any]] = {
+    list,
+    set,
+    tuple,
+    frozenset,
+    dict,
+    OrderedDict,
+    defaultdict,
+    deque,
+}
+
+
+def sequence_like(v: Any) -> bool:
+    return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque))
+
+
+def lenient_isinstance(o: Any, class_or_tuple: type[Any] | tuple[type[Any], ...] | None) -> bool:  # pragma: no cover
+    try:
+        return isinstance(o, class_or_tuple)  # type: ignore[arg-type]
+    except TypeError:
+        return False
+
+
+def lenient_issubclass(cls: Any, class_or_tuple: Any) -> bool:  # pragma: no cover
+    try:
+        return isinstance(cls, type) and issubclass(cls, class_or_tuple)
+    except TypeError:
+        if isinstance(cls, _typing_extra.WithArgsTypes):
+            return False
+        raise  # pragma: no cover
+
+
+def is_model_class(cls: Any) -> TypeGuard[type[BaseModel]]:
     """Returns true if cls is a _proper_ subclass of BaseModel, and provides proper type-checking,
     unlike raw calls to lenient_issubclass.
     """
-    pass
+    from ..main import BaseModel
+
+    return lenient_issubclass(cls, BaseModel) and cls is not BaseModel


-def is_valid_identifier(identifier: str) ->bool:
+def is_valid_identifier(identifier: str) -> bool:
     """Checks that a string is a valid identifier and not a Python keyword.
     :param identifier: The identifier to test.
     :return: True if the identifier is valid.
     """
-    pass
+    return identifier.isidentifier() and not keyword.iskeyword(identifier)


 KeyType = TypeVar('KeyType')
+
+
+def deep_update(mapping: dict[KeyType, Any], *updating_mappings: dict[KeyType, Any]) -> dict[KeyType, Any]:
+    updated_mapping = mapping.copy()
+    for updating_mapping in updating_mappings:
+        for k, v in updating_mapping.items():
+            if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict):
+                updated_mapping[k] = deep_update(updated_mapping[k], v)
+            else:
+                updated_mapping[k] = v
+    return updated_mapping
+
+
+def update_not_none(mapping: dict[Any, Any], **update: Any) -> None:
+    mapping.update({k: v for k, v in update.items() if v is not None})
+
+
 T = TypeVar('T')


-def unique_list(input_list: (list[T] | tuple[T, ...]), *, name_factory:
-    typing.Callable[[T], str]=str) ->list[T]:
+def unique_list(
+    input_list: list[T] | tuple[T, ...],
+    *,
+    name_factory: typing.Callable[[T], str] = str,
+) -> list[T]:
     """Make a list unique while maintaining order.
     We update the list if another one with the same name is set
     (e.g. model validator overridden in subclass).
     """
-    pass
+    result: list[T] = []
+    result_names: list[str] = []
+    for v in input_list:
+        v_name = name_factory(v)
+        if v_name not in result_names:
+            result_names.append(v_name)
+            result.append(v)
+        else:
+            result[result_names.index(v_name)] = v
+
+    return result


 class ValueItems(_repr.Representation):
     """Class for more convenient calculation of excluded or included fields on values."""
-    __slots__ = '_items', '_type'

-    def __init__(self, value: Any, items: (AbstractSetIntStr |
-        MappingIntStrAny)) ->None:
+    __slots__ = ('_items', '_type')
+
+    def __init__(self, value: Any, items: AbstractSetIntStr | MappingIntStrAny) -> None:
         items = self._coerce_items(items)
+
         if isinstance(value, (list, tuple)):
-            items = self._normalize_indexes(items, len(value))
-        self._items: MappingIntStrAny = items
+            items = self._normalize_indexes(items, len(value))  # type: ignore

-    def is_excluded(self, item: Any) ->bool:
+        self._items: MappingIntStrAny = items  # type: ignore
+
+    def is_excluded(self, item: Any) -> bool:
         """Check if item is fully excluded.

         :param item: key or index of a value
         """
-        pass
+        return self.is_true(self._items.get(item))

-    def is_included(self, item: Any) ->bool:
+    def is_included(self, item: Any) -> bool:
         """Check if value is contained in self._items.

         :param item: key or index of value
         """
-        pass
+        return item in self._items

-    def for_element(self, e: (int | str)) ->(AbstractSetIntStr |
-        MappingIntStrAny | None):
+    def for_element(self, e: int | str) -> AbstractSetIntStr | MappingIntStrAny | None:
         """:param e: key or index of element on value
         :return: raw values for element if self._items is dict and contain needed element
         """
-        pass
+        item = self._items.get(e)  # type: ignore
+        return item if not self.is_true(item) else None

-    def _normalize_indexes(self, items: MappingIntStrAny, v_length: int
-        ) ->dict[int | str, Any]:
+    def _normalize_indexes(self, items: MappingIntStrAny, v_length: int) -> dict[int | str, Any]:
         """:param items: dict or set of indexes which will be normalized
         :param v_length: length of sequence indexes of which will be

@@ -98,10 +184,36 @@ class ValueItems(_repr.Representation):
         >>> self._normalize_indexes({'__all__': True}, 4)
         {0: True, 1: True, 2: True, 3: True}
         """
-        pass
+        normalized_items: dict[int | str, Any] = {}
+        all_items = None
+        for i, v in items.items():
+            if not (isinstance(v, typing.Mapping) or isinstance(v, typing.AbstractSet) or self.is_true(v)):
+                raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}')
+            if i == '__all__':
+                all_items = self._coerce_value(v)
+                continue
+            if not isinstance(i, int):
+                raise TypeError(
+                    'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: '
+                    'expected integer keys or keyword "__all__"'
+                )
+            normalized_i = v_length + i if i < 0 else i
+            normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i))
+
+        if not all_items:
+            return normalized_items
+        if self.is_true(all_items):
+            for i in range(v_length):
+                normalized_items.setdefault(i, ...)
+            return normalized_items
+        for i in range(v_length):
+            normalized_item = normalized_items.setdefault(i, {})
+            if not self.is_true(normalized_item):
+                normalized_items[i] = self.merge(all_items, normalized_item)
+        return normalized_items

     @classmethod
-    def merge(cls, base: Any, override: Any, intersect: bool=False) ->Any:
+    def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any:
         """Merge a `base` item with an `override` item.

         Both `base` and `override` are converted to dictionaries if possible.
@@ -115,45 +227,101 @@ class ValueItems(_repr.Representation):
         set to `False` (default) and on the intersection of keys if
         `intersect` is set to `True`.
         """
-        pass
+        override = cls._coerce_value(override)
+        base = cls._coerce_value(base)
+        if override is None:
+            return base
+        if cls.is_true(base) or base is None:
+            return override
+        if cls.is_true(override):
+            return base if intersect else override
+
+        # intersection or union of keys while preserving ordering:
+        if intersect:
+            merge_keys = [k for k in base if k in override] + [k for k in override if k in base]
+        else:
+            merge_keys = list(base) + [k for k in override if k not in base]
+
+        merged: dict[int | str, Any] = {}
+        for k in merge_keys:
+            merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect)
+            if merged_item is not None:
+                merged[k] = merged_item
+
+        return merged
+
+    @staticmethod
+    def _coerce_items(items: AbstractSetIntStr | MappingIntStrAny) -> MappingIntStrAny:
+        if isinstance(items, typing.Mapping):
+            pass
+        elif isinstance(items, typing.AbstractSet):
+            items = dict.fromkeys(items, ...)  # type: ignore
+        else:
+            class_name = getattr(items, '__class__', '???')
+            raise TypeError(f'Unexpected type of exclude value {class_name}')
+        return items  # type: ignore

-    def __repr_args__(self) ->_repr.ReprArgs:
+    @classmethod
+    def _coerce_value(cls, value: Any) -> Any:
+        if value is None or cls.is_true(value):
+            return value
+        return cls._coerce_items(value)
+
+    @staticmethod
+    def is_true(v: Any) -> bool:
+        return v is True or v is ...
+
+    def __repr_args__(self) -> _repr.ReprArgs:
         return [(None, self._items)]


 if typing.TYPE_CHECKING:
-else:

+    def ClassAttribute(name: str, value: T) -> T: ...
+
+else:

     class ClassAttribute:
         """Hide class attribute from its instances."""
+
         __slots__ = 'name', 'value'

-        def __init__(self, name: str, value: Any) ->None:
+        def __init__(self, name: str, value: Any) -> None:
             self.name = name
             self.value = value

-        def __get__(self, instance: Any, owner: type[Any]) ->None:
+        def __get__(self, instance: Any, owner: type[Any]) -> None:
             if instance is None:
                 return self.value
-            raise AttributeError(
-                f'{self.name!r} attribute of {owner.__name__!r} is class-only')
+            raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only')
+
+
 Obj = TypeVar('Obj')


-def smart_deepcopy(obj: Obj) ->Obj:
+def smart_deepcopy(obj: Obj) -> Obj:
     """Return type as is for immutable built-in types
     Use obj.copy() for built-in empty collections
     Use copy.deepcopy() for non-empty collections and unknown objects.
     """
-    pass
+    obj_type = obj.__class__
+    if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES:
+        return obj  # fastest case: obj is immutable and not collection therefore will not be copied anyway
+    try:
+        if not obj and obj_type in BUILTIN_COLLECTIONS:
+            # faster way for empty collections, no need to copy its members
+            return obj if obj_type is tuple else obj.copy()  # tuple doesn't have copy method  # type: ignore
+    except (TypeError, ValueError, RuntimeError):
+        # do we really dare to catch ALL errors? Seems a bit risky
+        pass
+
+    return deepcopy(obj)  # slowest way when we actually might need a deepcopy
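The three branches above are easiest to see with a few asserts; again this assumes the function lives in `pydantic._internal._utils`.

from pydantic._internal._utils import smart_deepcopy

x = 'immutable'
assert smart_deepcopy(x) is x              # immutable non-collection: returned as-is

empty = []
assert smart_deepcopy(empty) is not empty  # empty built-in collection: cheap .copy()

nested = [[1, 2]]
copied = smart_deepcopy(nested)            # non-empty collection: full deepcopy
assert copied == nested and copied[0] is not nested[0]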


 _SENTINEL = object()


-def all_identical(left: typing.Iterable[Any], right: typing.Iterable[Any]
-    ) ->bool:
+def all_identical(left: typing.Iterable[Any], right: typing.Iterable[Any]) -> bool:
     """Check that the items of `left` are the same objects as those in `right`.

     >>> a, b = object(), object()
@@ -162,7 +330,10 @@ def all_identical(left: typing.Iterable[Any], right: typing.Iterable[Any]
     >>> all_identical([a, b, [a]], [a, b, [a]])  # new list object, while "equal" is not "identical"
     False
     """
-    pass
+    for left_item, right_item in zip_longest(left, right, fillvalue=_SENTINEL):
+        if left_item is not right_item:
+            return False
+    return True


 @dataclasses.dataclass(frozen=True)
@@ -171,12 +342,21 @@ class SafeGetItemProxy:

     This makes it safe to use in `operator.itemgetter` when some keys may be missing
     """
-    __slots__ = 'wrapped',
+
+    # Define __slots__ manually for performance
+    # @dataclasses.dataclass() only supports slots=True in python>=3.10
+    __slots__ = ('wrapped',)
+
     wrapped: Mapping[str, Any]

-    def __getitem__(self, key: str, /) ->Any:
+    def __getitem__(self, key: str, /) -> Any:
         return self.wrapped.get(key, _SENTINEL)
+
+    # required to pass the object to operator.itemgetter() instances due to a quirk of typeshed
+    # https://github.com/python/mypy/issues/13713
+    # https://github.com/python/typeshed/pull/8785
+    # Since this is typing-only, hide it in a typing.TYPE_CHECKING block
     if typing.TYPE_CHECKING:

-        def __contains__(self, key: str, /) ->bool:
+        def __contains__(self, key: str, /) -> bool:
             return self.wrapped.__contains__(key)
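`SafeGetItemProxy` exists so `operator.itemgetter` can be pointed at mappings that may lack some of the requested keys; a hedged example using the same assumed internal import path.

import operator
from pydantic._internal._utils import SafeGetItemProxy

getter = operator.itemgetter('a', 'b')
a, b = getter(SafeGetItemProxy({'a': 1}))   # no KeyError for the missing 'b'
print(a)                                     # 1
print(b)                                     # the module-private sentinel object, not None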
diff --git a/pydantic/_internal/_validate_call.py b/pydantic/_internal/_validate_call.py
index 0c9c2fbee..3fae2d10e 100644
--- a/pydantic/_internal/_validate_call.py
+++ b/pydantic/_internal/_validate_call.py
@@ -1,8 +1,11 @@
 from __future__ import annotations as _annotations
+
 import inspect
 from functools import partial
 from typing import Any, Awaitable, Callable
+
 import pydantic_core
+
 from ..config import ConfigDict
 from ..plugin._schema_validator import create_schema_validator
 from . import _generate_schema, _typing_extra
@@ -11,11 +14,22 @@ from ._config import ConfigWrapper

 class ValidateCallWrapper:
     """This is a wrapper around a function that validates the arguments passed to it, and optionally the return value."""
-    __slots__ = ('__pydantic_validator__', '__name__', '__qualname__',
-        '__annotations__', '__dict__')

-    def __init__(self, function: Callable[..., Any], config: (ConfigDict |
-        None), validate_return: bool, namespace: (dict[str, Any] | None)):
+    __slots__ = (
+        '__pydantic_validator__',
+        '__name__',
+        '__qualname__',
+        '__annotations__',
+        '__dict__',  # required for __module__
+    )
+
+    def __init__(
+        self,
+        function: Callable[..., Any],
+        config: ConfigDict | None,
+        validate_return: bool,
+        namespace: dict[str, Any] | None,
+    ):
         if isinstance(function, partial):
             func = function.func
             schema_type = func
@@ -27,41 +41,59 @@ class ValidateCallWrapper:
             self.__name__ = function.__name__
             self.__qualname__ = function.__qualname__
             self.__module__ = function.__module__
+
         global_ns = _typing_extra.add_module_globals(function, None)
+        # TODO: this is a bit of a hack, we should probably have a better way to handle this
+        # specifically, we shouldn't be pumping the namespace full of type_params
+        # when we take namespace and type_params arguments in eval_type_backport
         type_params = getattr(schema_type, '__type_params__', ())
-        namespace = {**{param.__name__: param for param in type_params}, **
-            global_ns or {}, **namespace or {}}
+        namespace = {
+            **{param.__name__: param for param in type_params},
+            **(global_ns or {}),
+            **(namespace or {}),
+        }
         config_wrapper = ConfigWrapper(config)
         gen_schema = _generate_schema.GenerateSchema(config_wrapper, namespace)
         schema = gen_schema.clean_schema(gen_schema.generate_schema(function))
         core_config = config_wrapper.core_config(self)
-        self.__pydantic_validator__ = create_schema_validator(schema,
-            schema_type, self.__module__, self.__qualname__,
-            'validate_call', core_config, config_wrapper.plugin_settings)
+
+        self.__pydantic_validator__ = create_schema_validator(
+            schema,
+            schema_type,
+            self.__module__,
+            self.__qualname__,
+            'validate_call',
+            core_config,
+            config_wrapper.plugin_settings,
+        )
+
         if validate_return:
             signature = inspect.signature(function)
-            return_type = (signature.return_annotation if signature.
-                return_annotation is not signature.empty else Any)
-            gen_schema = _generate_schema.GenerateSchema(config_wrapper,
-                namespace)
-            schema = gen_schema.clean_schema(gen_schema.generate_schema(
-                return_type))
-            validator = create_schema_validator(schema, schema_type, self.
-                __module__, self.__qualname__, 'validate_call', core_config,
-                config_wrapper.plugin_settings)
+            return_type = signature.return_annotation if signature.return_annotation is not signature.empty else Any
+            gen_schema = _generate_schema.GenerateSchema(config_wrapper, namespace)
+            schema = gen_schema.clean_schema(gen_schema.generate_schema(return_type))
+            validator = create_schema_validator(
+                schema,
+                schema_type,
+                self.__module__,
+                self.__qualname__,
+                'validate_call',
+                core_config,
+                config_wrapper.plugin_settings,
+            )
             if inspect.iscoroutinefunction(function):

-                async def return_val_wrapper(aw: Awaitable[Any]) ->None:
+                async def return_val_wrapper(aw: Awaitable[Any]) -> None:
                     return validator.validate_python(await aw)
+
                 self.__return_pydantic_validator__ = return_val_wrapper
             else:
                 self.__return_pydantic_validator__ = validator.validate_python
         else:
             self.__return_pydantic_validator__ = None

-    def __call__(self, *args: Any, **kwargs: Any) ->Any:
-        res = self.__pydantic_validator__.validate_python(pydantic_core.
-            ArgsKwargs(args, kwargs))
+    def __call__(self, *args: Any, **kwargs: Any) -> Any:
+        res = self.__pydantic_validator__.validate_python(pydantic_core.ArgsKwargs(args, kwargs))
         if self.__return_pydantic_validator__:
             return self.__return_pydantic_validator__(res)
         return res
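`ValidateCallWrapper` is the machinery behind the public `validate_call` decorator; a minimal usage example of that public entry point.

from pydantic import validate_call

@validate_call(validate_return=True)
def repeat(text: str, count: int) -> str:
    return text * count

print(repeat('ab', '3'))   # '3' is coerced to int, so this prints 'ababab'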
diff --git a/pydantic/_internal/_validators.py b/pydantic/_internal/_validators.py
index 2f7d67f47..870b536e3 100644
--- a/pydantic/_internal/_validators.py
+++ b/pydantic/_internal/_validators.py
@@ -2,23 +2,71 @@

 Import of this module is deferred since it contains imports of many standard library modules.
 """
+
 from __future__ import annotations as _annotations
+
 import math
 import re
 import typing
 from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
 from typing import Any, Callable
+
 from pydantic_core import PydanticCustomError, core_schema
 from pydantic_core._pydantic_core import PydanticKnownError


-def sequence_validator(input_value: typing.Sequence[Any], /, validator:
-    core_schema.ValidatorFunctionWrapHandler) ->typing.Sequence[Any]:
+def sequence_validator(
+    input_value: typing.Sequence[Any],
+    /,
+    validator: core_schema.ValidatorFunctionWrapHandler,
+) -> typing.Sequence[Any]:
     """Validator for `Sequence` types, isinstance(v, Sequence) has already been called."""
-    pass
+    value_type = type(input_value)
+
+    # We don't accept any plain string as a sequence
+    # Relevant issue: https://github.com/pydantic/pydantic/issues/5595
+    if issubclass(value_type, (str, bytes)):
+        raise PydanticCustomError(
+            'sequence_str',
+            "'{type_name}' instances are not allowed as a Sequence value",
+            {'type_name': value_type.__name__},
+        )
+
+    # TODO: refactor sequence validation to validate with either a list or a tuple
+    # schema, depending on the type of the value.
+    # Additionally, we should be able to remove one of either this validator or the
+    # SequenceValidator in _std_types_schema.py (preferably this one, while porting over some logic).
+    # Effectively, a refactor for sequence validation is needed.
+    if value_type is tuple:
+        input_value = list(input_value)
+
+    v_list = validator(input_value)
+
+    # the rest of the logic is just re-creating the original type from `v_list`
+    if value_type is list:
+        return v_list
+    elif issubclass(value_type, range):
+        # return the list as we probably can't re-create the range
+        return v_list
+    elif value_type is tuple:
+        return tuple(v_list)
+    else:
+        # best guess at how to re-create the original type, more custom construction logic might be required
+        return value_type(v_list)  # type: ignore[call-arg]
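A hedged sketch of the observable effect of the re-creation logic above, assuming this validator is wired into `Sequence` handling as in current pydantic: lists come back as lists and tuples as tuples.

from typing import Sequence
from pydantic import TypeAdapter

ta = TypeAdapter(Sequence[int])
print(ta.validate_python(['1', 2]))   # [1, 2]
print(ta.validate_python(('1', 2)))   # (1, 2)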


-def _import_string_logic(dotted_path: str) ->Any:
+def import_string(value: Any) -> Any:
+    if isinstance(value, str):
+        try:
+            return _import_string_logic(value)
+        except ImportError as e:
+            raise PydanticCustomError('import_error', 'Invalid python path: {error}', {'error': str(e)}) from e
+    else:
+        # otherwise we just return the value and let the next validator do the rest of the work
+        return value
+
+
+def _import_string_logic(dotted_path: str) -> Any:
     """Inspired by uvicorn — dotted paths should include a colon before the final item if that item is not a module.
     (This is necessary to distinguish between a submodule and an attribute when there is a conflict.).

@@ -36,37 +84,225 @@ def _import_string_logic(dotted_path: str) ->Any:
     * the substring of `dotted_path` before the colon is not a valid module in the environment (e.g., '123:Mapping')
     * the substring of `dotted_path` after the colon is not an attribute of the module (e.g., 'collections:abc123')
     """
-    pass
+    from importlib import import_module
+
+    components = dotted_path.strip().split(':')
+    if len(components) > 2:
+        raise ImportError(f"Import strings should have at most one ':'; received {dotted_path!r}")
+
+    module_path = components[0]
+    if not module_path:
+        raise ImportError(f'Import strings should have a nonempty module name; received {dotted_path!r}')
+
+    try:
+        module = import_module(module_path)
+    except ModuleNotFoundError as e:
+        if '.' in module_path:
+            # Check if it would be valid if the final item was separated from its module with a `:`
+            maybe_module_path, maybe_attribute = dotted_path.strip().rsplit('.', 1)
+            try:
+                return _import_string_logic(f'{maybe_module_path}:{maybe_attribute}')
+            except ImportError:
+                pass
+            raise ImportError(f'No module named {module_path!r}') from e
+        raise e
+
+    if len(components) > 1:
+        attribute = components[1]
+        try:
+            return getattr(module, attribute)
+        except AttributeError as e:
+            raise ImportError(f'cannot import name {attribute!r} from {module_path!r}') from e
+    else:
+        return module
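The same logic backs the public `ImportString` type: a colon separates the module from the attribute, and a plain dotted path is retried with its last component treated as an attribute.

import math
from pydantic import ImportString, TypeAdapter

ta = TypeAdapter(ImportString)
assert ta.validate_python('math:pi') == math.pi
assert ta.validate_python('math.pi') == math.pi   # falls back to the 'math:pi' retry above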
+
+
+def pattern_either_validator(input_value: Any, /) -> typing.Pattern[Any]:
+    if isinstance(input_value, typing.Pattern):
+        return input_value
+    elif isinstance(input_value, (str, bytes)):
+        # todo strict mode
+        return compile_pattern(input_value)  # type: ignore
+    else:
+        raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')
+
+
+def pattern_str_validator(input_value: Any, /) -> typing.Pattern[str]:
+    if isinstance(input_value, typing.Pattern):
+        if isinstance(input_value.pattern, str):
+            return input_value
+        else:
+            raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern')
+    elif isinstance(input_value, str):
+        return compile_pattern(input_value)
+    elif isinstance(input_value, bytes):
+        raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern')
+    else:
+        raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')
+
+
+def pattern_bytes_validator(input_value: Any, /) -> typing.Pattern[bytes]:
+    if isinstance(input_value, typing.Pattern):
+        if isinstance(input_value.pattern, bytes):
+            return input_value
+        else:
+            raise PydanticCustomError('pattern_bytes_type', 'Input should be a bytes pattern')
+    elif isinstance(input_value, bytes):
+        return compile_pattern(input_value)
+    elif isinstance(input_value, str):
+        raise PydanticCustomError('pattern_bytes_type', 'Input should be a bytes pattern')
+    else:
+        raise PydanticCustomError('pattern_type', 'Input should be a valid pattern')


 PatternType = typing.TypeVar('PatternType', str, bytes)


-def ip_v4_network_validator(input_value: Any, /) ->IPv4Network:
+def compile_pattern(pattern: PatternType) -> typing.Pattern[PatternType]:
+    try:
+        return re.compile(pattern)
+    except re.error:
+        raise PydanticCustomError('pattern_regex', 'Input should be a valid regular expression')
+
+
+def ip_v4_address_validator(input_value: Any, /) -> IPv4Address:
+    if isinstance(input_value, IPv4Address):
+        return input_value
+
+    try:
+        return IPv4Address(input_value)
+    except ValueError:
+        raise PydanticCustomError('ip_v4_address', 'Input is not a valid IPv4 address')
+
+
+def ip_v6_address_validator(input_value: Any, /) -> IPv6Address:
+    if isinstance(input_value, IPv6Address):
+        return input_value
+
+    try:
+        return IPv6Address(input_value)
+    except ValueError:
+        raise PydanticCustomError('ip_v6_address', 'Input is not a valid IPv6 address')
+
+
+def ip_v4_network_validator(input_value: Any, /) -> IPv4Network:
     """Assume IPv4Network initialised with a default `strict` argument.

     See more:
     https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network
     """
-    pass
+    if isinstance(input_value, IPv4Network):
+        return input_value
+
+    try:
+        return IPv4Network(input_value)
+    except ValueError:
+        raise PydanticCustomError('ip_v4_network', 'Input is not a valid IPv4 network')


-def ip_v6_network_validator(input_value: Any, /) ->IPv6Network:
+def ip_v6_network_validator(input_value: Any, /) -> IPv6Network:
     """Assume IPv6Network initialised with a default `strict` argument.

     See more:
     https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network
     """
-    pass
+    if isinstance(input_value, IPv6Network):
+        return input_value
+
+    try:
+        return IPv6Network(input_value)
+    except ValueError:
+        raise PydanticCustomError('ip_v6_network', 'Input is not a valid IPv6 network')
+
+
+def ip_v4_interface_validator(input_value: Any, /) -> IPv4Interface:
+    if isinstance(input_value, IPv4Interface):
+        return input_value
+
+    try:
+        return IPv4Interface(input_value)
+    except ValueError:
+        raise PydanticCustomError('ip_v4_interface', 'Input is not a valid IPv4 interface')
+
+
+def ip_v6_interface_validator(input_value: Any, /) -> IPv6Interface:
+    if isinstance(input_value, IPv6Interface):
+        return input_value
+
+    try:
+        return IPv6Interface(input_value)
+    except ValueError:
+        raise PydanticCustomError('ip_v6_interface', 'Input is not a valid IPv6 interface')
+
+
+def greater_than_validator(x: Any, gt: Any) -> Any:
+    if not (x > gt):
+        raise PydanticKnownError('greater_than', {'gt': gt})
+    return x
+
+
+def greater_than_or_equal_validator(x: Any, ge: Any) -> Any:
+    if not (x >= ge):
+        raise PydanticKnownError('greater_than_equal', {'ge': ge})
+    return x
+
+
+def less_than_validator(x: Any, lt: Any) -> Any:
+    if not (x < lt):
+        raise PydanticKnownError('less_than', {'lt': lt})
+    return x
+
+
+def less_than_or_equal_validator(x: Any, le: Any) -> Any:
+    if not (x <= le):
+        raise PydanticKnownError('less_than_equal', {'le': le})
+    return x
+
+
+def multiple_of_validator(x: Any, multiple_of: Any) -> Any:
+    if not (x % multiple_of == 0):
+        raise PydanticKnownError('multiple_of', {'multiple_of': multiple_of})
+    return x
+
+
+def min_length_validator(x: Any, min_length: Any) -> Any:
+    if not (len(x) >= min_length):
+        raise PydanticKnownError(
+            'too_short',
+            {'field_type': 'Value', 'min_length': min_length, 'actual_length': len(x)},
+        )
+    return x
+
+
+def max_length_validator(x: Any, max_length: Any) -> Any:
+    if len(x) > max_length:
+        raise PydanticKnownError(
+            'too_long',
+            {'field_type': 'Value', 'max_length': max_length, 'actual_length': len(x)},
+        )
+    return x
+
+
+def forbid_inf_nan_check(x: Any) -> Any:
+    if not math.isfinite(x):
+        raise PydanticKnownError('finite_number')
+    return x


-_CONSTRAINT_TO_VALIDATOR_MAP: dict[str, Callable] = {'gt':
-    greater_than_validator, 'ge': greater_than_or_equal_validator, 'lt':
-    less_than_validator, 'le': less_than_or_equal_validator, 'multiple_of':
-    multiple_of_validator, 'min_length': min_length_validator, 'max_length':
-    max_length_validator}
+_CONSTRAINT_TO_VALIDATOR_MAP: dict[str, Callable] = {
+    'gt': greater_than_validator,
+    'ge': greater_than_or_equal_validator,
+    'lt': less_than_validator,
+    'le': less_than_or_equal_validator,
+    'multiple_of': multiple_of_validator,
+    'min_length': min_length_validator,
+    'max_length': max_length_validator,
+}


-def get_constraint_validator(constraint: str) ->Callable:
+def get_constraint_validator(constraint: str) -> Callable:
     """Fetch the validator function for the given constraint."""
-    pass
+    try:
+        return _CONSTRAINT_TO_VALIDATOR_MAP[constraint]
+    except KeyError:
+        raise TypeError(f'Unknown constraint {constraint}')
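A quick, hedged demonstration of the lookup table above; these helpers live in the private `pydantic._internal._validators` module, so the import is illustrative only.

from pydantic._internal._validators import get_constraint_validator

gt = get_constraint_validator('gt')
print(gt(10, 3))           # 10: the constraint holds, the value is passed through
try:
    gt(1, 3)
except Exception as exc:   # a pydantic_core PydanticKnownError of type 'greater_than'
    print(type(exc).__name__)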
diff --git a/pydantic/_migration.py b/pydantic/_migration.py
index ef115aae8..c8478a624 100644
--- a/pydantic/_migration.py
+++ b/pydantic/_migration.py
@@ -1,48 +1,71 @@
 import sys
 from typing import Any, Callable, Dict
+
 from .version import version_short
-MOVED_IN_V2 = {'pydantic.utils:version_info':
-    'pydantic.version:version_info',
+
+MOVED_IN_V2 = {
+    'pydantic.utils:version_info': 'pydantic.version:version_info',
     'pydantic.error_wrappers:ValidationError': 'pydantic:ValidationError',
     'pydantic.utils:to_camel': 'pydantic.alias_generators:to_pascal',
     'pydantic.utils:to_lower_camel': 'pydantic.alias_generators:to_camel',
     'pydantic:PyObject': 'pydantic.types:ImportString',
     'pydantic.types:PyObject': 'pydantic.types:ImportString',
-    'pydantic.generics:GenericModel': 'pydantic.BaseModel'}
-DEPRECATED_MOVED_IN_V2 = {'pydantic.tools:schema_of':
-    'pydantic.deprecated.tools:schema_of', 'pydantic.tools:parse_obj_as':
-    'pydantic.deprecated.tools:parse_obj_as',
-    'pydantic.tools:schema_json_of':
-    'pydantic.deprecated.tools:schema_json_of',
-    'pydantic.json:pydantic_encoder':
-    'pydantic.deprecated.json:pydantic_encoder',
-    'pydantic:validate_arguments':
-    'pydantic.deprecated.decorator:validate_arguments',
-    'pydantic.json:custom_pydantic_encoder':
-    'pydantic.deprecated.json:custom_pydantic_encoder',
-    'pydantic.json:timedelta_isoformat':
-    'pydantic.deprecated.json:timedelta_isoformat',
-    'pydantic.decorator:validate_arguments':
-    'pydantic.deprecated.decorator:validate_arguments',
-    'pydantic.class_validators:validator':
-    'pydantic.deprecated.class_validators:validator',
-    'pydantic.class_validators:root_validator':
-    'pydantic.deprecated.class_validators:root_validator',
+    'pydantic.generics:GenericModel': 'pydantic.BaseModel',
+}
+
+DEPRECATED_MOVED_IN_V2 = {
+    'pydantic.tools:schema_of': 'pydantic.deprecated.tools:schema_of',
+    'pydantic.tools:parse_obj_as': 'pydantic.deprecated.tools:parse_obj_as',
+    'pydantic.tools:schema_json_of': 'pydantic.deprecated.tools:schema_json_of',
+    'pydantic.json:pydantic_encoder': 'pydantic.deprecated.json:pydantic_encoder',
+    'pydantic:validate_arguments': 'pydantic.deprecated.decorator:validate_arguments',
+    'pydantic.json:custom_pydantic_encoder': 'pydantic.deprecated.json:custom_pydantic_encoder',
+    'pydantic.json:timedelta_isoformat': 'pydantic.deprecated.json:timedelta_isoformat',
+    'pydantic.decorator:validate_arguments': 'pydantic.deprecated.decorator:validate_arguments',
+    'pydantic.class_validators:validator': 'pydantic.deprecated.class_validators:validator',
+    'pydantic.class_validators:root_validator': 'pydantic.deprecated.class_validators:root_validator',
     'pydantic.config:BaseConfig': 'pydantic.deprecated.config:BaseConfig',
-    'pydantic.config:Extra': 'pydantic.deprecated.config:Extra'}
-REDIRECT_TO_V1 = {f'pydantic.utils:{obj}': f'pydantic.v1.utils:{obj}' for
-    obj in ('deep_update', 'GetterDict', 'lenient_issubclass',
-    'lenient_isinstance', 'is_valid_field', 'update_not_none',
-    'import_string', 'Representation', 'ROOT_KEY', 'smart_deepcopy',
-    'sequence_like')}
-REMOVED_IN_V2 = {'pydantic:ConstrainedBytes', 'pydantic:ConstrainedDate',
-    'pydantic:ConstrainedDecimal', 'pydantic:ConstrainedFloat',
-    'pydantic:ConstrainedFrozenSet', 'pydantic:ConstrainedInt',
-    'pydantic:ConstrainedList', 'pydantic:ConstrainedSet',
-    'pydantic:ConstrainedStr', 'pydantic:JsonWrapper', 'pydantic:NoneBytes',
-    'pydantic:NoneStr', 'pydantic:NoneStrBytes', 'pydantic:Protocol',
-    'pydantic:Required', 'pydantic:StrBytes', 'pydantic:compiled',
-    'pydantic.config:get_config', 'pydantic.config:inherit_config',
+    'pydantic.config:Extra': 'pydantic.deprecated.config:Extra',
+}
+
+REDIRECT_TO_V1 = {
+    f'pydantic.utils:{obj}': f'pydantic.v1.utils:{obj}'
+    for obj in (
+        'deep_update',
+        'GetterDict',
+        'lenient_issubclass',
+        'lenient_isinstance',
+        'is_valid_field',
+        'update_not_none',
+        'import_string',
+        'Representation',
+        'ROOT_KEY',
+        'smart_deepcopy',
+        'sequence_like',
+    )
+}
+
+
+REMOVED_IN_V2 = {
+    'pydantic:ConstrainedBytes',
+    'pydantic:ConstrainedDate',
+    'pydantic:ConstrainedDecimal',
+    'pydantic:ConstrainedFloat',
+    'pydantic:ConstrainedFrozenSet',
+    'pydantic:ConstrainedInt',
+    'pydantic:ConstrainedList',
+    'pydantic:ConstrainedSet',
+    'pydantic:ConstrainedStr',
+    'pydantic:JsonWrapper',
+    'pydantic:NoneBytes',
+    'pydantic:NoneStr',
+    'pydantic:NoneStrBytes',
+    'pydantic:Protocol',
+    'pydantic:Required',
+    'pydantic:StrBytes',
+    'pydantic:compiled',
+    'pydantic.config:get_config',
+    'pydantic.config:inherit_config',
     'pydantic.config:prepare_config',
     'pydantic:create_model_from_namedtuple',
     'pydantic:create_model_from_typeddict',
@@ -56,107 +79,174 @@ REMOVED_IN_V2 = {'pydantic:ConstrainedBytes', 'pydantic:ConstrainedDate',
     'pydantic.error_wrappers:ErrorWrapper',
     'pydantic.errors:AnyStrMaxLengthError',
     'pydantic.errors:AnyStrMinLengthError',
-    'pydantic.errors:ArbitraryTypeError', 'pydantic.errors:BoolError',
-    'pydantic.errors:BytesError', 'pydantic.errors:CallableError',
-    'pydantic.errors:ClassError', 'pydantic.errors:ColorError',
-    'pydantic.errors:ConfigError', 'pydantic.errors:DataclassTypeError',
-    'pydantic.errors:DateError', 'pydantic.errors:DateNotInTheFutureError',
+    'pydantic.errors:ArbitraryTypeError',
+    'pydantic.errors:BoolError',
+    'pydantic.errors:BytesError',
+    'pydantic.errors:CallableError',
+    'pydantic.errors:ClassError',
+    'pydantic.errors:ColorError',
+    'pydantic.errors:ConfigError',
+    'pydantic.errors:DataclassTypeError',
+    'pydantic.errors:DateError',
+    'pydantic.errors:DateNotInTheFutureError',
     'pydantic.errors:DateNotInThePastError',
-    'pydantic.errors:DateTimeError', 'pydantic.errors:DecimalError',
+    'pydantic.errors:DateTimeError',
+    'pydantic.errors:DecimalError',
     'pydantic.errors:DecimalIsNotFiniteError',
     'pydantic.errors:DecimalMaxDigitsError',
     'pydantic.errors:DecimalMaxPlacesError',
-    'pydantic.errors:DecimalWholeDigitsError', 'pydantic.errors:DictError',
-    'pydantic.errors:DurationError', 'pydantic.errors:EmailError',
-    'pydantic.errors:EnumError', 'pydantic.errors:EnumMemberError',
-    'pydantic.errors:ExtraError', 'pydantic.errors:FloatError',
+    'pydantic.errors:DecimalWholeDigitsError',
+    'pydantic.errors:DictError',
+    'pydantic.errors:DurationError',
+    'pydantic.errors:EmailError',
+    'pydantic.errors:EnumError',
+    'pydantic.errors:EnumMemberError',
+    'pydantic.errors:ExtraError',
+    'pydantic.errors:FloatError',
     'pydantic.errors:FrozenSetError',
     'pydantic.errors:FrozenSetMaxLengthError',
     'pydantic.errors:FrozenSetMinLengthError',
-    'pydantic.errors:HashableError', 'pydantic.errors:IPv4AddressError',
+    'pydantic.errors:HashableError',
+    'pydantic.errors:IPv4AddressError',
     'pydantic.errors:IPv4InterfaceError',
-    'pydantic.errors:IPv4NetworkError', 'pydantic.errors:IPv6AddressError',
+    'pydantic.errors:IPv4NetworkError',
+    'pydantic.errors:IPv6AddressError',
     'pydantic.errors:IPv6InterfaceError',
     'pydantic.errors:IPv6NetworkError',
     'pydantic.errors:IPvAnyAddressError',
     'pydantic.errors:IPvAnyInterfaceError',
-    'pydantic.errors:IPvAnyNetworkError', 'pydantic.errors:IntEnumError',
-    'pydantic.errors:IntegerError', 'pydantic.errors:InvalidByteSize',
+    'pydantic.errors:IPvAnyNetworkError',
+    'pydantic.errors:IntEnumError',
+    'pydantic.errors:IntegerError',
+    'pydantic.errors:InvalidByteSize',
     'pydantic.errors:InvalidByteSizeUnit',
     'pydantic.errors:InvalidDiscriminator',
-    'pydantic.errors:InvalidLengthForBrand', 'pydantic.errors:JsonError',
-    'pydantic.errors:JsonTypeError', 'pydantic.errors:ListError',
+    'pydantic.errors:InvalidLengthForBrand',
+    'pydantic.errors:JsonError',
+    'pydantic.errors:JsonTypeError',
+    'pydantic.errors:ListError',
     'pydantic.errors:ListMaxLengthError',
     'pydantic.errors:ListMinLengthError',
     'pydantic.errors:ListUniqueItemsError',
     'pydantic.errors:LuhnValidationError',
-    'pydantic.errors:MissingDiscriminator', 'pydantic.errors:MissingError',
+    'pydantic.errors:MissingDiscriminator',
+    'pydantic.errors:MissingError',
     'pydantic.errors:NoneIsAllowedError',
     'pydantic.errors:NoneIsNotAllowedError',
-    'pydantic.errors:NotDigitError', 'pydantic.errors:NotNoneError',
-    'pydantic.errors:NumberNotGeError', 'pydantic.errors:NumberNotGtError',
-    'pydantic.errors:NumberNotLeError', 'pydantic.errors:NumberNotLtError',
-    'pydantic.errors:NumberNotMultipleError', 'pydantic.errors:PathError',
+    'pydantic.errors:NotDigitError',
+    'pydantic.errors:NotNoneError',
+    'pydantic.errors:NumberNotGeError',
+    'pydantic.errors:NumberNotGtError',
+    'pydantic.errors:NumberNotLeError',
+    'pydantic.errors:NumberNotLtError',
+    'pydantic.errors:NumberNotMultipleError',
+    'pydantic.errors:PathError',
     'pydantic.errors:PathNotADirectoryError',
     'pydantic.errors:PathNotAFileError',
-    'pydantic.errors:PathNotExistsError', 'pydantic.errors:PatternError',
-    'pydantic.errors:PyObjectError', 'pydantic.errors:PydanticTypeError',
-    'pydantic.errors:PydanticValueError', 'pydantic.errors:SequenceError',
-    'pydantic.errors:SetError', 'pydantic.errors:SetMaxLengthError',
-    'pydantic.errors:SetMinLengthError', 'pydantic.errors:StrError',
-    'pydantic.errors:StrRegexError', 'pydantic.errors:StrictBoolError',
-    'pydantic.errors:SubclassError', 'pydantic.errors:TimeError',
-    'pydantic.errors:TupleError', 'pydantic.errors:TupleLengthError',
-    'pydantic.errors:UUIDError', 'pydantic.errors:UUIDVersionError',
-    'pydantic.errors:UrlError', 'pydantic.errors:UrlExtraError',
-    'pydantic.errors:UrlHostError', 'pydantic.errors:UrlHostTldError',
-    'pydantic.errors:UrlPortError', 'pydantic.errors:UrlSchemeError',
+    'pydantic.errors:PathNotExistsError',
+    'pydantic.errors:PatternError',
+    'pydantic.errors:PyObjectError',
+    'pydantic.errors:PydanticTypeError',
+    'pydantic.errors:PydanticValueError',
+    'pydantic.errors:SequenceError',
+    'pydantic.errors:SetError',
+    'pydantic.errors:SetMaxLengthError',
+    'pydantic.errors:SetMinLengthError',
+    'pydantic.errors:StrError',
+    'pydantic.errors:StrRegexError',
+    'pydantic.errors:StrictBoolError',
+    'pydantic.errors:SubclassError',
+    'pydantic.errors:TimeError',
+    'pydantic.errors:TupleError',
+    'pydantic.errors:TupleLengthError',
+    'pydantic.errors:UUIDError',
+    'pydantic.errors:UUIDVersionError',
+    'pydantic.errors:UrlError',
+    'pydantic.errors:UrlExtraError',
+    'pydantic.errors:UrlHostError',
+    'pydantic.errors:UrlHostTldError',
+    'pydantic.errors:UrlPortError',
+    'pydantic.errors:UrlSchemeError',
     'pydantic.errors:UrlSchemePermittedError',
     'pydantic.errors:UrlUserInfoError',
-    'pydantic.errors:WrongConstantError', 'pydantic.main:validate_model',
-    'pydantic.networks:stricturl', 'pydantic:parse_file_as',
-    'pydantic:parse_raw_as', 'pydantic:stricturl',
-    'pydantic.tools:parse_file_as', 'pydantic.tools:parse_raw_as',
-    'pydantic.types:ConstrainedBytes', 'pydantic.types:ConstrainedDate',
-    'pydantic.types:ConstrainedDecimal', 'pydantic.types:ConstrainedFloat',
-    'pydantic.types:ConstrainedFrozenSet', 'pydantic.types:ConstrainedInt',
-    'pydantic.types:ConstrainedList', 'pydantic.types:ConstrainedSet',
-    'pydantic.types:ConstrainedStr', 'pydantic.types:JsonWrapper',
-    'pydantic.types:NoneBytes', 'pydantic.types:NoneStr',
-    'pydantic.types:NoneStrBytes', 'pydantic.types:StrBytes',
+    'pydantic.errors:WrongConstantError',
+    'pydantic.main:validate_model',
+    'pydantic.networks:stricturl',
+    'pydantic:parse_file_as',
+    'pydantic:parse_raw_as',
+    'pydantic:stricturl',
+    'pydantic.tools:parse_file_as',
+    'pydantic.tools:parse_raw_as',
+    'pydantic.types:ConstrainedBytes',
+    'pydantic.types:ConstrainedDate',
+    'pydantic.types:ConstrainedDecimal',
+    'pydantic.types:ConstrainedFloat',
+    'pydantic.types:ConstrainedFrozenSet',
+    'pydantic.types:ConstrainedInt',
+    'pydantic.types:ConstrainedList',
+    'pydantic.types:ConstrainedSet',
+    'pydantic.types:ConstrainedStr',
+    'pydantic.types:JsonWrapper',
+    'pydantic.types:NoneBytes',
+    'pydantic.types:NoneStr',
+    'pydantic.types:NoneStrBytes',
+    'pydantic.types:StrBytes',
     'pydantic.typing:evaluate_forwardref',
-    'pydantic.typing:AbstractSetIntStr', 'pydantic.typing:AnyCallable',
-    'pydantic.typing:AnyClassMethod', 'pydantic.typing:CallableGenerator',
-    'pydantic.typing:DictAny', 'pydantic.typing:DictIntStrAny',
-    'pydantic.typing:DictStrAny', 'pydantic.typing:IntStr',
-    'pydantic.typing:ListStr', 'pydantic.typing:MappingIntStrAny',
-    'pydantic.typing:NoArgAnyCallable', 'pydantic.typing:NoneType',
-    'pydantic.typing:ReprArgs', 'pydantic.typing:SetStr',
-    'pydantic.typing:StrPath', 'pydantic.typing:TupleGenerator',
-    'pydantic.typing:WithArgsTypes', 'pydantic.typing:all_literal_values',
-    'pydantic.typing:display_as_type', 'pydantic.typing:get_all_type_hints',
-    'pydantic.typing:get_args', 'pydantic.typing:get_origin',
-    'pydantic.typing:get_sub_types', 'pydantic.typing:is_callable_type',
-    'pydantic.typing:is_classvar', 'pydantic.typing:is_finalvar',
-    'pydantic.typing:is_literal_type', 'pydantic.typing:is_namedtuple',
-    'pydantic.typing:is_new_type', 'pydantic.typing:is_none_type',
-    'pydantic.typing:is_typeddict', 'pydantic.typing:is_typeddict_special',
-    'pydantic.typing:is_union', 'pydantic.typing:new_type_supertype',
-    'pydantic.typing:resolve_annotations', 'pydantic.typing:typing_base',
+    'pydantic.typing:AbstractSetIntStr',
+    'pydantic.typing:AnyCallable',
+    'pydantic.typing:AnyClassMethod',
+    'pydantic.typing:CallableGenerator',
+    'pydantic.typing:DictAny',
+    'pydantic.typing:DictIntStrAny',
+    'pydantic.typing:DictStrAny',
+    'pydantic.typing:IntStr',
+    'pydantic.typing:ListStr',
+    'pydantic.typing:MappingIntStrAny',
+    'pydantic.typing:NoArgAnyCallable',
+    'pydantic.typing:NoneType',
+    'pydantic.typing:ReprArgs',
+    'pydantic.typing:SetStr',
+    'pydantic.typing:StrPath',
+    'pydantic.typing:TupleGenerator',
+    'pydantic.typing:WithArgsTypes',
+    'pydantic.typing:all_literal_values',
+    'pydantic.typing:display_as_type',
+    'pydantic.typing:get_all_type_hints',
+    'pydantic.typing:get_args',
+    'pydantic.typing:get_origin',
+    'pydantic.typing:get_sub_types',
+    'pydantic.typing:is_callable_type',
+    'pydantic.typing:is_classvar',
+    'pydantic.typing:is_finalvar',
+    'pydantic.typing:is_literal_type',
+    'pydantic.typing:is_namedtuple',
+    'pydantic.typing:is_new_type',
+    'pydantic.typing:is_none_type',
+    'pydantic.typing:is_typeddict',
+    'pydantic.typing:is_typeddict_special',
+    'pydantic.typing:is_union',
+    'pydantic.typing:new_type_supertype',
+    'pydantic.typing:resolve_annotations',
+    'pydantic.typing:typing_base',
     'pydantic.typing:update_field_forward_refs',
     'pydantic.typing:update_model_forward_refs',
-    'pydantic.utils:ClassAttribute', 'pydantic.utils:DUNDER_ATTRIBUTES',
-    'pydantic.utils:PyObjectStr', 'pydantic.utils:ValueItems',
+    'pydantic.utils:ClassAttribute',
+    'pydantic.utils:DUNDER_ATTRIBUTES',
+    'pydantic.utils:PyObjectStr',
+    'pydantic.utils:ValueItems',
     'pydantic.utils:almost_equal_floats',
     'pydantic.utils:get_discriminator_alias_and_values',
     'pydantic.utils:get_model',
     'pydantic.utils:get_unique_discriminator_alias',
-    'pydantic.utils:in_ipython', 'pydantic.utils:is_valid_identifier',
-    'pydantic.utils:path_type', 'pydantic.utils:validate_field_name',
-    'pydantic:validate_model'}
+    'pydantic.utils:in_ipython',
+    'pydantic.utils:is_valid_identifier',
+    'pydantic.utils:path_type',
+    'pydantic.utils:validate_field_name',
+    'pydantic:validate_model',
+}


-def getattr_migration(module: str) ->Callable[[str], Any]:
+def getattr_migration(module: str) -> Callable[[str], Any]:
     """Implement PEP 562 for objects that were either moved or removed on the migration
     to V2.

@@ -166,4 +256,53 @@ def getattr_migration(module: str) ->Callable[[str], Any]:
     Returns:
         A callable that will raise an error if the object is not found.
     """
-    pass
+    # This avoids circular import with errors.py.
+    from .errors import PydanticImportError
+
+    def wrapper(name: str) -> object:
+        """Raise an error if the object is not found, or warn if it was moved.
+
+        In case it was moved, it still returns the object.
+
+        Args:
+            name: The object name.
+
+        Returns:
+            The object.
+        """
+        if name == '__path__':
+            raise AttributeError(f'module {module!r} has no attribute {name!r}')
+
+        import warnings
+
+        from ._internal._validators import import_string
+
+        import_path = f'{module}:{name}'
+        if import_path in MOVED_IN_V2.keys():
+            new_location = MOVED_IN_V2[import_path]
+            warnings.warn(f'`{import_path}` has been moved to `{new_location}`.')
+            return import_string(MOVED_IN_V2[import_path])
+        if import_path in DEPRECATED_MOVED_IN_V2:
+            # skip the warning here because a deprecation warning will be raised elsewhere
+            return import_string(DEPRECATED_MOVED_IN_V2[import_path])
+        if import_path in REDIRECT_TO_V1:
+            new_location = REDIRECT_TO_V1[import_path]
+            warnings.warn(
+                f'`{import_path}` has been removed. We are importing from `{new_location}` instead. '
+                'See the migration guide for more details: https://docs.pydantic.dev/latest/migration/'
+            )
+            return import_string(REDIRECT_TO_V1[import_path])
+        if import_path == 'pydantic:BaseSettings':
+            raise PydanticImportError(
+                '`BaseSettings` has been moved to the `pydantic-settings` package. '
+                f'See https://docs.pydantic.dev/{version_short()}/migration/#basesettings-has-moved-to-pydantic-settings '
+                'for more details.'
+            )
+        if import_path in REMOVED_IN_V2:
+            raise PydanticImportError(f'`{import_path}` has been removed in V2.')
+        globals: Dict[str, Any] = sys.modules[module].__dict__
+        if name in globals:
+            return globals[name]
+        raise AttributeError(f'module {module!r} has no attribute {name!r}')
+
+    return wrapper
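A rough illustration of the wrapper in action through one of the backport modules that installs it as module-level `__getattr__` (assuming `pydantic.utils` still ships as such a backport, as it does in current v2 releases).

import warnings
import pydantic.utils

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    fn = pydantic.utils.version_info     # listed in MOVED_IN_V2

print(fn.__module__)                     # pydantic.version
print(caught[0].message)                 # `pydantic.utils:version_info` has been moved to `pydantic.version:version_info`.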
diff --git a/pydantic/alias_generators.py b/pydantic/alias_generators.py
index c68d768b2..0b7653f58 100644
--- a/pydantic/alias_generators.py
+++ b/pydantic/alias_generators.py
@@ -1,9 +1,15 @@
 """Alias generators for converting between different capitalization conventions."""
+
 import re
-__all__ = 'to_pascal', 'to_camel', 'to_snake'

+__all__ = ('to_pascal', 'to_camel', 'to_snake')
+
+# TODO: in V3, change the argument names to be more descriptive
+# Generally, don't only convert from snake_case, or name the functions
+# more specifically like snake_to_camel.

-def to_pascal(snake: str) ->str:
+
+def to_pascal(snake: str) -> str:
     """Convert a snake_case string to PascalCase.

     Args:
@@ -12,10 +18,11 @@ def to_pascal(snake: str) ->str:
     Returns:
         The PascalCase string.
     """
-    pass
+    camel = snake.title()
+    return re.sub('([0-9A-Za-z])_(?=[0-9A-Z])', lambda m: m.group(1), camel)


-def to_camel(snake: str) ->str:
+def to_camel(snake: str) -> str:
     """Convert a snake_case string to camelCase.

     Args:
@@ -24,10 +31,16 @@ def to_camel(snake: str) ->str:
     Returns:
         The converted camelCase string.
     """
-    pass
+    # If the string is already in camelCase and does not contain a digit followed
+    # by a lowercase letter, return it as it is
+    if re.match('^[a-z]+[A-Za-z0-9]*$', snake) and not re.search(r'\d[a-z]', snake):
+        return snake
+
+    camel = to_pascal(snake)
+    return re.sub('(^_*[A-Z])', lambda m: m.group(1).lower(), camel)


-def to_snake(camel: str) ->str:
+def to_snake(camel: str) -> str:
     """Convert a PascalCase, camelCase, or kebab-case string to snake_case.

     Args:
@@ -36,4 +49,14 @@ def to_snake(camel: str) ->str:
     Returns:
         The converted string in snake_case.
     """
-    pass
+    # Handle the sequence of uppercase letters followed by a lowercase letter
+    snake = re.sub(r'([A-Z]+)([A-Z][a-z])', lambda m: f'{m.group(1)}_{m.group(2)}', camel)
+    # Insert an underscore between a lowercase letter and an uppercase letter
+    snake = re.sub(r'([a-z])([A-Z])', lambda m: f'{m.group(1)}_{m.group(2)}', snake)
+    # Insert an underscore between a digit and an uppercase letter
+    snake = re.sub(r'([0-9])([A-Z])', lambda m: f'{m.group(1)}_{m.group(2)}', snake)
+    # Insert an underscore between a lowercase letter and a digit
+    snake = re.sub(r'([a-z])([0-9])', lambda m: f'{m.group(1)}_{m.group(2)}', snake)
+    # Replace hyphens with underscores to handle kebab-case
+    snake = snake.replace('-', '_')
+    return snake.lower()
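A few spot checks of the three converters above.

from pydantic.alias_generators import to_camel, to_pascal, to_snake

print(to_pascal('snake_case_name'))   # SnakeCaseName
print(to_camel('snake_case_name'))    # snakeCaseName
print(to_snake('HTTPResponse'))       # http_response
print(to_snake('kebab-case-name'))    # kebab_case_name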
diff --git a/pydantic/aliases.py b/pydantic/aliases.py
index a6947b0cd..441fee162 100644
--- a/pydantic/aliases.py
+++ b/pydantic/aliases.py
@@ -1,10 +1,15 @@
 """Support for alias configurations."""
+
 from __future__ import annotations
+
 import dataclasses
 from typing import Any, Callable, Literal
+
 from pydantic_core import PydanticUndefined
+
 from ._internal import _internal_dataclass
-__all__ = 'AliasGenerator', 'AliasPath', 'AliasChoices'
+
+__all__ = ('AliasGenerator', 'AliasPath', 'AliasChoices')


 @dataclasses.dataclass(**_internal_dataclass.slots_true)
@@ -16,26 +21,36 @@ class AliasPath:
     Attributes:
         path: A list of string or integer aliases.
     """
+
     path: list[int | str]

-    def __init__(self, first_arg: str, *args: (str | int)) ->None:
+    def __init__(self, first_arg: str, *args: str | int) -> None:
         self.path = [first_arg] + list(args)

-    def convert_to_aliases(self) ->list[str | int]:
+    def convert_to_aliases(self) -> list[str | int]:
         """Converts arguments to a list of string or integer aliases.

         Returns:
             The list of aliases.
         """
-        pass
+        return self.path

-    def search_dict_for_path(self, d: dict) ->Any:
+    def search_dict_for_path(self, d: dict) -> Any:
         """Searches a dictionary for the path specified by the alias.

         Returns:
             The value at the specified path, or `PydanticUndefined` if the path is not found.
         """
-        pass
+        v = d
+        for k in self.path:
+            if isinstance(v, str):
+                # disallow indexing into a str, like for AliasPath('x', 0) and x='abc'
+                return PydanticUndefined
+            try:
+                v = v[k]
+            except (KeyError, IndexError, TypeError):
+                return PydanticUndefined
+        return v
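In user code `AliasPath` is normally passed as a field's `validation_alias`, which is where `search_dict_for_path` gets exercised.

from pydantic import AliasPath, BaseModel, Field

class User(BaseModel):
    first_name: str = Field(validation_alias=AliasPath('names', 0))

print(User.model_validate({'names': ['Ada', 'Lovelace']}))   # first_name='Ada'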


 @dataclasses.dataclass(**_internal_dataclass.slots_true)
@@ -47,19 +62,25 @@ class AliasChoices:
     Attributes:
         choices: A list containing a string or `AliasPath`.
     """
+
     choices: list[str | AliasPath]

-    def __init__(self, first_choice: (str | AliasPath), *choices: (str |
-        AliasPath)) ->None:
+    def __init__(self, first_choice: str | AliasPath, *choices: str | AliasPath) -> None:
         self.choices = [first_choice] + list(choices)

-    def convert_to_aliases(self) ->list[list[str | int]]:
+    def convert_to_aliases(self) -> list[list[str | int]]:
         """Converts arguments to a list of lists containing string or integer aliases.

         Returns:
             The list of aliases.
         """
-        pass
+        aliases: list[list[str | int]] = []
+        for c in self.choices:
+            if isinstance(c, AliasPath):
+                aliases.append(c.convert_to_aliases())
+            else:
+                aliases.append([c])
+        return aliases


 @dataclasses.dataclass(**_internal_dataclass.slots_true)
@@ -73,27 +94,39 @@ class AliasGenerator:
         validation_alias: A callable that takes a field name and returns a validation alias for it.
         serialization_alias: A callable that takes a field name and returns a serialization alias for it.
     """
+
     alias: Callable[[str], str] | None = None
-    validation_alias: Callable[[str], str | AliasPath | AliasChoices
-        ] | None = None
+    validation_alias: Callable[[str], str | AliasPath | AliasChoices] | None = None
     serialization_alias: Callable[[str], str] | None = None

-    def _generate_alias(self, alias_kind: Literal['alias',
-        'validation_alias', 'serialization_alias'], allowed_types: tuple[
-        type[str] | type[AliasPath] | type[AliasChoices], ...], field_name: str
-        ) ->(str | AliasPath | AliasChoices | None):
+    def _generate_alias(
+        self,
+        alias_kind: Literal['alias', 'validation_alias', 'serialization_alias'],
+        allowed_types: tuple[type[str] | type[AliasPath] | type[AliasChoices], ...],
+        field_name: str,
+    ) -> str | AliasPath | AliasChoices | None:
         """Generate an alias of the specified kind. Returns None if the alias generator is None.

         Raises:
             TypeError: If the alias generator produces an invalid type.
         """
-        pass
-
-    def generate_aliases(self, field_name: str) ->tuple[str | None, str |
-        AliasPath | AliasChoices | None, str | None]:
+        alias = None
+        if alias_generator := getattr(self, alias_kind):
+            alias = alias_generator(field_name)
+            if alias and not isinstance(alias, allowed_types):
+                raise TypeError(
+                    f'Invalid `{alias_kind}` type. `{alias_kind}` generator must produce one of `{allowed_types}`'
+                )
+        return alias
+
+    def generate_aliases(self, field_name: str) -> tuple[str | None, str | AliasPath | AliasChoices | None, str | None]:
         """Generate `alias`, `validation_alias`, and `serialization_alias` for a field.

         Returns:
             A tuple of three aliases - alias, validation, and serialization.
         """
-        pass
+        alias = self._generate_alias('alias', (str,), field_name)
+        validation_alias = self._generate_alias('validation_alias', (str, AliasChoices, AliasPath), field_name)
+        serialization_alias = self._generate_alias('serialization_alias', (str,), field_name)
+
+        return alias, validation_alias, serialization_alias  # type: ignore
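Typical wiring of the generator above through `model_config`, reusing the converters from `pydantic.alias_generators` shown earlier in this diff.

from pydantic import AliasGenerator, BaseModel, ConfigDict
from pydantic.alias_generators import to_camel, to_pascal

class Item(BaseModel):
    model_config = ConfigDict(
        alias_generator=AliasGenerator(
            validation_alias=to_camel,
            serialization_alias=to_pascal,
        )
    )
    item_id: int

item = Item.model_validate({'itemId': 1})
print(item.model_dump(by_alias=True))   # {'ItemId': 1}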
diff --git a/pydantic/annotated_handlers.py b/pydantic/annotated_handlers.py
index 92a4df504..ac3d21391 100644
--- a/pydantic/annotated_handlers.py
+++ b/pydantic/annotated_handlers.py
@@ -1,12 +1,22 @@
 """Type annotations to use with `__get_pydantic_core_schema__` and `__get_pydantic_json_schema__`."""
+
 from __future__ import annotations as _annotations
+
 from typing import TYPE_CHECKING, Any, Union
+
 from pydantic_core import core_schema
+
 if TYPE_CHECKING:
     from .json_schema import JsonSchemaMode, JsonSchemaValue
-    CoreSchemaOrField = Union[core_schema.CoreSchema, core_schema.
-        ModelField, core_schema.DataclassField, core_schema.TypedDictField,
-        core_schema.ComputedField]
+
+    CoreSchemaOrField = Union[
+        core_schema.CoreSchema,
+        core_schema.ModelField,
+        core_schema.DataclassField,
+        core_schema.TypedDictField,
+        core_schema.ComputedField,
+    ]
+
 __all__ = 'GetJsonSchemaHandler', 'GetCoreSchemaHandler'


@@ -16,9 +26,10 @@ class GetJsonSchemaHandler:
     Attributes:
         mode: Json schema mode, can be `validation` or `serialization`.
     """
+
     mode: JsonSchemaMode

-    def __call__(self, core_schema: CoreSchemaOrField, /) ->JsonSchemaValue:
+    def __call__(self, core_schema: CoreSchemaOrField, /) -> JsonSchemaValue:
         """Call the inner handler and get the JsonSchemaValue it returns.
         This will call the next JSON schema modifying function up until it calls
         into `pydantic.json_schema.GenerateJsonSchema`, which will raise a
@@ -34,8 +45,7 @@ class GetJsonSchemaHandler:
         """
         raise NotImplementedError

-    def resolve_ref_schema(self, maybe_ref_json_schema: JsonSchemaValue, /
-        ) ->JsonSchemaValue:
+    def resolve_ref_schema(self, maybe_ref_json_schema: JsonSchemaValue, /) -> JsonSchemaValue:
         """Get the real schema for a `{"$ref": ...}` schema.
         If the schema given is not a `$ref` schema, it will be returned as is.
         This means you don't have to check before calling this function.
@@ -49,13 +59,13 @@ class GetJsonSchemaHandler:
         Returns:
             JsonSchemaValue: A JsonSchemaValue that has no `$ref`.
         """
-        pass
+        raise NotImplementedError


 class GetCoreSchemaHandler:
     """Handler to call into the next CoreSchema schema generation function."""

-    def __call__(self, source_type: Any, /) ->core_schema.CoreSchema:
+    def __call__(self, source_type: Any, /) -> core_schema.CoreSchema:
         """Call the inner handler and get the CoreSchema it returns.
         This will call the next CoreSchema modifying function up until it calls
         into Pydantic's internal schema generation machinery, which will raise a
@@ -70,7 +80,7 @@ class GetCoreSchemaHandler:
         """
         raise NotImplementedError

-    def generate_schema(self, source_type: Any, /) ->core_schema.CoreSchema:
+    def generate_schema(self, source_type: Any, /) -> core_schema.CoreSchema:
         """Generate a schema unrelated to the current context.
         Use this function if e.g. you are handling schema generation for a sequence
         and want to generate a schema for its items.
@@ -83,10 +93,9 @@ class GetCoreSchemaHandler:
         Returns:
             CoreSchema: The `pydantic-core` CoreSchema generated.
         """
-        pass
+        raise NotImplementedError

-    def resolve_ref_schema(self, maybe_ref_schema: core_schema.CoreSchema, /
-        ) ->core_schema.CoreSchema:
+    def resolve_ref_schema(self, maybe_ref_schema: core_schema.CoreSchema, /) -> core_schema.CoreSchema:
         """Get the real schema for a `definition-ref` schema.
         If the schema given is not a `definition-ref` schema, it will be returned as is.
         This means you don't have to check before calling this function.
@@ -100,13 +109,13 @@ class GetCoreSchemaHandler:
         Returns:
             A concrete `CoreSchema`.
         """
-        pass
+        raise NotImplementedError

     @property
-    def field_name(self) ->(str | None):
+    def field_name(self) -> str | None:
         """Get the name of the closest field to this validator."""
-        pass
+        raise NotImplementedError

-    def _get_types_namespace(self) ->(dict[str, Any] | None):
+    def _get_types_namespace(self) -> dict[str, Any] | None:
         """Internal method used during type resolution for serializer annotations."""
-        pass
+        raise NotImplementedError
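A minimal custom type showing where `GetCoreSchemaHandler` is received: calling the handler generates the schema for the wrapped type, and the result can then be decorated with extra validation. The hook itself is the documented protocol; the `Upper` type is just an illustration.

from typing import Any
from pydantic import GetCoreSchemaHandler, TypeAdapter
from pydantic_core import core_schema

class Upper(str):
    @classmethod
    def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
        # delegate to the next handler for plain `str`, then post-process the validated value
        return core_schema.no_info_after_validator_function(lambda v: cls(v.upper()), handler(str))

print(TypeAdapter(Upper).validate_python('abc'))   # ABC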
diff --git a/pydantic/class_validators.py b/pydantic/class_validators.py
index dcb5f3bac..9977150c9 100644
--- a/pydantic/class_validators.py
+++ b/pydantic/class_validators.py
@@ -1,3 +1,5 @@
 """`class_validators` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/color.py b/pydantic/color.py
index dae7ca3d2..c702fc634 100644
--- a/pydantic/color.py
+++ b/pydantic/color.py
@@ -11,24 +11,28 @@ Warning: Deprecated
     See [`pydantic-extra-types.Color`](../usage/types/extra_types/color_types.md)
     for more information.
 """
+
 import math
 import re
 from colorsys import hls_to_rgb, rgb_to_hls
 from typing import Any, Callable, Optional, Tuple, Type, Union, cast
+
 from pydantic_core import CoreSchema, PydanticCustomError, core_schema
 from typing_extensions import deprecated
+
 from ._internal import _repr
 from ._internal._schema_generation_shared import GetJsonSchemaHandler as _GetJsonSchemaHandler
 from .json_schema import JsonSchemaValue
 from .warnings import PydanticDeprecatedSince20
+
 ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]]
 ColorType = Union[ColorTuple, str]
-HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float,
-    float]]
+HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]]


 class RGBA:
     """Internal use only as a representation of a color."""
+
     __slots__ = 'r', 'g', 'b', 'alpha', '_tuple'

     def __init__(self, r: float, g: float, b: float, alpha: Optional[float]):
@@ -36,45 +40,46 @@ class RGBA:
         self.g = g
         self.b = b
         self.alpha = alpha
-        self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b,
-            alpha)

-    def __getitem__(self, item: Any) ->Any:
+        self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha)
+
+    def __getitem__(self, item: Any) -> Any:
         return self._tuple[item]


-_r_255 = '(\\d{1,3}(?:\\.\\d+)?)'
-_r_comma = '\\s*,\\s*'
-_r_alpha = '(\\d(?:\\.\\d+)?|\\.\\d+|\\d{1,2}%)'
-_r_h = '(-?\\d+(?:\\.\\d+)?|-?\\.\\d+)(deg|rad|turn)?'
-_r_sl = '(\\d{1,3}(?:\\.\\d+)?)%'
-r_hex_short = '\\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\\s*'
-r_hex_long = (
-    '\\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\\s*')
-r_rgb = (
-    f'\\s*rgba?\\(\\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}(?:{_r_comma}{_r_alpha})?\\s*\\)\\s*'
-    )
-r_hsl = (
-    f'\\s*hsla?\\(\\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}(?:{_r_comma}{_r_alpha})?\\s*\\)\\s*'
-    )
-r_rgb_v4_style = (
-    f'\\s*rgba?\\(\\s*{_r_255}\\s+{_r_255}\\s+{_r_255}(?:\\s*/\\s*{_r_alpha})?\\s*\\)\\s*'
-    )
-r_hsl_v4_style = (
-    f'\\s*hsla?\\(\\s*{_r_h}\\s+{_r_sl}\\s+{_r_sl}(?:\\s*/\\s*{_r_alpha})?\\s*\\)\\s*'
-    )
+# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached
+_r_255 = r'(\d{1,3}(?:\.\d+)?)'
+_r_comma = r'\s*,\s*'
+_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)'
+_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?'
+_r_sl = r'(\d{1,3}(?:\.\d+)?)%'
+r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*'
+r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*'
+# CSS3 RGB examples: rgb(0, 0, 0), rgba(0, 0, 0, 0.5), rgba(0, 0, 0, 50%)
+r_rgb = rf'\s*rgba?\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}(?:{_r_comma}{_r_alpha})?\s*\)\s*'
+# CSS3 HSL examples: hsl(270, 60%, 50%), hsla(270, 60%, 50%, 0.5), hsla(270, 60%, 50%, 50%)
+r_hsl = rf'\s*hsla?\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}(?:{_r_comma}{_r_alpha})?\s*\)\s*'
+# CSS4 RGB examples: rgb(0 0 0), rgb(0 0 0 / 0.5), rgb(0 0 0 / 50%), rgba(0 0 0 / 50%)
+r_rgb_v4_style = rf'\s*rgba?\(\s*{_r_255}\s+{_r_255}\s+{_r_255}(?:\s*/\s*{_r_alpha})?\s*\)\s*'
+# CSS4 HSL examples: hsl(270 60% 50%), hsl(270 60% 50% / 0.5), hsl(270 60% 50% / 50%), hsla(270 60% 50% / 50%)
+r_hsl_v4_style = rf'\s*hsla?\(\s*{_r_h}\s+{_r_sl}\s+{_r_sl}(?:\s*/\s*{_r_alpha})?\s*\)\s*'
+
+# colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used
 repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'}
 rads = 2 * math.pi


 @deprecated(
-    'The `Color` class is deprecated, use `pydantic_extra_types` instead. See https://docs.pydantic.dev/latest/api/pydantic_extra_types_color/.'
-    , category=PydanticDeprecatedSince20)
+    'The `Color` class is deprecated, use `pydantic_extra_types` instead. '
+    'See https://docs.pydantic.dev/latest/api/pydantic_extra_types_color/.',
+    category=PydanticDeprecatedSince20,
+)
 class Color(_repr.Representation):
     """Represents a color."""
+
     __slots__ = '_original', '_rgba'

-    def __init__(self, value: ColorType) ->None:
+    def __init__(self, value: ColorType) -> None:
         self._rgba: RGBA
         self._original: ColorType
         if isinstance(value, (tuple, list)):
@@ -85,23 +90,26 @@ class Color(_repr.Representation):
             self._rgba = value._rgba
             value = value._original
         else:
-            raise PydanticCustomError('color_error',
-                'value is not a valid color: value must be a tuple, list or string'
-                )
+            raise PydanticCustomError(
+                'color_error', 'value is not a valid color: value must be a tuple, list or string'
+            )
+
+        # if we've got here value must be a valid color
         self._original = value

     @classmethod
-    def __get_pydantic_json_schema__(cls, core_schema: core_schema.
-        CoreSchema, handler: _GetJsonSchemaHandler) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(
+        cls, core_schema: core_schema.CoreSchema, handler: _GetJsonSchemaHandler
+    ) -> JsonSchemaValue:
         field_schema = {}
         field_schema.update(type='string', format='color')
         return field_schema

-    def original(self) ->ColorType:
+    def original(self) -> ColorType:
         """Original value passed to `Color`."""
-        pass
+        return self._original

-    def as_named(self, *, fallback: bool=False) ->str:
+    def as_named(self, *, fallback: bool = False) -> str:
         """Returns the name of the color if it can be found in `COLORS_BY_VALUE` dictionary,
         otherwise returns the hexadecimal representation of the color or raises `ValueError`.

@@ -115,9 +123,19 @@ class Color(_repr.Representation):
         Raises:
             ValueError: When no named color is found and fallback is `False`.
         """
-        pass
+        if self._rgba.alpha is None:
+            rgb = cast(Tuple[int, int, int], self.as_rgb_tuple())
+            try:
+                return COLORS_BY_VALUE[rgb]
+            except KeyError as e:
+                if fallback:
+                    return self.as_hex()
+                else:
+                    raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e
+        else:
+            return self.as_hex()

-    def as_hex(self) ->str:
+    def as_hex(self) -> str:
         """Returns the hexadecimal representation of the color.

         Hex string representing the color can be 3, 4, 6, or 8 characters depending on whether the string
@@ -126,13 +144,26 @@ class Color(_repr.Representation):
         Returns:
             The hexadecimal representation of the color.
         """
-        pass
+        values = [float_to_255(c) for c in self._rgba[:3]]
+        if self._rgba.alpha is not None:
+            values.append(float_to_255(self._rgba.alpha))
+
+        as_hex = ''.join(f'{v:02x}' for v in values)
+        if all(c in repeat_colors for c in values):
+            as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2))
+        return '#' + as_hex

-    def as_rgb(self) ->str:
+    def as_rgb(self) -> str:
         """Color as an `rgb(<r>, <g>, <b>)` or `rgba(<r>, <g>, <b>, <a>)` string."""
-        pass
+        if self._rgba.alpha is None:
+            return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})'
+        else:
+            return (
+                f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, '
+                f'{round(self._alpha_float(), 2)})'
+            )

-    def as_rgb_tuple(self, *, alpha: Optional[bool]=None) ->ColorTuple:
+    def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple:
         """Returns the color as an RGB or RGBA tuple.

         Args:
@@ -146,13 +177,28 @@ class Color(_repr.Representation):
             A tuple that contains the values of the red, green, and blue channels in the range 0 to 255.
                 If alpha is included, it is in the range 0 to 1.
         """
-        pass
+        r, g, b = (float_to_255(c) for c in self._rgba[:3])
+        if alpha is None:
+            if self._rgba.alpha is None:
+                return r, g, b
+            else:
+                return r, g, b, self._alpha_float()
+        elif alpha:
+            return r, g, b, self._alpha_float()
+        else:
+            # alpha is False
+            return r, g, b

-    def as_hsl(self) ->str:
+    def as_hsl(self) -> str:
         """Color as an `hsl(<h>, <s>, <l>)` or `hsl(<h>, <s>, <l>, <a>)` string."""
-        pass
+        if self._rgba.alpha is None:
+            h, s, li = self.as_hsl_tuple(alpha=False)  # type: ignore
+            return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})'
+        else:
+            h, s, li, a = self.as_hsl_tuple(alpha=True)  # type: ignore
+            return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})'

-    def as_hsl_tuple(self, *, alpha: Optional[bool]=None) ->HslColorTuple:
+    def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple:
         """Returns the color as an HSL or HSLA tuple.

         Args:
@@ -169,30 +215,47 @@ class Color(_repr.Representation):
         Note:
             This is HSL as used in HTML and most other places, not HLS as used in Python's `colorsys`.
         """
-        pass
+        h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b)  # noqa: E741
+        if alpha is None:
+            if self._rgba.alpha is None:
+                return h, s, l
+            else:
+                return h, s, l, self._alpha_float()
+        if alpha:
+            return h, s, l, self._alpha_float()
+        else:
+            # alpha is False
+            return h, s, l
+
+    def _alpha_float(self) -> float:
+        return 1 if self._rgba.alpha is None else self._rgba.alpha
+
+    @classmethod
+    def __get_pydantic_core_schema__(
+        cls, source: Type[Any], handler: Callable[[Any], CoreSchema]
+    ) -> core_schema.CoreSchema:
+        return core_schema.with_info_plain_validator_function(
+            cls._validate, serialization=core_schema.to_string_ser_schema()
+        )

     @classmethod
-    def __get_pydantic_core_schema__(cls, source: Type[Any], handler:
-        Callable[[Any], CoreSchema]) ->core_schema.CoreSchema:
-        return core_schema.with_info_plain_validator_function(cls._validate,
-            serialization=core_schema.to_string_ser_schema())
+    def _validate(cls, __input_value: Any, _: Any) -> 'Color':
+        return cls(__input_value)

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return self.as_named(fallback=True)

-    def __repr_args__(self) ->'_repr.ReprArgs':
-        return [(None, self.as_named(fallback=True))] + [('rgb', self.
-            as_rgb_tuple())]
+    def __repr_args__(self) -> '_repr.ReprArgs':
+        return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())]

-    def __eq__(self, other: Any) ->bool:
-        return isinstance(other, Color) and self.as_rgb_tuple(
-            ) == other.as_rgb_tuple()
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple()

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(self.as_rgb_tuple())


-def parse_tuple(value: Tuple[Any, ...]) ->RGBA:
+def parse_tuple(value: Tuple[Any, ...]) -> RGBA:
     """Parse a tuple or list to get RGBA values.

     Args:
@@ -204,10 +267,17 @@ def parse_tuple(value: Tuple[Any, ...]) ->RGBA:
     Raises:
         PydanticCustomError: If tuple is not valid.
     """
-    pass
+    if len(value) == 3:
+        r, g, b = (parse_color_value(v) for v in value)
+        return RGBA(r, g, b, None)
+    elif len(value) == 4:
+        r, g, b = (parse_color_value(v) for v in value[:3])
+        return RGBA(r, g, b, parse_float_alpha(value[3]))
+    else:
+        raise PydanticCustomError('color_error', 'value is not a valid color: tuples must have length 3 or 4')


-def parse_str(value: str) ->RGBA:
+def parse_str(value: str) -> RGBA:
     """Parse a string representing a color to an RGBA tuple.

     Possible formats for the input string include:
@@ -227,11 +297,46 @@ def parse_str(value: str) ->RGBA:
     Raises:
         ValueError: If the input string cannot be parsed to an RGBA tuple.
     """
-    pass
+    value_lower = value.lower()
+    try:
+        r, g, b = COLORS_BY_NAME[value_lower]
+    except KeyError:
+        pass
+    else:
+        return ints_to_rgba(r, g, b, None)
+
+    m = re.fullmatch(r_hex_short, value_lower)
+    if m:
+        *rgb, a = m.groups()
+        r, g, b = (int(v * 2, 16) for v in rgb)
+        if a:
+            alpha: Optional[float] = int(a * 2, 16) / 255
+        else:
+            alpha = None
+        return ints_to_rgba(r, g, b, alpha)
+
+    m = re.fullmatch(r_hex_long, value_lower)
+    if m:
+        *rgb, a = m.groups()
+        r, g, b = (int(v, 16) for v in rgb)
+        if a:
+            alpha = int(a, 16) / 255
+        else:
+            alpha = None
+        return ints_to_rgba(r, g, b, alpha)
+
+    m = re.fullmatch(r_rgb, value_lower) or re.fullmatch(r_rgb_v4_style, value_lower)
+    if m:
+        return ints_to_rgba(*m.groups())  # type: ignore

+    m = re.fullmatch(r_hsl, value_lower) or re.fullmatch(r_hsl_v4_style, value_lower)
+    if m:
+        return parse_hsl(*m.groups())  # type: ignore

-def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str],
-    alpha: Optional[float]=None) ->RGBA:
+    raise PydanticCustomError('color_error', 'value is not a valid color: string not recognised as a valid color')
+
+
+def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float] = None) -> RGBA:
     """Converts integer or string values for RGB color and an optional alpha value to an `RGBA` object.

     Args:
@@ -243,10 +348,10 @@ def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str],
     Returns:
         An instance of the `RGBA` class with the corresponding color and alpha values.
     """
-    pass
+    return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha))


-def parse_color_value(value: Union[int, str], max_val: int=255) ->float:
+def parse_color_value(value: Union[int, str], max_val: int = 255) -> float:
     """Parse the color value provided and return a number between 0 and 1.

     Args:
@@ -259,10 +364,21 @@ def parse_color_value(value: Union[int, str], max_val: int=255) ->float:
     Returns:
         A number between 0 and 1.
     """
-    pass
-
-
-def parse_float_alpha(value: Union[None, str, float, int]) ->Optional[float]:
+    try:
+        color = float(value)
+    except ValueError:
+        raise PydanticCustomError('color_error', 'value is not a valid color: color values must be a valid number')
+    if 0 <= color <= max_val:
+        return color / max_val
+    else:
+        raise PydanticCustomError(
+            'color_error',
+            'value is not a valid color: color values must be in the range 0 to {max_val}',
+            {'max_val': max_val},
+        )
+
+
+def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]:
     """Parse an alpha value checking it's a valid float in the range 0 to 1.

     Args:
@@ -274,11 +390,25 @@ def parse_float_alpha(value: Union[None, str, float, int]) ->Optional[float]:
     Raises:
         PydanticCustomError: If the input value cannot be successfully parsed as a float in the expected range.
     """
-    pass
+    if value is None:
+        return None
+    try:
+        if isinstance(value, str) and value.endswith('%'):
+            alpha = float(value[:-1]) / 100
+        else:
+            alpha = float(value)
+    except ValueError:
+        raise PydanticCustomError('color_error', 'value is not a valid color: alpha values must be a valid float')

+    if math.isclose(alpha, 1):
+        return None
+    elif 0 <= alpha <= 1:
+        return alpha
+    else:
+        raise PydanticCustomError('color_error', 'value is not a valid color: alpha values must be in the range 0 to 1')

-def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[
-    float]=None) ->RGBA:
+
+def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA:
     """Parse raw hue, saturation, lightness, and alpha values and convert to RGBA.

     Args:
@@ -291,10 +421,22 @@ def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[
     Returns:
         An instance of `RGBA`.
     """
-    pass
+    s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100)
+
+    h_value = float(h)
+    if h_units in {None, 'deg'}:
+        h_value = h_value % 360 / 360
+    elif h_units == 'rad':
+        h_value = h_value % rads / rads
+    else:
+        # turns
+        h_value = h_value % 1

+    r, g, b = hls_to_rgb(h_value, l_value, s_value)
+    return RGBA(r, g, b, parse_float_alpha(alpha))

-def float_to_255(c: float) ->int:
+
+def float_to_255(c: float) -> int:
     """Converts a float value between 0 and 1 (inclusive) to an integer between 0 and 255 (inclusive).

     Args:
@@ -306,69 +448,157 @@ def float_to_255(c: float) ->int:
     Raises:
         ValueError: If the given float value is outside the acceptable range of 0 to 1 (inclusive).
     """
-    pass
-
-
-COLORS_BY_NAME = {'aliceblue': (240, 248, 255), 'antiquewhite': (250, 235, 
-    215), 'aqua': (0, 255, 255), 'aquamarine': (127, 255, 212), 'azure': (
-    240, 255, 255), 'beige': (245, 245, 220), 'bisque': (255, 228, 196),
-    'black': (0, 0, 0), 'blanchedalmond': (255, 235, 205), 'blue': (0, 0, 
-    255), 'blueviolet': (138, 43, 226), 'brown': (165, 42, 42), 'burlywood':
-    (222, 184, 135), 'cadetblue': (95, 158, 160), 'chartreuse': (127, 255, 
-    0), 'chocolate': (210, 105, 30), 'coral': (255, 127, 80),
-    'cornflowerblue': (100, 149, 237), 'cornsilk': (255, 248, 220),
-    'crimson': (220, 20, 60), 'cyan': (0, 255, 255), 'darkblue': (0, 0, 139
-    ), 'darkcyan': (0, 139, 139), 'darkgoldenrod': (184, 134, 11),
-    'darkgray': (169, 169, 169), 'darkgreen': (0, 100, 0), 'darkgrey': (169,
-    169, 169), 'darkkhaki': (189, 183, 107), 'darkmagenta': (139, 0, 139),
-    'darkolivegreen': (85, 107, 47), 'darkorange': (255, 140, 0),
-    'darkorchid': (153, 50, 204), 'darkred': (139, 0, 0), 'darksalmon': (
-    233, 150, 122), 'darkseagreen': (143, 188, 143), 'darkslateblue': (72, 
-    61, 139), 'darkslategray': (47, 79, 79), 'darkslategrey': (47, 79, 79),
-    'darkturquoise': (0, 206, 209), 'darkviolet': (148, 0, 211), 'deeppink':
-    (255, 20, 147), 'deepskyblue': (0, 191, 255), 'dimgray': (105, 105, 105
-    ), 'dimgrey': (105, 105, 105), 'dodgerblue': (30, 144, 255),
-    'firebrick': (178, 34, 34), 'floralwhite': (255, 250, 240),
-    'forestgreen': (34, 139, 34), 'fuchsia': (255, 0, 255), 'gainsboro': (
-    220, 220, 220), 'ghostwhite': (248, 248, 255), 'gold': (255, 215, 0),
-    'goldenrod': (218, 165, 32), 'gray': (128, 128, 128), 'green': (0, 128,
-    0), 'greenyellow': (173, 255, 47), 'grey': (128, 128, 128), 'honeydew':
-    (240, 255, 240), 'hotpink': (255, 105, 180), 'indianred': (205, 92, 92),
-    'indigo': (75, 0, 130), 'ivory': (255, 255, 240), 'khaki': (240, 230, 
-    140), 'lavender': (230, 230, 250), 'lavenderblush': (255, 240, 245),
-    'lawngreen': (124, 252, 0), 'lemonchiffon': (255, 250, 205),
-    'lightblue': (173, 216, 230), 'lightcoral': (240, 128, 128),
-    'lightcyan': (224, 255, 255), 'lightgoldenrodyellow': (250, 250, 210),
-    'lightgray': (211, 211, 211), 'lightgreen': (144, 238, 144),
-    'lightgrey': (211, 211, 211), 'lightpink': (255, 182, 193),
-    'lightsalmon': (255, 160, 122), 'lightseagreen': (32, 178, 170),
-    'lightskyblue': (135, 206, 250), 'lightslategray': (119, 136, 153),
-    'lightslategrey': (119, 136, 153), 'lightsteelblue': (176, 196, 222),
-    'lightyellow': (255, 255, 224), 'lime': (0, 255, 0), 'limegreen': (50, 
-    205, 50), 'linen': (250, 240, 230), 'magenta': (255, 0, 255), 'maroon':
-    (128, 0, 0), 'mediumaquamarine': (102, 205, 170), 'mediumblue': (0, 0, 
-    205), 'mediumorchid': (186, 85, 211), 'mediumpurple': (147, 112, 219),
-    'mediumseagreen': (60, 179, 113), 'mediumslateblue': (123, 104, 238),
-    'mediumspringgreen': (0, 250, 154), 'mediumturquoise': (72, 209, 204),
-    'mediumvioletred': (199, 21, 133), 'midnightblue': (25, 25, 112),
-    'mintcream': (245, 255, 250), 'mistyrose': (255, 228, 225), 'moccasin':
-    (255, 228, 181), 'navajowhite': (255, 222, 173), 'navy': (0, 0, 128),
-    'oldlace': (253, 245, 230), 'olive': (128, 128, 0), 'olivedrab': (107, 
-    142, 35), 'orange': (255, 165, 0), 'orangered': (255, 69, 0), 'orchid':
-    (218, 112, 214), 'palegoldenrod': (238, 232, 170), 'palegreen': (152, 
-    251, 152), 'paleturquoise': (175, 238, 238), 'palevioletred': (219, 112,
-    147), 'papayawhip': (255, 239, 213), 'peachpuff': (255, 218, 185),
-    'peru': (205, 133, 63), 'pink': (255, 192, 203), 'plum': (221, 160, 221
-    ), 'powderblue': (176, 224, 230), 'purple': (128, 0, 128), 'red': (255,
-    0, 0), 'rosybrown': (188, 143, 143), 'royalblue': (65, 105, 225),
-    'saddlebrown': (139, 69, 19), 'salmon': (250, 128, 114), 'sandybrown':
-    (244, 164, 96), 'seagreen': (46, 139, 87), 'seashell': (255, 245, 238),
-    'sienna': (160, 82, 45), 'silver': (192, 192, 192), 'skyblue': (135, 
-    206, 235), 'slateblue': (106, 90, 205), 'slategray': (112, 128, 144),
-    'slategrey': (112, 128, 144), 'snow': (255, 250, 250), 'springgreen': (
-    0, 255, 127), 'steelblue': (70, 130, 180), 'tan': (210, 180, 140),
-    'teal': (0, 128, 128), 'thistle': (216, 191, 216), 'tomato': (255, 99, 
-    71), 'turquoise': (64, 224, 208), 'violet': (238, 130, 238), 'wheat': (
-    245, 222, 179), 'white': (255, 255, 255), 'whitesmoke': (245, 245, 245),
-    'yellow': (255, 255, 0), 'yellowgreen': (154, 205, 50)}
+    return int(round(c * 255))
+
+
+COLORS_BY_NAME = {
+    'aliceblue': (240, 248, 255),
+    'antiquewhite': (250, 235, 215),
+    'aqua': (0, 255, 255),
+    'aquamarine': (127, 255, 212),
+    'azure': (240, 255, 255),
+    'beige': (245, 245, 220),
+    'bisque': (255, 228, 196),
+    'black': (0, 0, 0),
+    'blanchedalmond': (255, 235, 205),
+    'blue': (0, 0, 255),
+    'blueviolet': (138, 43, 226),
+    'brown': (165, 42, 42),
+    'burlywood': (222, 184, 135),
+    'cadetblue': (95, 158, 160),
+    'chartreuse': (127, 255, 0),
+    'chocolate': (210, 105, 30),
+    'coral': (255, 127, 80),
+    'cornflowerblue': (100, 149, 237),
+    'cornsilk': (255, 248, 220),
+    'crimson': (220, 20, 60),
+    'cyan': (0, 255, 255),
+    'darkblue': (0, 0, 139),
+    'darkcyan': (0, 139, 139),
+    'darkgoldenrod': (184, 134, 11),
+    'darkgray': (169, 169, 169),
+    'darkgreen': (0, 100, 0),
+    'darkgrey': (169, 169, 169),
+    'darkkhaki': (189, 183, 107),
+    'darkmagenta': (139, 0, 139),
+    'darkolivegreen': (85, 107, 47),
+    'darkorange': (255, 140, 0),
+    'darkorchid': (153, 50, 204),
+    'darkred': (139, 0, 0),
+    'darksalmon': (233, 150, 122),
+    'darkseagreen': (143, 188, 143),
+    'darkslateblue': (72, 61, 139),
+    'darkslategray': (47, 79, 79),
+    'darkslategrey': (47, 79, 79),
+    'darkturquoise': (0, 206, 209),
+    'darkviolet': (148, 0, 211),
+    'deeppink': (255, 20, 147),
+    'deepskyblue': (0, 191, 255),
+    'dimgray': (105, 105, 105),
+    'dimgrey': (105, 105, 105),
+    'dodgerblue': (30, 144, 255),
+    'firebrick': (178, 34, 34),
+    'floralwhite': (255, 250, 240),
+    'forestgreen': (34, 139, 34),
+    'fuchsia': (255, 0, 255),
+    'gainsboro': (220, 220, 220),
+    'ghostwhite': (248, 248, 255),
+    'gold': (255, 215, 0),
+    'goldenrod': (218, 165, 32),
+    'gray': (128, 128, 128),
+    'green': (0, 128, 0),
+    'greenyellow': (173, 255, 47),
+    'grey': (128, 128, 128),
+    'honeydew': (240, 255, 240),
+    'hotpink': (255, 105, 180),
+    'indianred': (205, 92, 92),
+    'indigo': (75, 0, 130),
+    'ivory': (255, 255, 240),
+    'khaki': (240, 230, 140),
+    'lavender': (230, 230, 250),
+    'lavenderblush': (255, 240, 245),
+    'lawngreen': (124, 252, 0),
+    'lemonchiffon': (255, 250, 205),
+    'lightblue': (173, 216, 230),
+    'lightcoral': (240, 128, 128),
+    'lightcyan': (224, 255, 255),
+    'lightgoldenrodyellow': (250, 250, 210),
+    'lightgray': (211, 211, 211),
+    'lightgreen': (144, 238, 144),
+    'lightgrey': (211, 211, 211),
+    'lightpink': (255, 182, 193),
+    'lightsalmon': (255, 160, 122),
+    'lightseagreen': (32, 178, 170),
+    'lightskyblue': (135, 206, 250),
+    'lightslategray': (119, 136, 153),
+    'lightslategrey': (119, 136, 153),
+    'lightsteelblue': (176, 196, 222),
+    'lightyellow': (255, 255, 224),
+    'lime': (0, 255, 0),
+    'limegreen': (50, 205, 50),
+    'linen': (250, 240, 230),
+    'magenta': (255, 0, 255),
+    'maroon': (128, 0, 0),
+    'mediumaquamarine': (102, 205, 170),
+    'mediumblue': (0, 0, 205),
+    'mediumorchid': (186, 85, 211),
+    'mediumpurple': (147, 112, 219),
+    'mediumseagreen': (60, 179, 113),
+    'mediumslateblue': (123, 104, 238),
+    'mediumspringgreen': (0, 250, 154),
+    'mediumturquoise': (72, 209, 204),
+    'mediumvioletred': (199, 21, 133),
+    'midnightblue': (25, 25, 112),
+    'mintcream': (245, 255, 250),
+    'mistyrose': (255, 228, 225),
+    'moccasin': (255, 228, 181),
+    'navajowhite': (255, 222, 173),
+    'navy': (0, 0, 128),
+    'oldlace': (253, 245, 230),
+    'olive': (128, 128, 0),
+    'olivedrab': (107, 142, 35),
+    'orange': (255, 165, 0),
+    'orangered': (255, 69, 0),
+    'orchid': (218, 112, 214),
+    'palegoldenrod': (238, 232, 170),
+    'palegreen': (152, 251, 152),
+    'paleturquoise': (175, 238, 238),
+    'palevioletred': (219, 112, 147),
+    'papayawhip': (255, 239, 213),
+    'peachpuff': (255, 218, 185),
+    'peru': (205, 133, 63),
+    'pink': (255, 192, 203),
+    'plum': (221, 160, 221),
+    'powderblue': (176, 224, 230),
+    'purple': (128, 0, 128),
+    'red': (255, 0, 0),
+    'rosybrown': (188, 143, 143),
+    'royalblue': (65, 105, 225),
+    'saddlebrown': (139, 69, 19),
+    'salmon': (250, 128, 114),
+    'sandybrown': (244, 164, 96),
+    'seagreen': (46, 139, 87),
+    'seashell': (255, 245, 238),
+    'sienna': (160, 82, 45),
+    'silver': (192, 192, 192),
+    'skyblue': (135, 206, 235),
+    'slateblue': (106, 90, 205),
+    'slategray': (112, 128, 144),
+    'slategrey': (112, 128, 144),
+    'snow': (255, 250, 250),
+    'springgreen': (0, 255, 127),
+    'steelblue': (70, 130, 180),
+    'tan': (210, 180, 140),
+    'teal': (0, 128, 128),
+    'thistle': (216, 191, 216),
+    'tomato': (255, 99, 71),
+    'turquoise': (64, 224, 208),
+    'violet': (238, 130, 238),
+    'wheat': (245, 222, 179),
+    'white': (255, 255, 255),
+    'whitesmoke': (245, 245, 245),
+    'yellow': (255, 255, 0),
+    'yellowgreen': (154, 205, 50),
+}
+
 COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()}
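
As an illustrative aside (not part of the patch), the color helpers restored above can be exercised end to end. This is a minimal sketch: it assumes the module is importable as `pydantic.color` and silences the deprecation warning added by the `@deprecated` decorator on `Color`.

import warnings

from pydantic.color import Color

with warnings.catch_warnings():
    warnings.simplefilter('ignore')  # Color is deprecated in favor of pydantic_extra_types
    c = Color('#ff0000')

print(c.as_named())      # looked up via COLORS_BY_VALUE
#> red
print(c.as_rgb_tuple())
#> (255, 0, 0)
print(c.as_hex())        # short form, since every channel repeats its hex digit
#> #f00
print(c.as_hsl())
#> hsl(0, 100%, 50%)
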
diff --git a/pydantic/config.py b/pydantic/config.py
index 24abaf45f..87ab416fb 100644
--- a/pydantic/config.py
+++ b/pydantic/config.py
@@ -1,42 +1,62 @@
 """Configuration for Pydantic models."""
+
 from __future__ import annotations as _annotations
+
 from typing import TYPE_CHECKING, Any, Callable, Dict, List, Type, TypeVar, Union
+
 from typing_extensions import Literal, TypeAlias, TypedDict
+
 from ._migration import getattr_migration
 from .aliases import AliasGenerator
 from .errors import PydanticUserError
+
 if TYPE_CHECKING:
     from ._internal._generate_schema import GenerateSchema as _GenerateSchema
     from .fields import ComputedFieldInfo, FieldInfo
-__all__ = 'ConfigDict', 'with_config'
-JsonValue: TypeAlias = Union[int, float, str, bool, None, List['JsonValue'],
-    'JsonDict']
+
+__all__ = ('ConfigDict', 'with_config')
+
+
+JsonValue: TypeAlias = Union[int, float, str, bool, None, List['JsonValue'], 'JsonDict']
 JsonDict: TypeAlias = Dict[str, JsonValue]
+
 JsonEncoder = Callable[[Any], Any]
-JsonSchemaExtraCallable: TypeAlias = Union[Callable[[JsonDict], None],
-    Callable[[JsonDict, Type[Any]], None]]
+
+JsonSchemaExtraCallable: TypeAlias = Union[
+    Callable[[JsonDict], None],
+    Callable[[JsonDict, Type[Any]], None],
+]
+
 ExtraValues = Literal['allow', 'ignore', 'forbid']


-class ConfigDict(TypedDict, total=(False)):
+class ConfigDict(TypedDict, total=False):
     """A TypedDict for configuring Pydantic behaviour."""
+
     title: str | None
     """The title for the generated JSON schema, defaults to the model's name"""
+
     model_title_generator: Callable[[type], str] | None
     """A callable that takes a model class and returns the title for it. Defaults to `None`."""
-    field_title_generator: Callable[[str, FieldInfo | ComputedFieldInfo], str
-        ] | None
+
+    field_title_generator: Callable[[str, FieldInfo | ComputedFieldInfo], str] | None
     """A callable that takes a field's name and info and returns title for it. Defaults to `None`."""
+
     str_to_lower: bool
     """Whether to convert all characters to lowercase for str types. Defaults to `False`."""
+
     str_to_upper: bool
     """Whether to convert all characters to uppercase for str types. Defaults to `False`."""
+
     str_strip_whitespace: bool
     """Whether to strip leading and trailing whitespace for str types."""
+
     str_min_length: int
     """The minimum length for str types. Defaults to `None`."""
+
     str_max_length: int | None
     """The maximum length for str types. Defaults to `None`."""
+
     extra: ExtraValues | None
     """
     Whether to ignore, allow, or forbid extra attributes during model initialization. Defaults to `'ignore'`.
@@ -107,6 +127,7 @@ class ConfigDict(TypedDict, total=(False)):
         '''
     ```
     """
+
     frozen: bool
     """
     Whether models are faux-immutable, i.e. whether `__setattr__` is allowed, and also generates
@@ -116,6 +137,7 @@ class ConfigDict(TypedDict, total=(False)):
     Note:
         On V1, the inverse of this setting was called `allow_mutation`, and was `True` by default.
     """
+
     populate_by_name: bool
     """
     Whether an aliased field may be populated by its name as given by the model
@@ -148,6 +170,7 @@ class ConfigDict(TypedDict, total=(False)):
     2. The model is populated by the alias `'full_name'`.
     3. The model is populated by the field name `'name'`.
     """
+
     use_enum_values: bool
     """
     Whether to populate models with the `value` property of enums, rather than the raw enum.
@@ -187,6 +210,7 @@ class ConfigDict(TypedDict, total=(False)):
     #> {'some_enum': 'bar', 'another_enum': 'baz'}
     ```
     """
+
     validate_assignment: bool
     """
     Whether to validate the data when the model is changed. Defaults to `False`.
@@ -238,6 +262,7 @@ class ConfigDict(TypedDict, total=(False)):
     2. The validation happens when the model is created.
     3. The validation _also_ happens when the data is changed.
     """
+
     arbitrary_types_allowed: bool
     """
     Whether arbitrary types are allowed for field types. Defaults to `False`.
@@ -292,12 +317,15 @@ class ConfigDict(TypedDict, total=(False)):
     #> <class '__main__.Pet'>
     ```
     """
+
     from_attributes: bool
     """
     Whether to build models and look up discriminators of tagged unions using python object attributes.
     """
+
     loc_by_alias: bool
     """Whether to use the actual key provided in the data (e.g. alias) for error `loc`s rather than the field's name. Defaults to `True`."""
+
     alias_generator: Callable[[str], str] | AliasGenerator | None
     """
     A callable that takes a field name and returns an alias for it
@@ -356,16 +384,20 @@ class ConfigDict(TypedDict, total=(False)):
         Pydantic offers three built-in alias generators: [`to_pascal`][pydantic.alias_generators.to_pascal],
         [`to_camel`][pydantic.alias_generators.to_camel], and [`to_snake`][pydantic.alias_generators.to_snake].
     """
+
     ignored_types: tuple[type, ...]
     """A tuple of types that may occur as values of class attributes without annotations. This is
     typically used for custom descriptors (classes that behave like `property`). If an attribute is set on a
     class without an annotation and has a type that is not in this tuple (or otherwise recognized by
     _pydantic_), an error will be raised. Defaults to `()`.
     """
+
     allow_inf_nan: bool
     """Whether to allow infinity (`+inf` an `-inf`) and NaN values to float fields. Defaults to `True`."""
+
     json_schema_extra: JsonDict | JsonSchemaExtraCallable | None
     """A dict or callable to provide extra JSON schema properties. Defaults to `None`."""
+
     json_encoders: dict[type[object], JsonEncoder] | None
     """
     A `dict` of custom JSON encoders for specific types. Defaults to `None`.
@@ -375,6 +407,8 @@ class ConfigDict(TypedDict, total=(False)):
         We originally planned to remove it in v2 but didn't have a 1:1 replacement so we are keeping it for now.
         It is still deprecated and will likely be removed in the future.
     """
+
+    # new in V2
     strict: bool
     """
     _(new in V2)_ If `True`, strict validation is applied to all fields on the model.
@@ -401,6 +435,7 @@ class ConfigDict(TypedDict, total=(False)):
     See the [Conversion Table](../concepts/conversion_table.md) for more details on how Pydantic converts data in both
     strict and lax modes.
     """
+    # whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never'
     revalidate_instances: Literal['always', 'never', 'subclass-instances']
     """
     When and how to revalidate models and dataclasses during validation. Accepts the string
@@ -527,6 +562,7 @@ class ConfigDict(TypedDict, total=(False)):
     2. This is not revalidated, since `my_user` is not a subclass of `User`.
     3. Using `'never'` we would have gotten `user=SubUser(hobbies=['scuba diving'], sins=['lying'])`.
     """
+
     ser_json_timedelta: Literal['iso8601', 'float']
     """
     The format of JSON serialized timedeltas. Accepts the string values of `'iso8601'` and
@@ -535,6 +571,7 @@ class ConfigDict(TypedDict, total=(False)):
     - `'iso8601'` will serialize timedeltas to ISO 8601 durations.
     - `'float'` will serialize timedeltas to the total number of seconds.
     """
+
     ser_json_bytes: Literal['utf8', 'base64']
     """
     The encoding of JSON serialized bytes. Accepts the string values of `'utf8'` and `'base64'`.
@@ -543,6 +580,7 @@ class ConfigDict(TypedDict, total=(False)):
     - `'utf8'` will serialize bytes to UTF-8 strings.
     - `'base64'` will serialize bytes to URL safe base64 strings.
     """
+
     ser_json_inf_nan: Literal['null', 'constants', 'strings']
     """
     The encoding of JSON serialized infinity and NaN float values. Defaults to `'null'`.
@@ -551,10 +589,14 @@ class ConfigDict(TypedDict, total=(False)):
     - `'constants'` will serialize infinity and NaN values as `Infinity` and `NaN`.
     - `'strings'` will serialize infinity as string `"Infinity"` and NaN as string `"NaN"`.
     """
+
+    # whether to validate default values during validation, default False
     validate_default: bool
     """Whether to validate default values during validation. Defaults to `False`."""
+
     validate_return: bool
     """whether to validate the return value from call validators. Defaults to `False`."""
+
     protected_namespaces: tuple[str, ...]
     """
     A `tuple` of strings that prevent models from having fields whose names conflict with them.
@@ -630,6 +672,7 @@ class ConfigDict(TypedDict, total=(False)):
         '''
     ```
     """
+
     hide_input_in_errors: bool
     """
     Whether to hide inputs when printing errors. Defaults to `False`.
@@ -673,6 +716,7 @@ class ConfigDict(TypedDict, total=(False)):
         '''
     ```
     """
+
     defer_build: bool
     """
     Whether to defer model validator and serializer construction until the first model validation. Defaults to False.
@@ -690,6 +734,7 @@ class ConfigDict(TypedDict, total=(False)):
         models in order for `defer_build=True` to take effect. This additional (experimental) parameter is required for
         the deferred building due to FastAPI relying on `TypeAdapter`s.
     """
+
     experimental_defer_build_mode: tuple[Literal['model', 'type_adapter'], ...]
     """
     Controls when [`defer_build`][pydantic.config.ConfigDict.defer_build] is applicable. Defaults to `('model',)`.
@@ -704,11 +749,13 @@ class ConfigDict(TypedDict, total=(False)):
         The `experimental_defer_build_mode` parameter is named with an underscore to suggest this is an experimental feature. It may
         be removed or changed in the future in a minor release.
     """
+
     plugin_settings: dict[str, object] | None
     """A `dict` of settings for plugins. Defaults to `None`.

     See [Pydantic Plugins](../concepts/plugins.md) for details.
     """
+
     schema_generator: type[_GenerateSchema] | None
     """
     A custom core schema generator class to use when generating JSON schemas.
@@ -718,6 +765,7 @@ class ConfigDict(TypedDict, total=(False)):

     See [#6737](https://github.com/pydantic/pydantic/pull/6737) for details.
     """
+
     json_schema_serialization_defaults_required: bool
     """
     Whether fields with default values should be marked as required in the serialization schema. Defaults to `False`.
@@ -756,6 +804,7 @@ class ConfigDict(TypedDict, total=(False)):
     '''
     ```
     """
+
     json_schema_mode_override: Literal['validation', 'serialization', None]
     """
     If not `None`, the specified mode will be used to generate the JSON schema regardless of what `mode` was passed to
@@ -810,6 +859,7 @@ class ConfigDict(TypedDict, total=(False)):
     '''
     ```
     """
+
     coerce_numbers_to_str: bool
     """
     If `True`, enables automatic coercion of any `Number` type to `str` in "lax" (non-strict) mode. Defaults to `False`.
@@ -847,6 +897,7 @@ class ConfigDict(TypedDict, total=(False)):
     #> "42.13"
     ```
     """
+
     regex_engine: Literal['rust-regex', 'python-re']
     """
     The regex engine to be used for pattern validation.
@@ -883,6 +934,7 @@ class ConfigDict(TypedDict, total=(False)):
         '''
     ```
     """
+
     validation_error_cause: bool
     """
     If `True`, Python exceptions that were part of a validation failure will be shown as an exception group as a cause. Can be useful for debugging. Defaults to `False`.
@@ -893,8 +945,9 @@ class ConfigDict(TypedDict, total=(False)):
     Note:
         The structure of validation errors is likely to change in future Pydantic versions. Pydantic offers no guarantees about their structure. Should be used for visual traceback debugging only.
     """
+
     use_attribute_docstrings: bool
-    """
+    '''
     Whether docstrings of attributes (bare string literals immediately following the attribute declaration)
     should be used for field descriptions. Defaults to `False`.

@@ -908,14 +961,14 @@ class ConfigDict(TypedDict, total=(False)):
         model_config = ConfigDict(use_attribute_docstrings=True)

         x: str
-        ""\"
+        """
         Example of an attribute docstring
-        ""\"
+        """

         y: int = Field(description="Description in Field")
-        ""\"
+        """
         Description in Field overrides attribute docstring
-        ""\"
+        """


     print(Model.model_fields["x"].description)
@@ -929,7 +982,8 @@ class ConfigDict(TypedDict, total=(False)):
         Due to current limitations, attribute docstrings detection may not work as expected when using `TypedDict`
         (in particular when multiple `TypedDict` classes have the same name in the same source file). The behavior
         can be different depending on the Python version used.
-    """
+    '''
+
     cache_strings: bool | Literal['all', 'keys', 'none']
     """
     Whether to cache strings to avoid constructing new Python objects. Defaults to True.
@@ -953,7 +1007,7 @@ class ConfigDict(TypedDict, total=(False)):
 _TypeT = TypeVar('_TypeT', bound=type)


-def with_config(config: ConfigDict) ->Callable[[_TypeT], _TypeT]:
+def with_config(config: ConfigDict) -> Callable[[_TypeT], _TypeT]:
     """Usage docs: https://docs.pydantic.dev/2.8/concepts/config/#configuration-with-dataclass-from-the-standard-library-or-typeddict

     A convenience decorator to set a [Pydantic configuration](config.md) on a `TypedDict` or a `dataclass` from the standard library.
@@ -978,7 +1032,22 @@ def with_config(config: ConfigDict) ->Callable[[_TypeT], _TypeT]:
         #> {'x': 'abc'}
         ```
     """
-    pass
+
+    def inner(class_: _TypeT, /) -> _TypeT:
+        # Ideally, we would check for `class_` to either be a `TypedDict` or a stdlib dataclass.
+        # However, the `@with_config` decorator can be applied *after* `@dataclass`. To avoid
+        # common mistakes, we at least check for `class_` to not be a Pydantic model.
+        from ._internal._utils import is_model_class
+
+        if is_model_class(class_):
+            raise PydanticUserError(
+                f'Cannot use `with_config` on {class_.__name__} as it is a Pydantic model',
+                code='with-config-on-model',
+            )
+        class_.__pydantic_config__ = config
+        return class_
+
+    return inner


 __getattr__ = getattr_migration(__name__)
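
For reference, the `with_config` implementation filled in above supports usage along the lines of the docstring's truncated example. A minimal sketch, assuming `str_to_lower` is the config option used there:

from typing_extensions import TypedDict

from pydantic import ConfigDict, TypeAdapter, with_config

@with_config(ConfigDict(str_to_lower=True))
class Model(TypedDict):
    x: str

print(TypeAdapter(Model).validate_python({'x': 'ABC'}))
#> {'x': 'abc'}

Applying the decorator to a `BaseModel` subclass instead raises a `PydanticUserError` with code `with-config-on-model`, as encoded in `inner` above.
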
diff --git a/pydantic/dataclasses.py b/pydantic/dataclasses.py
index cb2e2160b..acfd3d520 100644
--- a/pydantic/dataclasses.py
+++ b/pydantic/dataclasses.py
@@ -1,30 +1,112 @@
 """Provide an enhanced dataclass that performs validation."""
+
 from __future__ import annotations as _annotations
+
 import dataclasses
 import sys
 import types
 from typing import TYPE_CHECKING, Any, Callable, Generic, NoReturn, TypeVar, overload
+
 from typing_extensions import Literal, TypeGuard, dataclass_transform
+
 from ._internal import _config, _decorators, _typing_extra
 from ._internal import _dataclasses as _pydantic_dataclasses
 from ._migration import getattr_migration
 from .config import ConfigDict
 from .errors import PydanticUserError
 from .fields import Field, FieldInfo, PrivateAttr
+
 if TYPE_CHECKING:
     from ._internal._dataclasses import PydanticDataclass
+
 __all__ = 'dataclass', 'rebuild_dataclass'
+
 _T = TypeVar('_T')
+
 if sys.version_info >= (3, 10):

+    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
+    @overload
+    def dataclass(
+        *,
+        init: Literal[False] = False,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: ConfigDict | type[object] | None = None,
+        validate_on_init: bool | None = None,
+        kw_only: bool = ...,
+        slots: bool = ...,
+    ) -> Callable[[type[_T]], type[PydanticDataclass]]:  # type: ignore
+        ...
+
+    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
+    @overload
+    def dataclass(
+        _cls: type[_T],  # type: ignore
+        *,
+        init: Literal[False] = False,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: ConfigDict | type[object] | None = None,
+        validate_on_init: bool | None = None,
+        kw_only: bool = ...,
+        slots: bool = ...,
+    ) -> type[PydanticDataclass]: ...
+
+else:
+
+    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
+    @overload
+    def dataclass(
+        *,
+        init: Literal[False] = False,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: ConfigDict | type[object] | None = None,
+        validate_on_init: bool | None = None,
+    ) -> Callable[[type[_T]], type[PydanticDataclass]]:  # type: ignore
+        ...
+
+    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
+    @overload
+    def dataclass(
+        _cls: type[_T],  # type: ignore
+        *,
+        init: Literal[False] = False,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: ConfigDict | type[object] | None = None,
+        validate_on_init: bool | None = None,
+    ) -> type[PydanticDataclass]: ...
+

 @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
-def dataclass(_cls: (type[_T] | None)=None, *, init: Literal[False]=False,
-    repr: bool=True, eq: bool=True, order: bool=False, unsafe_hash: bool=
-    False, frozen: bool=False, config: (ConfigDict | type[object] | None)=
-    None, validate_on_init: (bool | None)=None, kw_only: bool=False, slots:
-    bool=False) ->(Callable[[type[_T]], type[PydanticDataclass]] | type[
-    PydanticDataclass]):
+def dataclass(  # noqa: C901
+    _cls: type[_T] | None = None,
+    *,
+    init: Literal[False] = False,
+    repr: bool = True,
+    eq: bool = True,
+    order: bool = False,
+    unsafe_hash: bool = False,
+    frozen: bool = False,
+    config: ConfigDict | type[object] | None = None,
+    validate_on_init: bool | None = None,
+    kw_only: bool = False,
+    slots: bool = False,
+) -> Callable[[type[_T]], type[PydanticDataclass]] | type[PydanticDataclass]:
     """Usage docs: https://docs.pydantic.dev/2.8/concepts/dataclasses/

     A decorator used to create a Pydantic-enhanced dataclass, similar to the standard Python `dataclass`,
@@ -56,24 +138,150 @@ def dataclass(_cls: (type[_T] | None)=None, *, init: Literal[False]=False,
     Raises:
         AssertionError: Raised if `init` is not `False` or `validate_on_init` is `False`.
     """
-    pass
+    assert init is False, 'pydantic.dataclasses.dataclass only supports init=False'
+    assert validate_on_init is not False, 'validate_on_init=False is no longer supported'
+
+    if sys.version_info >= (3, 10):
+        kwargs = dict(kw_only=kw_only, slots=slots)
+    else:
+        kwargs = {}
+
+    def make_pydantic_fields_compatible(cls: type[Any]) -> None:
+        """Make sure that stdlib `dataclasses` understands `Field` kwargs like `kw_only`
+        To do that, we simply change
+          `x: int = pydantic.Field(..., kw_only=True)`
+        into
+          `x: int = dataclasses.field(default=pydantic.Field(..., kw_only=True), kw_only=True)`
+        """
+        for annotation_cls in cls.__mro__:
+            # In Python < 3.9, `__annotations__` might not be present if there are no fields.
+            # we therefore need to use `getattr` to avoid an `AttributeError`.
+            annotations = getattr(annotation_cls, '__annotations__', [])
+            for field_name in annotations:
+                field_value = getattr(cls, field_name, None)
+                # Process only if this is an instance of `FieldInfo`.
+                if not isinstance(field_value, FieldInfo):
+                    continue
+
+                # Initialize arguments for the standard `dataclasses.field`.
+                field_args: dict = {'default': field_value}
+
+                # Handle `kw_only` for Python 3.10+
+                if sys.version_info >= (3, 10) and field_value.kw_only:
+                    field_args['kw_only'] = True
+
+                # Set `repr` attribute if it's explicitly specified to be not `True`.
+                if field_value.repr is not True:
+                    field_args['repr'] = field_value.repr
+
+                setattr(cls, field_name, dataclasses.field(**field_args))
+                # In Python 3.8, dataclasses checks cls.__dict__['__annotations__'] for annotations,
+                # so we must make sure it's initialized before we add to it.
+                if cls.__dict__.get('__annotations__') is None:
+                    cls.__annotations__ = {}
+                cls.__annotations__[field_name] = annotations[field_name]
+
+    def create_dataclass(cls: type[Any]) -> type[PydanticDataclass]:
+        """Create a Pydantic dataclass from a regular dataclass.
+
+        Args:
+            cls: The class to create the Pydantic dataclass from.
+
+        Returns:
+            A Pydantic dataclass.
+        """
+        from ._internal._utils import is_model_class
+
+        if is_model_class(cls):
+            raise PydanticUserError(
+                f'Cannot create a Pydantic dataclass from {cls.__name__} as it is already a Pydantic model',
+                code='dataclass-on-model',
+            )
+
+        original_cls = cls
+
+        config_dict = config
+        if config_dict is None:
+            # if not explicitly provided, read from the type
+            cls_config = getattr(cls, '__pydantic_config__', None)
+            if cls_config is not None:
+                config_dict = cls_config
+        config_wrapper = _config.ConfigWrapper(config_dict)
+        decorators = _decorators.DecoratorInfos.build(cls)
+
+        # Keep track of the original __doc__ so that we can restore it after applying the dataclasses decorator
+        # Otherwise, classes with no __doc__ will have their signature added into the JSON schema description,
+        # since dataclasses.dataclass will set this as the __doc__
+        original_doc = cls.__doc__
+
+        if _pydantic_dataclasses.is_builtin_dataclass(cls):
+            # Don't preserve the docstring for vanilla dataclasses, as it may include the signature
+            # This matches v1 behavior, and there was an explicit test for it
+            original_doc = None
+
+            # We don't want to add validation to the existing std lib dataclass, so we will subclass it
+            #   If the class is generic, we need to make sure the subclass also inherits from Generic
+            #   with all the same parameters.
+            bases = (cls,)
+            if issubclass(cls, Generic):
+                generic_base = Generic[cls.__parameters__]  # type: ignore
+                bases = bases + (generic_base,)
+            cls = types.new_class(cls.__name__, bases)
+
+        make_pydantic_fields_compatible(cls)
+
+        cls = dataclasses.dataclass(  # type: ignore[call-overload]
+            cls,
+            # the value of init here doesn't affect anything except that it makes it easier to generate a signature
+            init=True,
+            repr=repr,
+            eq=eq,
+            order=order,
+            unsafe_hash=unsafe_hash,
+            frozen=frozen,
+            **kwargs,
+        )
+
+        cls.__pydantic_decorators__ = decorators  # type: ignore
+        cls.__doc__ = original_doc
+        cls.__module__ = original_cls.__module__
+        cls.__qualname__ = original_cls.__qualname__
+        pydantic_complete = _pydantic_dataclasses.complete_dataclass(
+            cls, config_wrapper, raise_errors=False, types_namespace=None
+        )
+        cls.__pydantic_complete__ = pydantic_complete  # type: ignore
+        return cls
+
+    if _cls is None:
+        return create_dataclass
+
+    return create_dataclass(_cls)


 __getattr__ = getattr_migration(__name__)
+
 if (3, 8) <= sys.version_info < (3, 11):
+    # Monkeypatch dataclasses.InitVar so that typing doesn't error if it occurs as a type when evaluating type hints
+    # Starting in 3.11, typing.get_type_hints will not raise an error if the retrieved type hints are not callable.

-    def _call_initvar(*args: Any, **kwargs: Any) ->NoReturn:
+    def _call_initvar(*args: Any, **kwargs: Any) -> NoReturn:
         """This function does nothing but raise an error that is as similar as possible to what you'd get
         if you were to try calling `InitVar[int]()` without this monkeypatch. The whole purpose is just
         to ensure typing._type_check does not error if the type hint evaluates to `InitVar[<parameter>]`.
         """
-        pass
+        raise TypeError("'InitVar' object is not callable")
+
     dataclasses.InitVar.__call__ = _call_initvar


-def rebuild_dataclass(cls: type[PydanticDataclass], *, force: bool=False,
-    raise_errors: bool=True, _parent_namespace_depth: int=2,
-    _types_namespace: (dict[str, Any] | None)=None) ->(bool | None):
+def rebuild_dataclass(
+    cls: type[PydanticDataclass],
+    *,
+    force: bool = False,
+    raise_errors: bool = True,
+    _parent_namespace_depth: int = 2,
+    _types_namespace: dict[str, Any] | None = None,
+) -> bool | None:
     """Try to rebuild the pydantic-core schema for the dataclass.

     This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
@@ -92,11 +300,30 @@ def rebuild_dataclass(cls: type[PydanticDataclass], *, force: bool=False,
         Returns `None` if the schema is already "complete" and rebuilding was not required.
         If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`.
     """
-    pass
+    if not force and cls.__pydantic_complete__:
+        return None
+    else:
+        if _types_namespace is not None:
+            types_namespace: dict[str, Any] | None = _types_namespace.copy()
+        else:
+            if _parent_namespace_depth > 0:
+                frame_parent_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth) or {}
+                # Note: we may need to add something similar to cls.__pydantic_parent_namespace__ from BaseModel
+                #   here when implementing handling of recursive generics. See BaseModel.model_rebuild for reference.
+                types_namespace = frame_parent_ns
+            else:
+                types_namespace = {}
+
+            types_namespace = _typing_extra.get_cls_types_namespace(cls, types_namespace)
+        return _pydantic_dataclasses.complete_dataclass(
+            cls,
+            _config.ConfigWrapper(cls.__pydantic_config__, check=False),
+            raise_errors=raise_errors,
+            types_namespace=types_namespace,
+        )


-def is_pydantic_dataclass(class_: type[Any], /) ->TypeGuard[type[
-    PydanticDataclass]]:
+def is_pydantic_dataclass(class_: type[Any], /) -> TypeGuard[type[PydanticDataclass]]:
     """Whether a class is a pydantic dataclass.

     Args:
@@ -105,4 +332,7 @@ def is_pydantic_dataclass(class_: type[Any], /) ->TypeGuard[type[
     Returns:
         `True` if the class is a pydantic dataclass, `False` otherwise.
     """
-    pass
+    try:
+        return '__pydantic_validator__' in class_.__dict__ and dataclasses.is_dataclass(class_)
+    except AttributeError:
+        return False
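
As a quick sanity check (illustrative only, not part of the patch), the completed `dataclass` decorator and `is_pydantic_dataclass` behave roughly as follows:

import pydantic
from pydantic.dataclasses import dataclass, is_pydantic_dataclass

@dataclass
class User:
    id: int
    name: str = 'John Doe'

print(User(id='42'))  # '42' is coerced to an int during validation
#> User(id=42, name='John Doe')
print(is_pydantic_dataclass(User))
#> True

try:
    User(id='not an int')
except pydantic.ValidationError as exc:
    print(exc.errors()[0]['type'])
    #> int_parsing
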
diff --git a/pydantic/datetime_parse.py b/pydantic/datetime_parse.py
index c562404a1..53d52649e 100644
--- a/pydantic/datetime_parse.py
+++ b/pydantic/datetime_parse.py
@@ -1,3 +1,5 @@
 """The `datetime_parse` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/decorator.py b/pydantic/decorator.py
index 789184be3..0d97560c1 100644
--- a/pydantic/decorator.py
+++ b/pydantic/decorator.py
@@ -1,3 +1,5 @@
 """The `decorator` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/deprecated/class_validators.py b/pydantic/deprecated/class_validators.py
index 5ece7422d..aae33628d 100644
--- a/pydantic/deprecated/class_validators.py
+++ b/pydantic/deprecated/class_validators.py
@@ -1,84 +1,93 @@
 """Old `@validator` and `@root_validator` function validators from V1."""
+
 from __future__ import annotations as _annotations
+
 from functools import partial, partialmethod
 from types import FunctionType
 from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, overload
 from warnings import warn
+
 from typing_extensions import Literal, Protocol, TypeAlias, deprecated
+
 from .._internal import _decorators, _decorators_v1
 from ..errors import PydanticUserError
 from ..warnings import PydanticDeprecatedSince20
-_ALLOW_REUSE_WARNING_MESSAGE = (
-    '`allow_reuse` is deprecated and will be ignored; it should no longer be necessary'
-    )
-if TYPE_CHECKING:

+_ALLOW_REUSE_WARNING_MESSAGE = '`allow_reuse` is deprecated and will be ignored; it should no longer be necessary'

-    class _OnlyValueValidatorClsMethod(Protocol):

-        def __call__(self, __cls: Any, __value: Any) ->Any:
-            ...
+if TYPE_CHECKING:

+    class _OnlyValueValidatorClsMethod(Protocol):
+        def __call__(self, __cls: Any, __value: Any) -> Any: ...

     class _V1ValidatorWithValuesClsMethod(Protocol):
-
-        def __call__(self, __cls: Any, __value: Any, values: dict[str, Any]
-            ) ->Any:
-            ...
-
+        def __call__(self, __cls: Any, __value: Any, values: dict[str, Any]) -> Any: ...

     class _V1ValidatorWithValuesKwOnlyClsMethod(Protocol):
-
-        def __call__(self, __cls: Any, __value: Any, *, values: dict[str, Any]
-            ) ->Any:
-            ...
-
+        def __call__(self, __cls: Any, __value: Any, *, values: dict[str, Any]) -> Any: ...

     class _V1ValidatorWithKwargsClsMethod(Protocol):
-
-        def __call__(self, __cls: Any, **kwargs: Any) ->Any:
-            ...
-
+        def __call__(self, __cls: Any, **kwargs: Any) -> Any: ...

     class _V1ValidatorWithValuesAndKwargsClsMethod(Protocol):
-
-        def __call__(self, __cls: Any, values: dict[str, Any], **kwargs: Any
-            ) ->Any:
-            ...
-
+        def __call__(self, __cls: Any, values: dict[str, Any], **kwargs: Any) -> Any: ...

     class _V1RootValidatorClsMethod(Protocol):
+        def __call__(
+            self, __cls: Any, __values: _decorators_v1.RootValidatorValues
+        ) -> _decorators_v1.RootValidatorValues: ...

-        def __call__(self, __cls: Any, __values: _decorators_v1.
-            RootValidatorValues) ->_decorators_v1.RootValidatorValues:
-            ...
-    V1Validator = Union[_OnlyValueValidatorClsMethod,
+    V1Validator = Union[
+        _OnlyValueValidatorClsMethod,
         _V1ValidatorWithValuesClsMethod,
         _V1ValidatorWithValuesKwOnlyClsMethod,
         _V1ValidatorWithKwargsClsMethod,
-        _V1ValidatorWithValuesAndKwargsClsMethod, _decorators_v1.
-        V1ValidatorWithValues, _decorators_v1.V1ValidatorWithValuesKwOnly,
-        _decorators_v1.V1ValidatorWithKwargs, _decorators_v1.
-        V1ValidatorWithValuesAndKwargs]
-    V1RootValidator = Union[_V1RootValidatorClsMethod, _decorators_v1.
-        V1RootValidatorFunction]
-    _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any],
-        staticmethod[Any, Any], partialmethod[Any]]
-    _V1ValidatorType = TypeVar('_V1ValidatorType', V1Validator,
-        _PartialClsOrStaticMethod)
-    _V1RootValidatorFunctionType = TypeVar('_V1RootValidatorFunctionType',
-        _decorators_v1.V1RootValidatorFunction, _V1RootValidatorClsMethod,
-        _PartialClsOrStaticMethod)
+        _V1ValidatorWithValuesAndKwargsClsMethod,
+        _decorators_v1.V1ValidatorWithValues,
+        _decorators_v1.V1ValidatorWithValuesKwOnly,
+        _decorators_v1.V1ValidatorWithKwargs,
+        _decorators_v1.V1ValidatorWithValuesAndKwargs,
+    ]
+
+    V1RootValidator = Union[
+        _V1RootValidatorClsMethod,
+        _decorators_v1.V1RootValidatorFunction,
+    ]
+
+    _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any]]
+
+    # Allow both a V1 (assumed pre=False) or V2 (assumed mode='after') validator
+    # We lie to type checkers and say we return the same thing we get
+    # but in reality we return a proxy object that _mostly_ behaves like the wrapped thing
+    _V1ValidatorType = TypeVar('_V1ValidatorType', V1Validator, _PartialClsOrStaticMethod)
+    _V1RootValidatorFunctionType = TypeVar(
+        '_V1RootValidatorFunctionType',
+        _decorators_v1.V1RootValidatorFunction,
+        _V1RootValidatorClsMethod,
+        _PartialClsOrStaticMethod,
+    )
 else:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
     DeprecationWarning = PydanticDeprecatedSince20


 @deprecated(
-    'Pydantic V1 style `@validator` validators are deprecated. You should migrate to Pydantic V2 style `@field_validator` validators, see the migration guide for more details'
-    , category=None)
-def validator(__field: str, *fields: str, pre: bool=False, each_item: bool=
-    False, always: bool=False, check_fields: (bool | None)=None,
-    allow_reuse: bool=False) ->Callable[[_V1ValidatorType], _V1ValidatorType]:
+    'Pydantic V1 style `@validator` validators are deprecated.'
+    ' You should migrate to Pydantic V2 style `@field_validator` validators,'
+    ' see the migration guide for more details',
+    category=None,
+)
+def validator(
+    __field: str,
+    *fields: str,
+    pre: bool = False,
+    each_item: bool = False,
+    always: bool = False,
+    check_fields: bool | None = None,
+    allow_reuse: bool = False,
+) -> Callable[[_V1ValidatorType], _V1ValidatorType]:
     """Decorate methods on the class indicating that they should be used to validate fields.

     Args:
@@ -100,14 +109,104 @@ def validator(__field: str, *fields: str, pre: bool=False, each_item: bool=
         Callable: A decorator that can be used to decorate a
             function to be used as a validator.
     """
-    pass
+    warn(
+        'Pydantic V1 style `@validator` validators are deprecated.'
+        ' You should migrate to Pydantic V2 style `@field_validator` validators,'
+        ' see the migration guide for more details',
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
+    if allow_reuse is True:  # pragma: no cover
+        warn(_ALLOW_REUSE_WARNING_MESSAGE, DeprecationWarning)
+    fields = tuple((__field, *fields))
+    if isinstance(fields[0], FunctionType):
+        raise PydanticUserError(
+            '`@validator` should be used with fields and keyword arguments, not bare. '
+            "E.g. usage should be `@validator('<field_name>', ...)`",
+            code='validator-no-fields',
+        )
+    elif not all(isinstance(field, str) for field in fields):
+        raise PydanticUserError(
+            '`@validator` fields should be passed as separate string args. '
+            "E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`",
+            code='validator-invalid-fields',
+        )
+
+    mode: Literal['before', 'after'] = 'before' if pre is True else 'after'
+
+    def dec(f: Any) -> _decorators.PydanticDescriptorProxy[Any]:
+        if _decorators.is_instance_method_from_sig(f):
+            raise PydanticUserError(
+                '`@validator` cannot be applied to instance methods', code='validator-instance-method'
+            )
+        # auto apply the @classmethod decorator
+        f = _decorators.ensure_classmethod_based_on_signature(f)
+        wrap = _decorators_v1.make_generic_v1_field_validator
+        validator_wrapper_info = _decorators.ValidatorDecoratorInfo(
+            fields=fields,
+            mode=mode,
+            each_item=each_item,
+            always=always,
+            check_fields=check_fields,
+        )
+        return _decorators.PydanticDescriptorProxy(f, validator_wrapper_info, shim=wrap)
+
+    return dec  # type: ignore[return-value]
+
+
+@overload
+def root_validator(
+    *,
+    # if you don't specify `pre` the default is `pre=False`
+    # which means you need to specify `skip_on_failure=True`
+    skip_on_failure: Literal[True],
+    allow_reuse: bool = ...,
+) -> Callable[
+    [_V1RootValidatorFunctionType],
+    _V1RootValidatorFunctionType,
+]: ...
+
+
+@overload
+def root_validator(
+    *,
+    # if you specify `pre=True` then you don't need to specify
+    # `skip_on_failure`, in fact it is not allowed as an argument!
+    pre: Literal[True],
+    allow_reuse: bool = ...,
+) -> Callable[
+    [_V1RootValidatorFunctionType],
+    _V1RootValidatorFunctionType,
+]: ...
+
+
+@overload
+def root_validator(
+    *,
+    # if you explicitly specify `pre=False` then you
+    # MUST specify `skip_on_failure=True`
+    pre: Literal[False],
+    skip_on_failure: Literal[True],
+    allow_reuse: bool = ...,
+) -> Callable[
+    [_V1RootValidatorFunctionType],
+    _V1RootValidatorFunctionType,
+]: ...


 @deprecated(
-    'Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details'
-    , category=None)
-def root_validator(*__args, pre: bool=False, skip_on_failure: bool=False,
-    allow_reuse: bool=False) ->Any:
+    'Pydantic V1 style `@root_validator` validators are deprecated.'
+    ' You should migrate to Pydantic V2 style `@model_validator` validators,'
+    ' see the migration guide for more details',
+    category=None,
+)
+def root_validator(
+    *__args,
+    pre: bool = False,
+    skip_on_failure: bool = False,
+    allow_reuse: bool = False,
+) -> Any:
     """Decorate methods on a model indicating that they should be used to validate (and perhaps
     modify) data either before or after standard model parsing/validation is performed.

@@ -122,4 +221,36 @@ def root_validator(*__args, pre: bool=False, skip_on_failure: bool=False,
     Returns:
         Any: A decorator that can be used to decorate a function to be used as a root_validator.
     """
-    pass
+    warn(
+        'Pydantic V1 style `@root_validator` validators are deprecated.'
+        ' You should migrate to Pydantic V2 style `@model_validator` validators,'
+        ' see the migration guide for more details',
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
+    if __args:
+        # Ensure a nice error is raised if someone attempts to use the bare decorator
+        return root_validator()(*__args)  # type: ignore
+
+    if allow_reuse is True:  # pragma: no cover
+        warn(_ALLOW_REUSE_WARNING_MESSAGE, DeprecationWarning)
+    mode: Literal['before', 'after'] = 'before' if pre is True else 'after'
+    if pre is False and skip_on_failure is not True:
+        raise PydanticUserError(
+            'If you use `@root_validator` with pre=False (the default) you MUST specify `skip_on_failure=True`.'
+            ' Note that `@root_validator` is deprecated and should be replaced with `@model_validator`.',
+            code='root-validator-pre-skip',
+        )
+
+    wrap = partial(_decorators_v1.make_v1_generic_root_validator, pre=pre)
+
+    def dec(f: Callable[..., Any] | classmethod[Any, Any, Any] | staticmethod[Any, Any]) -> Any:
+        if _decorators.is_instance_method_from_sig(f):
+            raise TypeError('`@root_validator` cannot be applied to instance methods')
+        # auto apply the @classmethod decorator
+        res = _decorators.ensure_classmethod_based_on_signature(f)
+        dec_info = _decorators.RootValidatorDecoratorInfo(mode=mode)
+        return _decorators.PydanticDescriptorProxy(res, dec_info, shim=wrap)
+
+    return dec
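
For context, a minimal usage sketch of the migration targets named in the deprecation messages above (not part of the patch; assumes pydantic v2 with `field_validator` and `model_validator` available):

from pydantic import BaseModel, field_validator, model_validator


class User(BaseModel):
    name: str

    # V2 replacement for the deprecated V1 `@validator('name')`
    @field_validator('name')
    @classmethod
    def name_not_blank(cls, v: str) -> str:
        if not v.strip():
            raise ValueError('name must not be blank')
        return v

    # V2 replacement for the deprecated V1 `@root_validator(skip_on_failure=True)`
    @model_validator(mode='after')
    def check_consistency(self) -> 'User':
        return self


print(User(name='Alice'))  # name='Alice'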
diff --git a/pydantic/deprecated/config.py b/pydantic/deprecated/config.py
index 32dda96fd..45400c658 100644
--- a/pydantic/deprecated/config.py
+++ b/pydantic/deprecated/config.py
@@ -1,29 +1,32 @@
 from __future__ import annotations as _annotations
+
 import warnings
 from typing import TYPE_CHECKING, Any
+
 from typing_extensions import Literal, deprecated
+
 from .._internal import _config
 from ..warnings import PydanticDeprecatedSince20
+
 if not TYPE_CHECKING:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
     DeprecationWarning = PydanticDeprecatedSince20
+
 __all__ = 'BaseConfig', 'Extra'


 class _ConfigMetaclass(type):
-
-    def __getattr__(self, item: str) ->Any:
+    def __getattr__(self, item: str) -> Any:
         try:
             obj = _config.config_defaults[item]
             warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning)
             return obj
         except KeyError as exc:
-            raise AttributeError(
-                f"type object '{self.__name__}' has no attribute {exc}"
-                ) from exc
+            raise AttributeError(f"type object '{self.__name__}' has no attribute {exc}") from exc


-@deprecated('BaseConfig is deprecated. Use the `pydantic.ConfigDict` instead.',
-    category=PydanticDeprecatedSince20)
+@deprecated('BaseConfig is deprecated. Use the `pydantic.ConfigDict` instead.', category=PydanticDeprecatedSince20)
 class BaseConfig(metaclass=_ConfigMetaclass):
     """This class is only retained for backwards compatibility.

@@ -31,7 +34,7 @@ class BaseConfig(metaclass=_ConfigMetaclass):
         BaseConfig is deprecated. Use the [`pydantic.ConfigDict`][pydantic.ConfigDict] instead.
     """

-    def __getattr__(self, item: str) ->Any:
+    def __getattr__(self, item: str) -> Any:
         try:
             obj = super().__getattribute__(item)
             warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning)
@@ -40,26 +43,29 @@ class BaseConfig(metaclass=_ConfigMetaclass):
             try:
                 return getattr(type(self), item)
             except AttributeError:
+                # re-raising changes the displayed text to reflect that `self` is not a type
                 raise AttributeError(str(exc)) from exc

-    def __init_subclass__(cls, **kwargs: Any) ->None:
+    def __init_subclass__(cls, **kwargs: Any) -> None:
         warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning)
         return super().__init_subclass__(**kwargs)


 class _ExtraMeta(type):
-
-    def __getattribute__(self, __name: str) ->Any:
+    def __getattribute__(self, __name: str) -> Any:
+        # The @deprecated decorator accesses other attributes, so we only emit a warning for the expected ones
         if __name in {'allow', 'ignore', 'forbid'}:
             warnings.warn(
-                "`pydantic.config.Extra` is deprecated, use literal values instead (e.g. `extra='allow'`)"
-                , DeprecationWarning, stacklevel=2)
+                "`pydantic.config.Extra` is deprecated, use literal values instead (e.g. `extra='allow'`)",
+                DeprecationWarning,
+                stacklevel=2,
+            )
         return super().__getattribute__(__name)


 @deprecated(
-    "Extra is deprecated. Use literal values instead (e.g. `extra='allow'`)",
-    category=PydanticDeprecatedSince20)
+    "Extra is deprecated. Use literal values instead (e.g. `extra='allow'`)", category=PydanticDeprecatedSince20
+)
 class Extra(metaclass=_ExtraMeta):
     allow: Literal['allow'] = 'allow'
     ignore: Literal['ignore'] = 'ignore'
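
A short illustration of the replacement recommended by the deprecation messages above (not part of the patch; assumes pydantic v2):

from pydantic import BaseModel, ConfigDict


class Settings(BaseModel):
    # replaces `class Config: extra = Extra.allow` from V1
    model_config = ConfigDict(extra='allow')

    name: str


s = Settings(name='demo', region='eu')  # extra key is kept because extra='allow'
print(s.model_dump())  # {'name': 'demo', 'region': 'eu'}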
diff --git a/pydantic/deprecated/copy_internals.py b/pydantic/deprecated/copy_internals.py
index 6909df428..efe5de289 100644
--- a/pydantic/deprecated/copy_internals.py
+++ b/pydantic/deprecated/copy_internals.py
@@ -1,16 +1,224 @@
 from __future__ import annotations as _annotations
+
 import typing
 from copy import deepcopy
 from enum import Enum
 from typing import Any, Tuple
+
 import typing_extensions
-from .._internal import _model_construction, _typing_extra, _utils
+
+from .._internal import (
+    _model_construction,
+    _typing_extra,
+    _utils,
+)
+
 if typing.TYPE_CHECKING:
     from .. import BaseModel
     from .._internal._utils import AbstractSetIntStr, MappingIntStrAny
+
     AnyClassMethod = classmethod[Any, Any, Any]
     TupleGenerator = typing.Generator[Tuple[str, Any], None, None]
     Model = typing.TypeVar('Model', bound='BaseModel')
-    IncEx: typing_extensions.TypeAlias = (
-        'set[int] | set[str] | dict[int, Any] | dict[str, Any] | None')
+    # should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope
+    IncEx: typing_extensions.TypeAlias = 'set[int] | set[str] | dict[int, Any] | dict[str, Any] | None'
+
 _object_setattr = _model_construction.object_setattr
+
+
+def _iter(
+    self: BaseModel,
+    to_dict: bool = False,
+    by_alias: bool = False,
+    include: AbstractSetIntStr | MappingIntStrAny | None = None,
+    exclude: AbstractSetIntStr | MappingIntStrAny | None = None,
+    exclude_unset: bool = False,
+    exclude_defaults: bool = False,
+    exclude_none: bool = False,
+) -> TupleGenerator:
+    # Merge field set excludes with explicit exclude parameter with explicit overriding field set options.
+    # The extra "is not None" guards are not logically necessary but optimize performance for the simple case.
+    if exclude is not None:
+        exclude = _utils.ValueItems.merge(
+            {k: v.exclude for k, v in self.model_fields.items() if v.exclude is not None}, exclude
+        )
+
+    if include is not None:
+        include = _utils.ValueItems.merge({k: True for k in self.model_fields}, include, intersect=True)
+
+    allowed_keys = _calculate_keys(self, include=include, exclude=exclude, exclude_unset=exclude_unset)  # type: ignore
+    if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none):
+        # huge boost for plain _iter()
+        yield from self.__dict__.items()
+        if self.__pydantic_extra__:
+            yield from self.__pydantic_extra__.items()
+        return
+
+    value_exclude = _utils.ValueItems(self, exclude) if exclude is not None else None
+    value_include = _utils.ValueItems(self, include) if include is not None else None
+
+    if self.__pydantic_extra__ is None:
+        items = self.__dict__.items()
+    else:
+        items = list(self.__dict__.items()) + list(self.__pydantic_extra__.items())
+
+    for field_key, v in items:
+        if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None):
+            continue
+
+        if exclude_defaults:
+            try:
+                field = self.model_fields[field_key]
+            except KeyError:
+                pass
+            else:
+                if not field.is_required() and field.default == v:
+                    continue
+
+        if by_alias and field_key in self.model_fields:
+            dict_key = self.model_fields[field_key].alias or field_key
+        else:
+            dict_key = field_key
+
+        if to_dict or value_include or value_exclude:
+            v = _get_value(
+                type(self),
+                v,
+                to_dict=to_dict,
+                by_alias=by_alias,
+                include=value_include and value_include.for_element(field_key),
+                exclude=value_exclude and value_exclude.for_element(field_key),
+                exclude_unset=exclude_unset,
+                exclude_defaults=exclude_defaults,
+                exclude_none=exclude_none,
+            )
+        yield dict_key, v
+
+
+def _copy_and_set_values(
+    self: Model,
+    values: dict[str, Any],
+    fields_set: set[str],
+    extra: dict[str, Any] | None = None,
+    private: dict[str, Any] | None = None,
+    *,
+    deep: bool,  # UP006
+) -> Model:
+    if deep:
+        # chances of having empty dict here are quite low for using smart_deepcopy
+        values = deepcopy(values)
+        extra = deepcopy(extra)
+        private = deepcopy(private)
+
+    cls = self.__class__
+    m = cls.__new__(cls)
+    _object_setattr(m, '__dict__', values)
+    _object_setattr(m, '__pydantic_extra__', extra)
+    _object_setattr(m, '__pydantic_fields_set__', fields_set)
+    _object_setattr(m, '__pydantic_private__', private)
+
+    return m
+
+
+@typing.no_type_check
+def _get_value(
+    cls: type[BaseModel],
+    v: Any,
+    to_dict: bool,
+    by_alias: bool,
+    include: AbstractSetIntStr | MappingIntStrAny | None,
+    exclude: AbstractSetIntStr | MappingIntStrAny | None,
+    exclude_unset: bool,
+    exclude_defaults: bool,
+    exclude_none: bool,
+) -> Any:
+    from .. import BaseModel
+
+    if isinstance(v, BaseModel):
+        if to_dict:
+            return v.model_dump(
+                by_alias=by_alias,
+                exclude_unset=exclude_unset,
+                exclude_defaults=exclude_defaults,
+                include=include,  # type: ignore
+                exclude=exclude,  # type: ignore
+                exclude_none=exclude_none,
+            )
+        else:
+            return v.copy(include=include, exclude=exclude)
+
+    value_exclude = _utils.ValueItems(v, exclude) if exclude else None
+    value_include = _utils.ValueItems(v, include) if include else None
+
+    if isinstance(v, dict):
+        return {
+            k_: _get_value(
+                cls,
+                v_,
+                to_dict=to_dict,
+                by_alias=by_alias,
+                exclude_unset=exclude_unset,
+                exclude_defaults=exclude_defaults,
+                include=value_include and value_include.for_element(k_),
+                exclude=value_exclude and value_exclude.for_element(k_),
+                exclude_none=exclude_none,
+            )
+            for k_, v_ in v.items()
+            if (not value_exclude or not value_exclude.is_excluded(k_))
+            and (not value_include or value_include.is_included(k_))
+        }
+
+    elif _utils.sequence_like(v):
+        seq_args = (
+            _get_value(
+                cls,
+                v_,
+                to_dict=to_dict,
+                by_alias=by_alias,
+                exclude_unset=exclude_unset,
+                exclude_defaults=exclude_defaults,
+                include=value_include and value_include.for_element(i),
+                exclude=value_exclude and value_exclude.for_element(i),
+                exclude_none=exclude_none,
+            )
+            for i, v_ in enumerate(v)
+            if (not value_exclude or not value_exclude.is_excluded(i))
+            and (not value_include or value_include.is_included(i))
+        )
+
+        return v.__class__(*seq_args) if _typing_extra.is_namedtuple(v.__class__) else v.__class__(seq_args)
+
+    elif isinstance(v, Enum) and getattr(cls.model_config, 'use_enum_values', False):
+        return v.value
+
+    else:
+        return v
+
+
+def _calculate_keys(
+    self: BaseModel,
+    include: MappingIntStrAny | None,
+    exclude: MappingIntStrAny | None,
+    exclude_unset: bool,
+    update: typing.Dict[str, Any] | None = None,  # noqa UP006
+) -> typing.AbstractSet[str] | None:
+    if include is None and exclude is None and exclude_unset is False:
+        return None
+
+    keys: typing.AbstractSet[str]
+    if exclude_unset:
+        keys = self.__pydantic_fields_set__.copy()
+    else:
+        keys = set(self.__dict__.keys())
+        keys = keys | (self.__pydantic_extra__ or {}).keys()
+
+    if include is not None:
+        keys &= include.keys()
+
+    if update:
+        keys -= update.keys()
+
+    if exclude:
+        keys -= {k for k, v in exclude.items() if _utils.ValueItems.is_true(v)}
+
+    return keys
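
The include/exclude semantics implemented by these restored helpers are most easily seen through the public dump API; a hedged example, not part of the patch:

from pydantic import BaseModel


class Point(BaseModel):
    x: int
    y: int
    label: str = 'origin'


p = Point(x=1, y=2)
print(p.model_dump(include={'x', 'y'}))  # {'x': 1, 'y': 2}
print(p.model_dump(exclude={'label'}))   # {'x': 1, 'y': 2}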
diff --git a/pydantic/deprecated/decorator.py b/pydantic/deprecated/decorator.py
index 6be076d95..0c0ea7445 100644
--- a/pydantic/deprecated/decorator.py
+++ b/pydantic/deprecated/decorator.py
@@ -1,29 +1,69 @@
 import warnings
 from functools import wraps
 from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload
+
 from typing_extensions import deprecated
+
 from .._internal import _config, _typing_extra
 from ..alias_generators import to_pascal
 from ..errors import PydanticUserError
 from ..functional_validators import field_validator
 from ..main import BaseModel, create_model
 from ..warnings import PydanticDeprecatedSince20
+
 if not TYPE_CHECKING:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
     DeprecationWarning = PydanticDeprecatedSince20
-__all__ = 'validate_arguments',
+
+__all__ = ('validate_arguments',)
+
 if TYPE_CHECKING:
     AnyCallable = Callable[..., Any]
+
     AnyCallableT = TypeVar('AnyCallableT', bound=AnyCallable)
     ConfigType = Union[None, Type[Any], Dict[str, Any]]


+@overload
+def validate_arguments(
+    func: None = None, *, config: 'ConfigType' = None
+) -> Callable[['AnyCallableT'], 'AnyCallableT']: ...
+
+
+@overload
+def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT': ...
+
+
 @deprecated(
-    'The `validate_arguments` method is deprecated; use `validate_call` instead.'
-    , category=None)
-def validate_arguments(func: Optional['AnyCallableT']=None, *, config:
-    'ConfigType'=None) ->Any:
+    'The `validate_arguments` method is deprecated; use `validate_call` instead.',
+    category=None,
+)
+def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any:
     """Decorator to validate the arguments passed to a function."""
-    pass
+    warnings.warn(
+        'The `validate_arguments` method is deprecated; use `validate_call` instead.',
+        PydanticDeprecatedSince20,
+        stacklevel=2,
+    )
+
+    def validate(_func: 'AnyCallable') -> 'AnyCallable':
+        vd = ValidatedFunction(_func, config)
+
+        @wraps(_func)
+        def wrapper_function(*args: Any, **kwargs: Any) -> Any:
+            return vd.call(*args, **kwargs)
+
+        wrapper_function.vd = vd  # type: ignore
+        wrapper_function.validate = vd.init_model_instance  # type: ignore
+        wrapper_function.raw_function = vd.raw_function  # type: ignore
+        wrapper_function.model = vd.model  # type: ignore
+        return wrapper_function
+
+    if func:
+        return validate(func)
+    else:
+        return validate


 ALT_V_ARGS = 'v__args'
@@ -33,22 +73,25 @@ V_DUPLICATE_KWARGS = 'v__duplicate_kwargs'


 class ValidatedFunction:
-
     def __init__(self, function: 'AnyCallable', config: 'ConfigType'):
         from inspect import Parameter, signature
+
         parameters: Mapping[str, Parameter] = signature(function).parameters
-        if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS,
-            V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}:
+
+        if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}:
             raise PydanticUserError(
-                f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" are not permitted as argument names when using the "{validate_arguments.__name__}" decorator'
-                , code=None)
+                f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" '
+                f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator',
+                code=None,
+            )
+
         self.raw_function = function
         self.arg_mapping: Dict[int, str] = {}
         self.positional_only_args: set[str] = set()
         self.v_args_name = 'args'
         self.v_kwargs_name = 'kwargs'
-        type_hints = _typing_extra.get_type_hints(function, include_extras=True
-            )
+
+        type_hints = _typing_extra.get_type_hints(function, include_extras=True)
         takes_args = False
         takes_kwargs = False
         fields: Dict[str, Tuple[Any, Any]] = {}
@@ -57,6 +100,7 @@ class ValidatedFunction:
                 annotation = Any
             else:
                 annotation = type_hints[name]
+
             default = ... if p.default is p.empty else p.default
             if p.kind == Parameter.POSITIONAL_ONLY:
                 self.arg_mapping[i] = name
@@ -78,12 +122,158 @@ class ValidatedFunction:
                 self.v_kwargs_name = name
                 fields[name] = Dict[str, annotation], None
                 takes_kwargs = True
+
+        # these checks avoid a clash between "args" and a field with that name
         if not takes_args and self.v_args_name in fields:
             self.v_args_name = ALT_V_ARGS
+
+        # same with "kwargs"
         if not takes_kwargs and self.v_kwargs_name in fields:
             self.v_kwargs_name = ALT_V_KWARGS
+
         if not takes_args:
+            # we add the field so validation below can raise the correct exception
             fields[self.v_args_name] = List[Any], None
+
         if not takes_kwargs:
+            # same with kwargs
             fields[self.v_kwargs_name] = Dict[Any, Any], None
+
         self.create_model(fields, takes_args, takes_kwargs, config)
+
+    def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel:
+        values = self.build_values(args, kwargs)
+        return self.model(**values)
+
+    def call(self, *args: Any, **kwargs: Any) -> Any:
+        m = self.init_model_instance(*args, **kwargs)
+        return self.execute(m)
+
+    def build_values(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]:
+        values: Dict[str, Any] = {}
+        if args:
+            arg_iter = enumerate(args)
+            while True:
+                try:
+                    i, a = next(arg_iter)
+                except StopIteration:
+                    break
+                arg_name = self.arg_mapping.get(i)
+                if arg_name is not None:
+                    values[arg_name] = a
+                else:
+                    values[self.v_args_name] = [a] + [a for _, a in arg_iter]
+                    break
+
+        var_kwargs: Dict[str, Any] = {}
+        wrong_positional_args = []
+        duplicate_kwargs = []
+        fields_alias = [
+            field.alias
+            for name, field in self.model.model_fields.items()
+            if name not in (self.v_args_name, self.v_kwargs_name)
+        ]
+        non_var_fields = set(self.model.model_fields) - {self.v_args_name, self.v_kwargs_name}
+        for k, v in kwargs.items():
+            if k in non_var_fields or k in fields_alias:
+                if k in self.positional_only_args:
+                    wrong_positional_args.append(k)
+                if k in values:
+                    duplicate_kwargs.append(k)
+                values[k] = v
+            else:
+                var_kwargs[k] = v
+
+        if var_kwargs:
+            values[self.v_kwargs_name] = var_kwargs
+        if wrong_positional_args:
+            values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args
+        if duplicate_kwargs:
+            values[V_DUPLICATE_KWARGS] = duplicate_kwargs
+        return values
+
+    def execute(self, m: BaseModel) -> Any:
+        d = {k: v for k, v in m.__dict__.items() if k in m.__pydantic_fields_set__ or m.model_fields[k].default_factory}
+        var_kwargs = d.pop(self.v_kwargs_name, {})
+
+        if self.v_args_name in d:
+            args_: List[Any] = []
+            in_kwargs = False
+            kwargs = {}
+            for name, value in d.items():
+                if in_kwargs:
+                    kwargs[name] = value
+                elif name == self.v_args_name:
+                    args_ += value
+                    in_kwargs = True
+                else:
+                    args_.append(value)
+            return self.raw_function(*args_, **kwargs, **var_kwargs)
+        elif self.positional_only_args:
+            args_ = []
+            kwargs = {}
+            for name, value in d.items():
+                if name in self.positional_only_args:
+                    args_.append(value)
+                else:
+                    kwargs[name] = value
+            return self.raw_function(*args_, **kwargs, **var_kwargs)
+        else:
+            return self.raw_function(**d, **var_kwargs)
+
+    def create_model(self, fields: Dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None:
+        pos_args = len(self.arg_mapping)
+
+        config_wrapper = _config.ConfigWrapper(config)
+
+        if config_wrapper.alias_generator:
+            raise PydanticUserError(
+                'Setting the "alias_generator" property on custom Config for '
+                '@validate_arguments is not yet supported, please remove.',
+                code=None,
+            )
+        if config_wrapper.extra is None:
+            config_wrapper.config_dict['extra'] = 'forbid'
+
+        class DecoratorBaseModel(BaseModel):
+            @field_validator(self.v_args_name, check_fields=False)
+            @classmethod
+            def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]:
+                if takes_args or v is None:
+                    return v
+
+                raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given')
+
+            @field_validator(self.v_kwargs_name, check_fields=False)
+            @classmethod
+            def check_kwargs(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
+                if takes_kwargs or v is None:
+                    return v
+
+                plural = '' if len(v) == 1 else 's'
+                keys = ', '.join(map(repr, v.keys()))
+                raise TypeError(f'unexpected keyword argument{plural}: {keys}')
+
+            @field_validator(V_POSITIONAL_ONLY_NAME, check_fields=False)
+            @classmethod
+            def check_positional_only(cls, v: Optional[List[str]]) -> None:
+                if v is None:
+                    return
+
+                plural = '' if len(v) == 1 else 's'
+                keys = ', '.join(map(repr, v))
+                raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}')
+
+            @field_validator(V_DUPLICATE_KWARGS, check_fields=False)
+            @classmethod
+            def check_duplicate_kwargs(cls, v: Optional[List[str]]) -> None:
+                if v is None:
+                    return
+
+                plural = '' if len(v) == 1 else 's'
+                keys = ', '.join(map(repr, v))
+                raise TypeError(f'multiple values for argument{plural}: {keys}')
+
+            model_config = config_wrapper.config_dict
+
+        self.model = create_model(to_pascal(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields)
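
For comparison, a minimal sketch (not part of the patch) of the supported replacement named in the deprecation message, `validate_call`:

from pydantic import validate_call


@validate_call
def repeat(word: str, times: int) -> str:
    return word * times


print(repeat('ab', 3))    # 'ababab'
print(repeat('ab', '3'))  # '3' is coerced to int in lax mode, so also 'ababab'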
diff --git a/pydantic/deprecated/json.py b/pydantic/deprecated/json.py
index 062beef97..9ba5256cf 100644
--- a/pydantic/deprecated/json.py
+++ b/pydantic/deprecated/json.py
@@ -9,17 +9,27 @@ from re import Pattern
 from types import GeneratorType
 from typing import TYPE_CHECKING, Any, Callable, Dict, Type, Union
 from uuid import UUID
+
 from typing_extensions import deprecated
+
 from ..color import Color
 from ..networks import NameEmail
 from ..types import SecretBytes, SecretStr
 from ..warnings import PydanticDeprecatedSince20
+
 if not TYPE_CHECKING:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
     DeprecationWarning = PydanticDeprecatedSince20
+
 __all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat'


-def decimal_encoder(dec_value: Decimal) ->Union[int, float]:
+def isoformat(o: Union[datetime.date, datetime.time]) -> str:
+    return o.isoformat()
+
+
+def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
     """Encodes a Decimal as int if there's no exponent, otherwise float.

     This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
@@ -33,20 +43,98 @@ def decimal_encoder(dec_value: Decimal) ->Union[int, float]:
     >>> decimal_encoder(Decimal("1"))
     1
     """
-    pass
+    exponent = dec_value.as_tuple().exponent
+    if isinstance(exponent, int) and exponent >= 0:
+        return int(dec_value)
+    else:
+        return float(dec_value)
+
+
+ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = {
+    bytes: lambda o: o.decode(),
+    Color: str,
+    datetime.date: isoformat,
+    datetime.datetime: isoformat,
+    datetime.time: isoformat,
+    datetime.timedelta: lambda td: td.total_seconds(),
+    Decimal: decimal_encoder,
+    Enum: lambda o: o.value,
+    frozenset: list,
+    deque: list,
+    GeneratorType: list,
+    IPv4Address: str,
+    IPv4Interface: str,
+    IPv4Network: str,
+    IPv6Address: str,
+    IPv6Interface: str,
+    IPv6Network: str,
+    NameEmail: str,
+    Path: str,
+    Pattern: lambda o: o.pattern,
+    SecretBytes: str,
+    SecretStr: str,
+    set: list,
+    UUID: str,
+}
+
+
+@deprecated(
+    '`pydantic_encoder` is deprecated, use `pydantic_core.to_jsonable_python` instead.',
+    category=None,
+)
+def pydantic_encoder(obj: Any) -> Any:
+    warnings.warn(
+        '`pydantic_encoder` is deprecated, use `pydantic_core.to_jsonable_python` instead.',
+        category=PydanticDeprecatedSince20,
+        stacklevel=2,
+    )
+    from dataclasses import asdict, is_dataclass
+
+    from ..main import BaseModel
+
+    if isinstance(obj, BaseModel):
+        return obj.model_dump()
+    elif is_dataclass(obj):
+        return asdict(obj)  # type: ignore
+
+    # Check the class type and its superclasses for a matching encoder
+    for base in obj.__class__.__mro__[:-1]:
+        try:
+            encoder = ENCODERS_BY_TYPE[base]
+        except KeyError:
+            continue
+        return encoder(obj)
+    else:  # We have exited the for loop without finding a suitable encoder
+        raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable")
+

+# TODO: Add a suggested migration path once there is a way to use custom encoders
+@deprecated(
+    '`custom_pydantic_encoder` is deprecated, use `BaseModel.model_dump` instead.',
+    category=None,
+)
+def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any:
+    warnings.warn(
+        '`custom_pydantic_encoder` is deprecated, use `BaseModel.model_dump` instead.',
+        category=PydanticDeprecatedSince20,
+        stacklevel=2,
+    )
+    # Check the class type and its superclasses for a matching encoder
+    for base in obj.__class__.__mro__[:-1]:
+        try:
+            encoder = type_encoders[base]
+        except KeyError:
+            continue

-ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = {bytes: lambda o:
-    o.decode(), Color: str, datetime.date: isoformat, datetime.datetime:
-    isoformat, datetime.time: isoformat, datetime.timedelta: lambda td: td.
-    total_seconds(), Decimal: decimal_encoder, Enum: lambda o: o.value,
-    frozenset: list, deque: list, GeneratorType: list, IPv4Address: str,
-    IPv4Interface: str, IPv4Network: str, IPv6Address: str, IPv6Interface:
-    str, IPv6Network: str, NameEmail: str, Path: str, Pattern: lambda o: o.
-    pattern, SecretBytes: str, SecretStr: str, set: list, UUID: str}
+        return encoder(obj)
+    else:  # We have exited the for loop without finding a suitable encoder
+        return pydantic_encoder(obj)


 @deprecated('`timedelta_isoformat` is deprecated.', category=None)
-def timedelta_isoformat(td: datetime.timedelta) ->str:
+def timedelta_isoformat(td: datetime.timedelta) -> str:
     """ISO 8601 encoding for Python timedelta object."""
-    pass
+    warnings.warn('`timedelta_isoformat` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2)
+    minutes, seconds = divmod(td.seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S'
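
A brief sketch of the replacement mentioned in the deprecation messages above, `pydantic_core.to_jsonable_python` (illustrative only, not part of the patch):

import datetime
import json
import uuid
from decimal import Decimal

from pydantic_core import to_jsonable_python

data = {
    'when': datetime.datetime(2024, 1, 1, 12, 0),
    'id': uuid.uuid4(),
    'price': Decimal('9.99'),
    'tags': {'python', 'pydantic'},
}
# datetimes and UUIDs become strings, sets become lists, etc., much like ENCODERS_BY_TYPE above
print(json.dumps(to_jsonable_python(data)))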
diff --git a/pydantic/deprecated/parse.py b/pydantic/deprecated/parse.py
index fe5986bb7..2a92e62b7 100644
--- a/pydantic/deprecated/parse.py
+++ b/pydantic/deprecated/parse.py
@@ -1,16 +1,80 @@
 from __future__ import annotations
+
 import json
 import pickle
 import warnings
 from enum import Enum
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Callable
+
 from typing_extensions import deprecated
+
 from ..warnings import PydanticDeprecatedSince20
+
 if not TYPE_CHECKING:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
     DeprecationWarning = PydanticDeprecatedSince20


 class Protocol(str, Enum):
     json = 'json'
     pickle = 'pickle'
+
+
+@deprecated('`load_str_bytes` is deprecated.', category=None)
+def load_str_bytes(
+    b: str | bytes,
+    *,
+    content_type: str | None = None,
+    encoding: str = 'utf8',
+    proto: Protocol | None = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+) -> Any:
+    warnings.warn('`load_str_bytes` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2)
+    if proto is None and content_type:
+        if content_type.endswith(('json', 'javascript')):
+            pass
+        elif allow_pickle and content_type.endswith('pickle'):
+            proto = Protocol.pickle
+        else:
+            raise TypeError(f'Unknown content-type: {content_type}')
+
+    proto = proto or Protocol.json
+
+    if proto == Protocol.json:
+        if isinstance(b, bytes):
+            b = b.decode(encoding)
+        return json_loads(b)  # type: ignore
+    elif proto == Protocol.pickle:
+        if not allow_pickle:
+            raise RuntimeError('Trying to decode with pickle with allow_pickle=False')
+        bb = b if isinstance(b, bytes) else b.encode()  # type: ignore
+        return pickle.loads(bb)
+    else:
+        raise TypeError(f'Unknown protocol: {proto}')
+
+
+@deprecated('`load_file` is deprecated.', category=None)
+def load_file(
+    path: str | Path,
+    *,
+    content_type: str | None = None,
+    encoding: str = 'utf8',
+    proto: Protocol | None = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+) -> Any:
+    warnings.warn('`load_file` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2)
+    path = Path(path)
+    b = path.read_bytes()
+    if content_type is None:
+        if path.suffix in ('.js', '.json'):
+            proto = Protocol.json
+        elif path.suffix == '.pkl':
+            proto = Protocol.pickle
+
+    return load_str_bytes(
+        b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads
+    )
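
Hedged usage example for the restored helpers (not part of the patch); the deprecation warning is silenced for the demonstration:

import warnings

from pydantic.deprecated.parse import load_str_bytes

with warnings.catch_warnings():
    warnings.simplefilter('ignore')
    # JSON is selected from the content type; pickle is only allowed with allow_pickle=True
    print(load_str_bytes(b'{"a": 1}', content_type='application/json'))  # {'a': 1}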
diff --git a/pydantic/deprecated/tools.py b/pydantic/deprecated/tools.py
index 8bd382997..b04eae400 100644
--- a/pydantic/deprecated/tools.py
+++ b/pydantic/deprecated/tools.py
@@ -1,34 +1,103 @@
 from __future__ import annotations
+
 import json
 import warnings
 from typing import TYPE_CHECKING, Any, Callable, Type, TypeVar, Union
+
 from typing_extensions import deprecated
+
 from ..json_schema import DEFAULT_REF_TEMPLATE, GenerateJsonSchema
 from ..type_adapter import TypeAdapter
 from ..warnings import PydanticDeprecatedSince20
+
 if not TYPE_CHECKING:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
     DeprecationWarning = PydanticDeprecatedSince20
+
 __all__ = 'parse_obj_as', 'schema_of', 'schema_json_of'
+
 NameFactory = Union[str, Callable[[Type[Any]], str]]
+
+
 T = TypeVar('T')


 @deprecated(
-    '`schema_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.'
-    , category=None)
-def schema_of(type_: Any, *, title: (NameFactory | None)=None, by_alias:
-    bool=True, ref_template: str=DEFAULT_REF_TEMPLATE, schema_generator:
-    type[GenerateJsonSchema]=GenerateJsonSchema) ->dict[str, Any]:
+    '`parse_obj_as` is deprecated. Use `pydantic.TypeAdapter.validate_python` instead.',
+    category=None,
+)
+def parse_obj_as(type_: type[T], obj: Any, type_name: NameFactory | None = None) -> T:
+    warnings.warn(
+        '`parse_obj_as` is deprecated. Use `pydantic.TypeAdapter.validate_python` instead.',
+        category=PydanticDeprecatedSince20,
+        stacklevel=2,
+    )
+    if type_name is not None:  # pragma: no cover
+        warnings.warn(
+            'The type_name parameter is deprecated. parse_obj_as no longer creates temporary models',
+            DeprecationWarning,
+            stacklevel=2,
+        )
+    return TypeAdapter(type_).validate_python(obj)
+
+
+@deprecated(
+    '`schema_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.',
+    category=None,
+)
+def schema_of(
+    type_: Any,
+    *,
+    title: NameFactory | None = None,
+    by_alias: bool = True,
+    ref_template: str = DEFAULT_REF_TEMPLATE,
+    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
+) -> dict[str, Any]:
     """Generate a JSON schema (as dict) for the passed model or dynamically generated one."""
-    pass
+    warnings.warn(
+        '`schema_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.',
+        category=PydanticDeprecatedSince20,
+        stacklevel=2,
+    )
+    res = TypeAdapter(type_).json_schema(
+        by_alias=by_alias,
+        schema_generator=schema_generator,
+        ref_template=ref_template,
+    )
+    if title is not None:
+        if isinstance(title, str):
+            res['title'] = title
+        else:
+            warnings.warn(
+                'Passing a callable for the `title` parameter is deprecated and no longer supported',
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            res['title'] = title(type_)
+    return res


 @deprecated(
-    '`schema_json_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.'
-    , category=None)
-def schema_json_of(type_: Any, *, title: (NameFactory | None)=None,
-    by_alias: bool=True, ref_template: str=DEFAULT_REF_TEMPLATE,
-    schema_generator: type[GenerateJsonSchema]=GenerateJsonSchema, **
-    dumps_kwargs: Any) ->str:
+    '`schema_json_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.',
+    category=None,
+)
+def schema_json_of(
+    type_: Any,
+    *,
+    title: NameFactory | None = None,
+    by_alias: bool = True,
+    ref_template: str = DEFAULT_REF_TEMPLATE,
+    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
+    **dumps_kwargs: Any,
+) -> str:
     """Generate a JSON schema (as JSON) for the passed model or dynamically generated one."""
-    pass
+    warnings.warn(
+        '`schema_json_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.',
+        category=PydanticDeprecatedSince20,
+        stacklevel=2,
+    )
+    return json.dumps(
+        schema_of(type_, title=title, by_alias=by_alias, ref_template=ref_template, schema_generator=schema_generator),
+        **dumps_kwargs,
+    )
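
An illustrative sketch (not part of the patch) of the `TypeAdapter` replacement that the deprecation messages above point to:

from typing import List

from pydantic import TypeAdapter

adapter = TypeAdapter(List[int])
print(adapter.validate_python(['1', 2, '3']))  # [1, 2, 3]
print(adapter.json_schema())  # {'items': {'type': 'integer'}, 'type': 'array'}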
diff --git a/pydantic/env_settings.py b/pydantic/env_settings.py
index b33516aa9..cd0b04e6a 100644
--- a/pydantic/env_settings.py
+++ b/pydantic/env_settings.py
@@ -1,3 +1,5 @@
 """The `env_settings` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/error_wrappers.py b/pydantic/error_wrappers.py
index 2d455f557..2985419ab 100644
--- a/pydantic/error_wrappers.py
+++ b/pydantic/error_wrappers.py
@@ -1,3 +1,5 @@
 """The `error_wrappers` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/errors.py b/pydantic/errors.py
index 3a0976c35..de2e512dd 100644
--- a/pydantic/errors.py
+++ b/pydantic/errors.py
@@ -1,34 +1,71 @@
 """Pydantic-specific errors."""
+
 from __future__ import annotations as _annotations
+
 import re
+
 from typing_extensions import Literal, Self
+
 from ._migration import getattr_migration
 from .version import version_short
-__all__ = ('PydanticUserError', 'PydanticUndefinedAnnotation',
-    'PydanticImportError', 'PydanticSchemaGenerationError',
-    'PydanticInvalidForJsonSchema', 'PydanticErrorCodes')
+
+__all__ = (
+    'PydanticUserError',
+    'PydanticUndefinedAnnotation',
+    'PydanticImportError',
+    'PydanticSchemaGenerationError',
+    'PydanticInvalidForJsonSchema',
+    'PydanticErrorCodes',
+)
+
+# We use this URL to allow for future flexibility about how we host the docs, while allowing for Pydantic
+# code in the while with "old" URLs to still work.
+# 'u' refers to "user errors" - e.g. errors caused by developers using pydantic, as opposed to validation errors.
 DEV_ERROR_DOCS_URL = f'https://errors.pydantic.dev/{version_short()}/u/'
-PydanticErrorCodes = Literal['class-not-fully-defined',
-    'custom-json-schema', 'decorator-missing-field',
-    'discriminator-no-field', 'discriminator-alias-type',
-    'discriminator-needs-literal', 'discriminator-alias',
-    'discriminator-validator', 'callable-discriminator-no-tag',
-    'typed-dict-version', 'model-field-overridden',
-    'model-field-missing-annotation', 'config-both', 'removed-kwargs',
-    'invalid-for-json-schema', 'json-schema-already-used',
-    'base-model-instantiated', 'undefined-annotation',
-    'schema-for-unknown-type', 'import-error',
-    'create-model-field-definitions', 'create-model-config-base',
-    'validator-no-fields', 'validator-invalid-fields',
-    'validator-instance-method', 'root-validator-pre-skip',
-    'model-serializer-instance-method', 'validator-field-config-info',
-    'validator-v1-signature', 'validator-signature',
-    'field-serializer-signature', 'model-serializer-signature',
-    'multiple-field-serializers', 'invalid_annotated_type',
-    'type-adapter-config-unused', 'root-model-extra',
-    'unevaluable-type-annotation', 'dataclass-init-false-extra-allow',
-    'clashing-init-and-init-var', 'model-config-invalid-field-name',
-    'with-config-on-model', 'dataclass-on-model']
+PydanticErrorCodes = Literal[
+    'class-not-fully-defined',
+    'custom-json-schema',
+    'decorator-missing-field',
+    'discriminator-no-field',
+    'discriminator-alias-type',
+    'discriminator-needs-literal',
+    'discriminator-alias',
+    'discriminator-validator',
+    'callable-discriminator-no-tag',
+    'typed-dict-version',
+    'model-field-overridden',
+    'model-field-missing-annotation',
+    'config-both',
+    'removed-kwargs',
+    'invalid-for-json-schema',
+    'json-schema-already-used',
+    'base-model-instantiated',
+    'undefined-annotation',
+    'schema-for-unknown-type',
+    'import-error',
+    'create-model-field-definitions',
+    'create-model-config-base',
+    'validator-no-fields',
+    'validator-invalid-fields',
+    'validator-instance-method',
+    'root-validator-pre-skip',
+    'model-serializer-instance-method',
+    'validator-field-config-info',
+    'validator-v1-signature',
+    'validator-signature',
+    'field-serializer-signature',
+    'model-serializer-signature',
+    'multiple-field-serializers',
+    'invalid_annotated_type',
+    'type-adapter-config-unused',
+    'root-model-extra',
+    'unevaluable-type-annotation',
+    'dataclass-init-false-extra-allow',
+    'clashing-init-and-init-var',
+    'model-config-invalid-field-name',
+    'with-config-on-model',
+    'dataclass-on-model',
+]


 class PydanticErrorMixin:
@@ -39,18 +76,15 @@ class PydanticErrorMixin:
         code: An optional error code from PydanticErrorCodes enum.
     """

-    def __init__(self, message: str, *, code: (PydanticErrorCodes | None)
-        ) ->None:
+    def __init__(self, message: str, *, code: PydanticErrorCodes | None) -> None:
         self.message = message
         self.code = code

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         if self.code is None:
             return self.message
         else:
-            return f"""{self.message}
-
-For further information visit {DEV_ERROR_DOCS_URL}{self.code}"""
+            return f'{self.message}\n\nFor further information visit {DEV_ERROR_DOCS_URL}{self.code}'


 class PydanticUserError(PydanticErrorMixin, TypeError):
@@ -65,12 +99,12 @@ class PydanticUndefinedAnnotation(PydanticErrorMixin, NameError):
         message: Description of the error.
     """

-    def __init__(self, name: str, message: str) ->None:
+    def __init__(self, name: str, message: str) -> None:
         self.name = name
         super().__init__(message=message, code='undefined-annotation')

     @classmethod
-    def from_name_error(cls, name_error: NameError) ->Self:
+    def from_name_error(cls, name_error: NameError) -> Self:
         """Convert a `NameError` to a `PydanticUndefinedAnnotation` error.

         Args:
@@ -79,7 +113,11 @@ class PydanticUndefinedAnnotation(PydanticErrorMixin, NameError):
         Returns:
             Converted `PydanticUndefinedAnnotation` error.
         """
-        pass
+        try:
+            name = name_error.name  # type: ignore  # python > 3.10
+        except AttributeError:
+            name = re.search(r".*'(.+?)'", str(name_error)).group(1)  # type: ignore[union-attr]
+        return cls(name=name, message=str(name_error))


 class PydanticImportError(PydanticErrorMixin, ImportError):
@@ -89,7 +127,7 @@ class PydanticImportError(PydanticErrorMixin, ImportError):
         message: Description of the error.
     """

-    def __init__(self, message: str) ->None:
+    def __init__(self, message: str) -> None:
         super().__init__(message, code='import-error')


@@ -100,7 +138,7 @@ class PydanticSchemaGenerationError(PydanticUserError):
         message: Description of the error.
     """

-    def __init__(self, message: str) ->None:
+    def __init__(self, message: str) -> None:
         super().__init__(message, code='schema-for-unknown-type')


@@ -111,7 +149,7 @@ class PydanticInvalidForJsonSchema(PydanticUserError):
         message: Description of the error.
     """

-    def __init__(self, message: str) ->None:
+    def __init__(self, message: str) -> None:
         super().__init__(message, code='invalid-for-json-schema')


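A small demonstration (not part of the patch) of how the error-code plumbing above surfaces to users: the code is appended to the message as a documentation URL.

from pydantic.errors import PydanticUserError

err = PydanticUserError('`@validator` should be used with fields', code='validator-no-fields')
print(str(err))
# `@validator` should be used with fields
#
# For further information visit https://errors.pydantic.dev/<version>/u/validator-no-fields
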
diff --git a/pydantic/experimental/pipeline.py b/pydantic/experimental/pipeline.py
index 253728a36..29da979e8 100644
--- a/pydantic/experimental/pipeline.py
+++ b/pydantic/experimental/pipeline.py
@@ -1,5 +1,7 @@
 """Experimental pipeline API functionality. Be careful with this API, it's subject to change."""
+
 from __future__ import annotations
+
 import datetime
 import operator
 import re
@@ -10,17 +12,24 @@ from dataclasses import dataclass
 from decimal import Decimal
 from functools import cached_property, partial
 from typing import TYPE_CHECKING, Any, Callable, Generic, Pattern, Protocol, TypeVar, Union, overload
+
 import annotated_types
 from typing_extensions import Annotated
+
 if TYPE_CHECKING:
     from pydantic_core import core_schema as cs
+
     from pydantic import GetCoreSchemaHandler
+
 from pydantic._internal._internal_dataclass import slots_true as _slots_true
+
 if sys.version_info < (3, 10):
     EllipsisType = type(Ellipsis)
 else:
     from types import EllipsisType
+
 __all__ = ['validate_as', 'validate_as_deferred', 'transform']
+
 _slots_frozen = {**_slots_true, 'frozen': True}


@@ -34,6 +43,10 @@ class _ValidateAs:
 class _ValidateAsDefer:
     func: Callable[[], type[Any]]

+    @cached_property
+    def tp(self) -> type[Any]:
+        return self.func()
+

 @dataclass(**_slots_frozen)
 class _Transform:
@@ -72,11 +85,24 @@ class _NotIn:
     values: Container[Any]


-_ConstraintAnnotation = Union[annotated_types.Le, annotated_types.Ge,
-    annotated_types.Lt, annotated_types.Gt, annotated_types.Len,
-    annotated_types.MultipleOf, annotated_types.Timezone, annotated_types.
-    Interval, annotated_types.Predicate, _Eq, _NotEq, _In, _NotIn, Pattern[str]
-    ]
+_ConstraintAnnotation = Union[
+    annotated_types.Le,
+    annotated_types.Ge,
+    annotated_types.Lt,
+    annotated_types.Gt,
+    annotated_types.Len,
+    annotated_types.MultipleOf,
+    annotated_types.Timezone,
+    annotated_types.Interval,
+    annotated_types.Predicate,
+    # common predicates not included in annotated_types
+    _Eq,
+    _NotEq,
+    _In,
+    _NotIn,
+    # regular expressions
+    Pattern[str],
+]


 @dataclass(**_slots_frozen)
@@ -84,8 +110,8 @@ class _Constraint:
     constraint: _ConstraintAnnotation


-_Step = Union[_ValidateAs, _ValidateAsDefer, _Transform, _PipelineOr,
-    _PipelineAnd, _Constraint]
+_Step = Union[_ValidateAs, _ValidateAsDefer, _Transform, _PipelineOr, _PipelineAnd, _Constraint]
+
 _InT = TypeVar('_InT')
 _OutT = TypeVar('_OutT')
 _NewOutT = TypeVar('_NewOutT')
@@ -95,22 +121,35 @@ class _FieldTypeMarker:
     pass


+# TODO: ultimately, make this public, see https://github.com/pydantic/pydantic/pull/9459#discussion_r1628197626
+# Also, make this frozen eventually, but that doesn't work right now because of the generic base
+# Which attempts to modify __orig_base__ and such.
+# We could go with a manual freeze, but that seems overkill for now.
 @dataclass(**_slots_true)
 class _Pipeline(Generic[_InT, _OutT]):
     """Abstract representation of a chain of validation, transformation, and parsing steps."""
+
     _steps: tuple[_Step, ...]

-    def transform(self, func: Callable[[_OutT], _NewOutT]) ->_Pipeline[_InT,
-        _NewOutT]:
+    def transform(
+        self,
+        func: Callable[[_OutT], _NewOutT],
+    ) -> _Pipeline[_InT, _NewOutT]:
         """Transform the output of the previous step.

         If used as the first step in a pipeline, the type of the field is used.
         That is, the transformation is applied after the value is parsed to the field's type.
         """
-        pass
+        return _Pipeline[_InT, _NewOutT](self._steps + (_Transform(func),))
+
+    @overload
+    def validate_as(self, tp: type[_NewOutT], *, strict: bool = ...) -> _Pipeline[_InT, _NewOutT]: ...

-    def validate_as(self, tp: (type[_NewOutT] | EllipsisType), *, strict:
-        bool=False) ->_Pipeline[_InT, Any]:
+    @overload
+    def validate_as(self, tp: EllipsisType, *, strict: bool = ...) -> _Pipeline[_InT, Any]:  # type: ignore
+        ...
+
+    def validate_as(self, tp: type[_NewOutT] | EllipsisType, *, strict: bool = False) -> _Pipeline[_InT, Any]:  # type: ignore
         """Validate / parse the input into a new type.

         If no type is provided, the type of the field is used.
@@ -118,18 +157,70 @@ class _Pipeline(Generic[_InT, _OutT]):
         Types are parsed in Pydantic's `lax` mode by default,
         but you can enable `strict` mode by passing `strict=True`.
         """
-        pass
+        if isinstance(tp, EllipsisType):
+            return _Pipeline[_InT, Any](self._steps + (_ValidateAs(_FieldTypeMarker, strict=strict),))
+        return _Pipeline[_InT, _NewOutT](self._steps + (_ValidateAs(tp, strict=strict),))

-    def validate_as_deferred(self, func: Callable[[], type[_NewOutT]]
-        ) ->_Pipeline[_InT, _NewOutT]:
+    def validate_as_deferred(self, func: Callable[[], type[_NewOutT]]) -> _Pipeline[_InT, _NewOutT]:
         """Parse the input into a new type, deferring resolution of the type until the current class
         is fully defined.

         This is useful when you need to reference the class in its own type annotations.
         """
-        pass
+        return _Pipeline[_InT, _NewOutT](self._steps + (_ValidateAsDefer(func),))
+
+    # constraints
+    @overload
+    def constrain(self: _Pipeline[_InT, _NewOutGe], constraint: annotated_types.Ge) -> _Pipeline[_InT, _NewOutGe]: ...
+
+    @overload
+    def constrain(self: _Pipeline[_InT, _NewOutGt], constraint: annotated_types.Gt) -> _Pipeline[_InT, _NewOutGt]: ...

-    def constrain(self, constraint: _ConstraintAnnotation) ->Any:
+    @overload
+    def constrain(self: _Pipeline[_InT, _NewOutLe], constraint: annotated_types.Le) -> _Pipeline[_InT, _NewOutLe]: ...
+
+    @overload
+    def constrain(self: _Pipeline[_InT, _NewOutLt], constraint: annotated_types.Lt) -> _Pipeline[_InT, _NewOutLt]: ...
+
+    @overload
+    def constrain(
+        self: _Pipeline[_InT, _NewOutLen], constraint: annotated_types.Len
+    ) -> _Pipeline[_InT, _NewOutLen]: ...
+
+    @overload
+    def constrain(
+        self: _Pipeline[_InT, _NewOutT], constraint: annotated_types.MultipleOf
+    ) -> _Pipeline[_InT, _NewOutT]: ...
+
+    @overload
+    def constrain(
+        self: _Pipeline[_InT, _NewOutDatetime], constraint: annotated_types.Timezone
+    ) -> _Pipeline[_InT, _NewOutDatetime]: ...
+
+    @overload
+    def constrain(self: _Pipeline[_InT, _OutT], constraint: annotated_types.Predicate) -> _Pipeline[_InT, _OutT]: ...
+
+    @overload
+    def constrain(
+        self: _Pipeline[_InT, _NewOutInterval], constraint: annotated_types.Interval
+    ) -> _Pipeline[_InT, _NewOutInterval]: ...
+
+    @overload
+    def constrain(self: _Pipeline[_InT, _OutT], constraint: _Eq) -> _Pipeline[_InT, _OutT]: ...
+
+    @overload
+    def constrain(self: _Pipeline[_InT, _OutT], constraint: _NotEq) -> _Pipeline[_InT, _OutT]: ...
+
+    @overload
+    def constrain(self: _Pipeline[_InT, _OutT], constraint: _In) -> _Pipeline[_InT, _OutT]: ...
+
+    @overload
+    def constrain(self: _Pipeline[_InT, _OutT], constraint: _NotIn) -> _Pipeline[_InT, _OutT]: ...
+
+    @overload
+    def constrain(self: _Pipeline[_InT, _NewOutT], constraint: Pattern[str]) -> _Pipeline[_InT, _NewOutT]: ...
+
+    def constrain(self, constraint: _ConstraintAnnotation) -> Any:
         """Constrain a value to meet a certain condition.

         We support most conditions from `annotated_types`, as well as regular expressions.
@@ -137,87 +228,128 @@ class _Pipeline(Generic[_InT, _OutT]):
         Most of the time you'll be calling a shortcut method like `gt`, `lt`, `len`, etc
         so you don't need to call this directly.
         """
-        pass
+        return _Pipeline[_InT, _OutT](self._steps + (_Constraint(constraint),))

-    def predicate(self: _Pipeline[_InT, _NewOutT], func: Callable[[_NewOutT
-        ], bool]) ->_Pipeline[_InT, _NewOutT]:
+    def predicate(self: _Pipeline[_InT, _NewOutT], func: Callable[[_NewOutT], bool]) -> _Pipeline[_InT, _NewOutT]:
         """Constrain a value to meet a certain predicate."""
-        pass
+        return self.constrain(annotated_types.Predicate(func))

-    def gt(self: _Pipeline[_InT, _NewOutGt], gt: _NewOutGt) ->_Pipeline[
-        _InT, _NewOutGt]:
+    def gt(self: _Pipeline[_InT, _NewOutGt], gt: _NewOutGt) -> _Pipeline[_InT, _NewOutGt]:
         """Constrain a value to be greater than a certain value."""
-        pass
+        return self.constrain(annotated_types.Gt(gt))

-    def lt(self: _Pipeline[_InT, _NewOutLt], lt: _NewOutLt) ->_Pipeline[
-        _InT, _NewOutLt]:
+    def lt(self: _Pipeline[_InT, _NewOutLt], lt: _NewOutLt) -> _Pipeline[_InT, _NewOutLt]:
         """Constrain a value to be less than a certain value."""
-        pass
+        return self.constrain(annotated_types.Lt(lt))

-    def ge(self: _Pipeline[_InT, _NewOutGe], ge: _NewOutGe) ->_Pipeline[
-        _InT, _NewOutGe]:
+    def ge(self: _Pipeline[_InT, _NewOutGe], ge: _NewOutGe) -> _Pipeline[_InT, _NewOutGe]:
         """Constrain a value to be greater than or equal to a certain value."""
-        pass
+        return self.constrain(annotated_types.Ge(ge))

-    def le(self: _Pipeline[_InT, _NewOutLe], le: _NewOutLe) ->_Pipeline[
-        _InT, _NewOutLe]:
+    def le(self: _Pipeline[_InT, _NewOutLe], le: _NewOutLe) -> _Pipeline[_InT, _NewOutLe]:
         """Constrain a value to be less than or equal to a certain value."""
-        pass
+        return self.constrain(annotated_types.Le(le))

-    def len(self: _Pipeline[_InT, _NewOutLen], min_len: int, max_len: (int |
-        None)=None) ->_Pipeline[_InT, _NewOutLen]:
+    def len(self: _Pipeline[_InT, _NewOutLen], min_len: int, max_len: int | None = None) -> _Pipeline[_InT, _NewOutLen]:
         """Constrain a value to have a certain length."""
-        pass
+        return self.constrain(annotated_types.Len(min_len, max_len))
+
+    @overload
+    def multiple_of(self: _Pipeline[_InT, _NewOutDiv], multiple_of: _NewOutDiv) -> _Pipeline[_InT, _NewOutDiv]: ...

-    def multiple_of(self: _Pipeline[_InT, Any], multiple_of: Any) ->_Pipeline[
-        _InT, Any]:
+    @overload
+    def multiple_of(self: _Pipeline[_InT, _NewOutMod], multiple_of: _NewOutMod) -> _Pipeline[_InT, _NewOutMod]: ...
+
+    def multiple_of(self: _Pipeline[_InT, Any], multiple_of: Any) -> _Pipeline[_InT, Any]:
         """Constrain a value to be a multiple of a certain number."""
-        pass
+        return self.constrain(annotated_types.MultipleOf(multiple_of))

-    def eq(self: _Pipeline[_InT, _OutT], value: _OutT) ->_Pipeline[_InT, _OutT
-        ]:
+    def eq(self: _Pipeline[_InT, _OutT], value: _OutT) -> _Pipeline[_InT, _OutT]:
         """Constrain a value to be equal to a certain value."""
-        pass
+        return self.constrain(_Eq(value))

-    def not_eq(self: _Pipeline[_InT, _OutT], value: _OutT) ->_Pipeline[_InT,
-        _OutT]:
+    def not_eq(self: _Pipeline[_InT, _OutT], value: _OutT) -> _Pipeline[_InT, _OutT]:
         """Constrain a value to not be equal to a certain value."""
-        pass
+        return self.constrain(_NotEq(value))

-    def in_(self: _Pipeline[_InT, _OutT], values: Container[_OutT]
-        ) ->_Pipeline[_InT, _OutT]:
+    def in_(self: _Pipeline[_InT, _OutT], values: Container[_OutT]) -> _Pipeline[_InT, _OutT]:
         """Constrain a value to be in a certain set."""
-        pass
+        return self.constrain(_In(values))

-    def not_in(self: _Pipeline[_InT, _OutT], values: Container[_OutT]
-        ) ->_Pipeline[_InT, _OutT]:
+    def not_in(self: _Pipeline[_InT, _OutT], values: Container[_OutT]) -> _Pipeline[_InT, _OutT]:
         """Constrain a value to not be in a certain set."""
-        pass
+        return self.constrain(_NotIn(values))
+
+    # timezone methods
+    def datetime_tz_naive(self: _Pipeline[_InT, datetime.datetime]) -> _Pipeline[_InT, datetime.datetime]:
+        return self.constrain(annotated_types.Timezone(None))
+
+    def datetime_tz_aware(self: _Pipeline[_InT, datetime.datetime]) -> _Pipeline[_InT, datetime.datetime]:
+        return self.constrain(annotated_types.Timezone(...))
+
+    def datetime_tz(
+        self: _Pipeline[_InT, datetime.datetime], tz: datetime.tzinfo
+    ) -> _Pipeline[_InT, datetime.datetime]:
+        return self.constrain(annotated_types.Timezone(tz))  # type: ignore

-    def otherwise(self, other: _Pipeline[_OtherIn, _OtherOut]) ->_Pipeline[
-        _InT | _OtherIn, _OutT | _OtherOut]:
+    def datetime_with_tz(
+        self: _Pipeline[_InT, datetime.datetime], tz: datetime.tzinfo | None
+    ) -> _Pipeline[_InT, datetime.datetime]:
+        return self.transform(partial(datetime.datetime.replace, tzinfo=tz))
+
+    # string methods
+    def str_lower(self: _Pipeline[_InT, str]) -> _Pipeline[_InT, str]:
+        return self.transform(str.lower)
+
+    def str_upper(self: _Pipeline[_InT, str]) -> _Pipeline[_InT, str]:
+        return self.transform(str.upper)
+
+    def str_title(self: _Pipeline[_InT, str]) -> _Pipeline[_InT, str]:
+        return self.transform(str.title)
+
+    def str_strip(self: _Pipeline[_InT, str]) -> _Pipeline[_InT, str]:
+        return self.transform(str.strip)
+
+    def str_pattern(self: _Pipeline[_InT, str], pattern: str) -> _Pipeline[_InT, str]:
+        return self.constrain(re.compile(pattern))
+
+    def str_contains(self: _Pipeline[_InT, str], substring: str) -> _Pipeline[_InT, str]:
+        return self.predicate(lambda v: substring in v)
+
+    def str_starts_with(self: _Pipeline[_InT, str], prefix: str) -> _Pipeline[_InT, str]:
+        return self.predicate(lambda v: v.startswith(prefix))
+
+    def str_ends_with(self: _Pipeline[_InT, str], suffix: str) -> _Pipeline[_InT, str]:
+        return self.predicate(lambda v: v.endswith(suffix))
+
+    # operators
+    def otherwise(self, other: _Pipeline[_OtherIn, _OtherOut]) -> _Pipeline[_InT | _OtherIn, _OutT | _OtherOut]:
         """Combine two validation chains, returning the result of the first chain if it succeeds, and the second chain if it fails."""
-        pass
+        return _Pipeline((_PipelineOr(self, other),))
+
     __or__ = otherwise

-    def then(self, other: _Pipeline[_OutT, _OtherOut]) ->_Pipeline[_InT,
-        _OtherOut]:
+    def then(self, other: _Pipeline[_OutT, _OtherOut]) -> _Pipeline[_InT, _OtherOut]:
         """Pipe the result of one validation chain into another."""
-        pass
+        return _Pipeline((_PipelineAnd(self, other),))
+
     __and__ = then

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        GetCoreSchemaHandler) ->cs.CoreSchema:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> cs.CoreSchema:
         from pydantic_core import core_schema as cs
+
         queue = deque(self._steps)
+
         s = None
+
         while queue:
             step = queue.popleft()
             s = _apply_step(step, s, handler, source_type)
+
         s = s or cs.any_schema()
         return s

-    def __supports_type__(self, _: _OutT) ->bool:
+    def __supports_type__(self, _: _OutT) -> bool:
         raise NotImplementedError
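
A minimal usage sketch of the pipeline above, assuming the module-level `validate_as` export that sits alongside `validate_as_deferred` and `transform` shown below (the `Score` alias is illustrative, not part of the diff):

from typing import Annotated

from pydantic import TypeAdapter
from pydantic.experimental.pipeline import validate_as  # assumed export, sibling of validate_as_deferred/transform

# Parse to int, then constrain via the shortcut methods (which call `constrain` internally).
Score = Annotated[int, validate_as(int).gt(0).le(100)]

ta = TypeAdapter(Score)
print(ta.validate_python(42))  # 42
# ta.validate_python(0) would raise a ValidationError (violates gt(0))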


@@ -226,21 +358,302 @@ validate_as_deferred = _Pipeline[Any, Any](()).validate_as_deferred
 transform = _Pipeline[Any, Any]((_ValidateAs(_FieldTypeMarker),)).transform


-def _apply_constraint(s: (cs.CoreSchema | None), constraint:
-    _ConstraintAnnotation) ->cs.CoreSchema:
-    """Apply a single constraint to a schema."""
-    pass
+def _check_func(
+    func: Callable[[Any], bool], predicate_err: str | Callable[[], str], s: cs.CoreSchema | None
+) -> cs.CoreSchema:
+    from pydantic_core import core_schema as cs
+
+    def handler(v: Any) -> Any:
+        if func(v):
+            return v
+        raise ValueError(f'Expected {predicate_err if isinstance(predicate_err, str) else predicate_err()}')
+
+    if s is None:
+        return cs.no_info_plain_validator_function(handler)
+    else:
+        return cs.no_info_after_validator_function(handler, s)
+
+
+def _apply_step(step: _Step, s: cs.CoreSchema | None, handler: GetCoreSchemaHandler, source_type: Any) -> cs.CoreSchema:
+    from pydantic_core import core_schema as cs
+
+    if isinstance(step, _ValidateAs):
+        s = _apply_parse(s, step.tp, step.strict, handler, source_type)
+    elif isinstance(step, _ValidateAsDefer):
+        s = _apply_parse(s, step.tp, False, handler, source_type)
+    elif isinstance(step, _Transform):
+        s = _apply_transform(s, step.func, handler)
+    elif isinstance(step, _Constraint):
+        s = _apply_constraint(s, step.constraint)
+    elif isinstance(step, _PipelineOr):
+        s = cs.union_schema([handler(step.left), handler(step.right)])
+    else:
+        assert isinstance(step, _PipelineAnd)
+        s = cs.chain_schema([handler(step.left), handler(step.right)])
+    return s
+
+
+def _apply_parse(
+    s: cs.CoreSchema | None,
+    tp: type[Any],
+    strict: bool,
+    handler: GetCoreSchemaHandler,
+    source_type: Any,
+) -> cs.CoreSchema:
+    from pydantic_core import core_schema as cs
+
+    from pydantic import Strict

+    if tp is _FieldTypeMarker:
+        return handler(source_type)

-class _SupportsRange(annotated_types.SupportsLe, annotated_types.SupportsGe,
-    Protocol):
+    if strict:
+        tp = Annotated[tp, Strict()]  # type: ignore
+
+    if s and s['type'] == 'any':
+        return handler(tp)
+    else:
+        return cs.chain_schema([s, handler(tp)]) if s else handler(tp)
+
+
+def _apply_transform(
+    s: cs.CoreSchema | None, func: Callable[[Any], Any], handler: GetCoreSchemaHandler
+) -> cs.CoreSchema:
+    from pydantic_core import core_schema as cs
+
+    if s is None:
+        return cs.no_info_plain_validator_function(func)
+
+    if s['type'] == 'str':
+        if func is str.strip:
+            s = s.copy()
+            s['strip_whitespace'] = True
+            return s
+        elif func is str.lower:
+            s = s.copy()
+            s['to_lower'] = True
+            return s
+        elif func is str.upper:
+            s = s.copy()
+            s['to_upper'] = True
+            return s
+
+    return cs.no_info_after_validator_function(func, s)
+
+
+def _apply_constraint(  # noqa: C901
+    s: cs.CoreSchema | None, constraint: _ConstraintAnnotation
+) -> cs.CoreSchema:
+    """Apply a single constraint to a schema."""
+    if isinstance(constraint, annotated_types.Gt):
+        gt = constraint.gt
+        if s and s['type'] in {'int', 'float', 'decimal'}:
+            s = s.copy()
+            if s['type'] == 'int' and isinstance(gt, int):
+                s['gt'] = gt
+            elif s['type'] == 'float' and isinstance(gt, float):
+                s['gt'] = gt
+            elif s['type'] == 'decimal' and isinstance(gt, Decimal):
+                s['gt'] = gt
+        else:
+
+            def check_gt(v: Any) -> bool:
+                return v > gt
+
+            s = _check_func(check_gt, f'> {gt}', s)
+    elif isinstance(constraint, annotated_types.Ge):
+        ge = constraint.ge
+        if s and s['type'] in {'int', 'float', 'decimal'}:
+            s = s.copy()
+            if s['type'] == 'int' and isinstance(ge, int):
+                s['ge'] = ge
+            elif s['type'] == 'float' and isinstance(ge, float):
+                s['ge'] = ge
+            elif s['type'] == 'decimal' and isinstance(ge, Decimal):
+                s['ge'] = ge
+
+        def check_ge(v: Any) -> bool:
+            return v >= ge
+
+        s = _check_func(check_ge, f'>= {ge}', s)
+    elif isinstance(constraint, annotated_types.Lt):
+        lt = constraint.lt
+        if s and s['type'] in {'int', 'float', 'decimal'}:
+            s = s.copy()
+            if s['type'] == 'int' and isinstance(lt, int):
+                s['lt'] = lt
+            elif s['type'] == 'float' and isinstance(lt, float):
+                s['lt'] = lt
+            elif s['type'] == 'decimal' and isinstance(lt, Decimal):
+                s['lt'] = lt
+
+        def check_lt(v: Any) -> bool:
+            return v < lt
+
+        s = _check_func(check_lt, f'< {lt}', s)
+    elif isinstance(constraint, annotated_types.Le):
+        le = constraint.le
+        if s and s['type'] in {'int', 'float', 'decimal'}:
+            s = s.copy()
+            if s['type'] == 'int' and isinstance(le, int):
+                s['le'] = le
+            elif s['type'] == 'float' and isinstance(le, float):
+                s['le'] = le
+            elif s['type'] == 'decimal' and isinstance(le, Decimal):
+                s['le'] = le
+
+        def check_le(v: Any) -> bool:
+            return v <= le
+
+        s = _check_func(check_le, f'<= {le}', s)
+    elif isinstance(constraint, annotated_types.Len):
+        min_len = constraint.min_length
+        max_len = constraint.max_length
+
+        if s and s['type'] in {'str', 'list', 'tuple', 'set', 'frozenset', 'dict'}:
+            assert (
+                s['type'] == 'str'
+                or s['type'] == 'list'
+                or s['type'] == 'tuple'
+                or s['type'] == 'set'
+                or s['type'] == 'dict'
+                or s['type'] == 'frozenset'
+            )
+            s = s.copy()
+            if min_len != 0:
+                s['min_length'] = min_len
+            if max_len is not None:
+                s['max_length'] = max_len
+
+        def check_len(v: Any) -> bool:
+            if max_len is not None:
+                return (min_len <= len(v)) and (len(v) <= max_len)
+            return min_len <= len(v)
+
+        s = _check_func(check_len, f'length >= {min_len} and length <= {max_len}', s)
+    elif isinstance(constraint, annotated_types.MultipleOf):
+        multiple_of = constraint.multiple_of
+        if s and s['type'] in {'int', 'float', 'decimal'}:
+            s = s.copy()
+            if s['type'] == 'int' and isinstance(multiple_of, int):
+                s['multiple_of'] = multiple_of
+            elif s['type'] == 'float' and isinstance(multiple_of, float):
+                s['multiple_of'] = multiple_of
+            elif s['type'] == 'decimal' and isinstance(multiple_of, Decimal):
+                s['multiple_of'] = multiple_of
+
+        def check_multiple_of(v: Any) -> bool:
+            return v % multiple_of == 0
+
+        s = _check_func(check_multiple_of, f'% {multiple_of} == 0', s)
+    elif isinstance(constraint, annotated_types.Timezone):
+        tz = constraint.tz
+
+        if tz is ...:
+            if s and s['type'] == 'datetime':
+                s = s.copy()
+                s['tz_constraint'] = 'aware'
+            else:
+
+                def check_tz_aware(v: object) -> bool:
+                    assert isinstance(v, datetime.datetime)
+                    return v.tzinfo is not None
+
+                s = _check_func(check_tz_aware, 'timezone aware', s)
+        elif tz is None:
+            if s and s['type'] == 'datetime':
+                s = s.copy()
+                s['tz_constraint'] = 'naive'
+            else:
+
+                def check_tz_naive(v: object) -> bool:
+                    assert isinstance(v, datetime.datetime)
+                    return v.tzinfo is None
+
+                s = _check_func(check_tz_naive, 'timezone naive', s)
+        else:
+            raise NotImplementedError('Constraining to a specific timezone is not yet supported')
+    elif isinstance(constraint, annotated_types.Interval):
+        if constraint.ge:
+            s = _apply_constraint(s, annotated_types.Ge(constraint.ge))
+        if constraint.gt:
+            s = _apply_constraint(s, annotated_types.Gt(constraint.gt))
+        if constraint.le:
+            s = _apply_constraint(s, annotated_types.Le(constraint.le))
+        if constraint.lt:
+            s = _apply_constraint(s, annotated_types.Lt(constraint.lt))
+        assert s is not None
+    elif isinstance(constraint, annotated_types.Predicate):
+        func = constraint.func
+
+        if func.__name__ == '<lambda>':
+            # attempt to extract the source code for a lambda function
+            # to use as the function name in error messages
+            # TODO: is there a better way? should we just not do this?
+            import inspect
+
+            try:
+                # remove ')' suffix, can use removesuffix once we drop 3.8
+                source = inspect.getsource(func).strip()
+                if source.endswith(')'):
+                    source = source[:-1]
+                lambda_source_code = '`' + ''.join(''.join(source.split('lambda ')[1:]).split(':')[1:]).strip() + '`'
+            except OSError:
+                # stringified annotations
+                lambda_source_code = 'lambda'
+
+            s = _check_func(func, lambda_source_code, s)
+        else:
+            s = _check_func(func, func.__name__, s)
+    elif isinstance(constraint, _NotEq):
+        value = constraint.value
+
+        def check_not_eq(v: Any) -> bool:
+            return operator.__ne__(v, value)
+
+        s = _check_func(check_not_eq, f'!= {value}', s)
+    elif isinstance(constraint, _Eq):
+        value = constraint.value
+
+        def check_eq(v: Any) -> bool:
+            return operator.__eq__(v, value)
+
+        s = _check_func(check_eq, f'== {value}', s)
+    elif isinstance(constraint, _In):
+        values = constraint.values
+
+        def check_in(v: Any) -> bool:
+            return operator.__contains__(values, v)
+
+        s = _check_func(check_in, f'in {values}', s)
+    elif isinstance(constraint, _NotIn):
+        values = constraint.values
+
+        def check_not_in(v: Any) -> bool:
+            return operator.__not__(operator.__contains__(values, v))
+
+        s = _check_func(check_not_in, f'not in {values}', s)
+    else:
+        assert isinstance(constraint, Pattern)
+        if s and s['type'] == 'str':
+            s = s.copy()
+            s['pattern'] = constraint.pattern
+        else:
+
+            def check_pattern(v: object) -> bool:
+                assert isinstance(v, str)
+                return constraint.match(v) is not None
+
+            s = _check_func(check_pattern, f'~ {constraint.pattern}', s)
+    return s
+
+
+class _SupportsRange(annotated_types.SupportsLe, annotated_types.SupportsGe, Protocol):
     pass


 class _SupportsLen(Protocol):
-
-    def __len__(self) ->int:
-        ...
+    def __len__(self) -> int: ...


 _NewOutGt = TypeVar('_NewOutGt', bound=annotated_types.SupportsGt)
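
As an illustration of `_apply_transform` above: when the current schema is already a `str` schema, `str.strip`/`str.lower`/`str.upper` are folded into the schema's `strip_whitespace`/`to_lower`/`to_upper` flags rather than wrapped in an after-validator. A usage sketch (the `Slug` alias and the `validate_as` import are assumptions):

from typing import Annotated

from pydantic import TypeAdapter
from pydantic.experimental.pipeline import validate_as  # assumed export

Slug = Annotated[str, validate_as(str).str_strip().str_lower()]
print(TypeAdapter(Slug).validate_python('  Hello World  '))  # 'hello world'
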
diff --git a/pydantic/fields.py b/pydantic/fields.py
index 6b34290ae..7b41a4d94 100644
--- a/pydantic/fields.py
+++ b/pydantic/fields.py
@@ -1,5 +1,7 @@
 """Defining fields on models."""
+
 from __future__ import annotations as _annotations
+
 import dataclasses
 import inspect
 import sys
@@ -9,31 +11,42 @@ from dataclasses import Field as DataclassField
 from functools import cached_property
 from typing import Any, ClassVar
 from warnings import warn
+
 import annotated_types
 import typing_extensions
 from pydantic_core import PydanticUndefined
 from typing_extensions import Literal, TypeAlias, Unpack, deprecated
+
 from . import types
 from ._internal import _decorators, _fields, _generics, _internal_dataclass, _repr, _typing_extra, _utils
 from .aliases import AliasChoices, AliasPath
 from .config import JsonDict
 from .errors import PydanticUserError
 from .warnings import PydanticDeprecatedSince20
+
 if typing.TYPE_CHECKING:
     from ._internal._repr import ReprArgs
 else:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
     DeprecationWarning = PydanticDeprecatedSince20
+
 __all__ = 'Field', 'PrivateAttr', 'computed_field'
+
+
 _Unset: Any = PydanticUndefined
+
 if sys.version_info >= (3, 13):
     import warnings
+
     Deprecated: TypeAlias = warnings.deprecated | deprecated
 else:
     Deprecated: TypeAlias = deprecated


-class _FromFieldInfoInputs(typing_extensions.TypedDict, total=(False)):
+class _FromFieldInfoInputs(typing_extensions.TypedDict, total=False):
     """This class exists solely to add type checking for the `**kwargs` in `FieldInfo.from_field`."""
+
     annotation: type[Any] | None
     default_factory: typing.Callable[[], Any] | None
     alias: str | None
@@ -41,8 +54,7 @@ class _FromFieldInfoInputs(typing_extensions.TypedDict, total=(False)):
     validation_alias: str | AliasPath | AliasChoices | None
     serialization_alias: str | None
     title: str | None
-    field_title_generator: typing_extensions.Callable[[str, FieldInfo], str
-        ] | None
+    field_title_generator: typing_extensions.Callable[[str, FieldInfo], str] | None
     description: str | None
     examples: list[Any] | None
     exclude: bool | None
@@ -72,8 +84,9 @@ class _FromFieldInfoInputs(typing_extensions.TypedDict, total=(False)):
     fail_fast: bool | None


-class _FieldInfoInputs(_FromFieldInfoInputs, total=(False)):
+class _FieldInfoInputs(_FromFieldInfoInputs, total=False):
     """This class exists solely to add type checking for the `**kwargs` in `FieldInfo.__init__`."""
+
     default: Any


@@ -112,6 +125,7 @@ class FieldInfo(_repr.Representation):
         kw_only: Whether the field should be a keyword-only argument in the constructor of the dataclass.
         metadata: List of metadata constraints.
     """
+
     annotation: type[Any] | None
     default: Any
     default_factory: typing.Callable[[], Any] | None
@@ -134,69 +148,100 @@ class FieldInfo(_repr.Representation):
     init_var: bool | None
     kw_only: bool | None
     metadata: list[Any]
-    __slots__ = ('annotation', 'default', 'default_factory', 'alias',
-        'alias_priority', 'validation_alias', 'serialization_alias',
-        'title', 'field_title_generator', 'description', 'examples',
-        'exclude', 'discriminator', 'deprecated', 'json_schema_extra',
-        'frozen', 'validate_default', 'repr', 'init', 'init_var', 'kw_only',
-        'metadata', '_attributes_set')
-    metadata_lookup: ClassVar[dict[str, typing.Callable[[Any], Any] | None]
-        ] = {'strict': types.Strict, 'gt': annotated_types.Gt, 'ge':
-        annotated_types.Ge, 'lt': annotated_types.Lt, 'le': annotated_types
-        .Le, 'multiple_of': annotated_types.MultipleOf, 'min_length':
-        annotated_types.MinLen, 'max_length': annotated_types.MaxLen,
-        'pattern': None, 'allow_inf_nan': None, 'max_digits': None,
-        'decimal_places': None, 'union_mode': None, 'coerce_numbers_to_str':
-        None, 'fail_fast': types.FailFast}
-
-    def __init__(self, **kwargs: Unpack[_FieldInfoInputs]) ->None:
+
+    __slots__ = (
+        'annotation',
+        'default',
+        'default_factory',
+        'alias',
+        'alias_priority',
+        'validation_alias',
+        'serialization_alias',
+        'title',
+        'field_title_generator',
+        'description',
+        'examples',
+        'exclude',
+        'discriminator',
+        'deprecated',
+        'json_schema_extra',
+        'frozen',
+        'validate_default',
+        'repr',
+        'init',
+        'init_var',
+        'kw_only',
+        'metadata',
+        '_attributes_set',
+    )
+
+    # used to convert kwargs to metadata/constraints,
+    # None has a special meaning - these items are collected into a `PydanticGeneralMetadata`
+    metadata_lookup: ClassVar[dict[str, typing.Callable[[Any], Any] | None]] = {
+        'strict': types.Strict,
+        'gt': annotated_types.Gt,
+        'ge': annotated_types.Ge,
+        'lt': annotated_types.Lt,
+        'le': annotated_types.Le,
+        'multiple_of': annotated_types.MultipleOf,
+        'min_length': annotated_types.MinLen,
+        'max_length': annotated_types.MaxLen,
+        'pattern': None,
+        'allow_inf_nan': None,
+        'max_digits': None,
+        'decimal_places': None,
+        'union_mode': None,
+        'coerce_numbers_to_str': None,
+        'fail_fast': types.FailFast,
+    }
+
+    def __init__(self, **kwargs: Unpack[_FieldInfoInputs]) -> None:
         """This class should generally not be initialized directly; instead, use the `pydantic.fields.Field` function
         or one of the constructor classmethods.

         See the signature of `pydantic.fields.Field` for more details about the expected arguments.
         """
-        self._attributes_set = {k: v for k, v in kwargs.items() if v is not
-            _Unset}
-        kwargs = {k: (_DefaultValues.get(k) if v is _Unset else v) for k, v in
-            kwargs.items()}
-        self.annotation, annotation_metadata = self._extract_metadata(kwargs
-            .get('annotation'))
+        self._attributes_set = {k: v for k, v in kwargs.items() if v is not _Unset}
+        kwargs = {k: _DefaultValues.get(k) if v is _Unset else v for k, v in kwargs.items()}  # type: ignore
+        self.annotation, annotation_metadata = self._extract_metadata(kwargs.get('annotation'))
+
         default = kwargs.pop('default', PydanticUndefined)
         if default is Ellipsis:
             self.default = PydanticUndefined
         else:
             self.default = default
+
         self.default_factory = kwargs.pop('default_factory', None)
-        if (self.default is not PydanticUndefined and self.default_factory
-             is not None):
+
+        if self.default is not PydanticUndefined and self.default_factory is not None:
             raise TypeError('cannot specify both default and default_factory')
+
         self.alias = kwargs.pop('alias', None)
         self.validation_alias = kwargs.pop('validation_alias', None)
         self.serialization_alias = kwargs.pop('serialization_alias', None)
-        alias_is_set = any(alias is not None for alias in (self.alias, self
-            .validation_alias, self.serialization_alias))
-        self.alias_priority = kwargs.pop('alias_priority', None
-            ) or 2 if alias_is_set else None
+        alias_is_set = any(alias is not None for alias in (self.alias, self.validation_alias, self.serialization_alias))
+        self.alias_priority = kwargs.pop('alias_priority', None) or 2 if alias_is_set else None
         self.title = kwargs.pop('title', None)
         self.field_title_generator = kwargs.pop('field_title_generator', None)
         self.description = kwargs.pop('description', None)
         self.examples = kwargs.pop('examples', None)
         self.exclude = kwargs.pop('exclude', None)
         self.discriminator = kwargs.pop('discriminator', None)
-        self.deprecated = kwargs.pop('deprecated', getattr(self,
-            'deprecated', None))
+        # For compatibility with FastAPI<=0.110.0, we preserve the existing value if it is not overridden
+        self.deprecated = kwargs.pop('deprecated', getattr(self, 'deprecated', None))
         self.repr = kwargs.pop('repr', True)
         self.json_schema_extra = kwargs.pop('json_schema_extra', None)
         self.validate_default = kwargs.pop('validate_default', None)
         self.frozen = kwargs.pop('frozen', None)
+        # currently only used on dataclasses
         self.init = kwargs.pop('init', None)
         self.init_var = kwargs.pop('init_var', None)
         self.kw_only = kwargs.pop('kw_only', None)
-        self.metadata = self._collect_metadata(kwargs) + annotation_metadata
+
+        self.metadata = self._collect_metadata(kwargs) + annotation_metadata  # type: ignore

     @staticmethod
-    def from_field(default: Any=PydanticUndefined, **kwargs: Unpack[
-        _FromFieldInfoInputs]) ->FieldInfo:
+    def from_field(default: Any = PydanticUndefined, **kwargs: Unpack[_FromFieldInfoInputs]) -> FieldInfo:
         """Create a new `FieldInfo` object with the `Field` function.

         Args:
@@ -219,10 +264,12 @@ class FieldInfo(_repr.Representation):
                 foo: int = pydantic.Field(4)
             ```
         """
-        pass
+        if 'annotation' in kwargs:
+            raise TypeError('"annotation" is not permitted as a Field keyword argument')
+        return FieldInfo(default=default, **kwargs)

     @staticmethod
-    def from_annotation(annotation: type[Any]) ->FieldInfo:
+    def from_annotation(annotation: type[Any]) -> FieldInfo:
         """Creates a `FieldInfo` instance from a bare annotation.

         This function is used internally to create a `FieldInfo` from a bare annotation like this:
@@ -254,11 +301,37 @@ class FieldInfo(_repr.Representation):
         Returns:
             An instance of the field metadata.
         """
-        pass
+        final = False
+        if _typing_extra.is_finalvar(annotation):
+            final = True
+            if annotation is not typing_extensions.Final:
+                annotation = typing_extensions.get_args(annotation)[0]
+
+        if _typing_extra.is_annotated(annotation):
+            first_arg, *extra_args = typing_extensions.get_args(annotation)
+            if _typing_extra.is_finalvar(first_arg):
+                final = True
+            field_info_annotations = [a for a in extra_args if isinstance(a, FieldInfo)]
+            field_info = FieldInfo.merge_field_infos(*field_info_annotations, annotation=first_arg)
+            if field_info:
+                new_field_info = copy(field_info)
+                new_field_info.annotation = first_arg
+                new_field_info.frozen = final or field_info.frozen
+                metadata: list[Any] = []
+                for a in extra_args:
+                    if _typing_extra.is_deprecated_instance(a):
+                        new_field_info.deprecated = a.message
+                    elif not isinstance(a, FieldInfo):
+                        metadata.append(a)
+                    else:
+                        metadata.extend(a.metadata)
+                new_field_info.metadata = metadata
+                return new_field_info
+
+        return FieldInfo(annotation=annotation, frozen=final or None)  # pyright: ignore[reportArgumentType]

     @staticmethod
-    def from_annotated_attribute(annotation: type[Any], default: Any
-        ) ->FieldInfo:
+    def from_annotated_attribute(annotation: type[Any], default: Any) -> FieldInfo:
         """Create `FieldInfo` from an annotation with a default value.

         This is used in cases like the following:
@@ -282,11 +355,68 @@ class FieldInfo(_repr.Representation):
         Returns:
             A field object with the passed values.
         """
-        pass
+        if annotation is default:
+            raise PydanticUserError(
+                'Error when building FieldInfo from annotated attribute. '
+                "Make sure you don't have any field name clashing with a type annotation ",
+                code='unevaluable-type-annotation',
+            )
+
+        final = False
+        if _typing_extra.is_finalvar(annotation):
+            final = True
+            if annotation is not typing_extensions.Final:
+                annotation = typing_extensions.get_args(annotation)[0]
+
+        if isinstance(default, FieldInfo):
+            default.annotation, annotation_metadata = FieldInfo._extract_metadata(annotation)  # pyright: ignore[reportArgumentType]
+            default.metadata += annotation_metadata
+            default = default.merge_field_infos(
+                *[x for x in annotation_metadata if isinstance(x, FieldInfo)], default, annotation=default.annotation
+            )
+            default.frozen = final or default.frozen
+            return default
+        elif isinstance(default, dataclasses.Field):
+            init_var = False
+            if annotation is dataclasses.InitVar:
+                init_var = True
+                annotation = typing.cast(Any, Any)
+            elif isinstance(annotation, dataclasses.InitVar):
+                init_var = True
+                annotation = annotation.type
+            pydantic_field = FieldInfo._from_dataclass_field(default)
+            pydantic_field.annotation, annotation_metadata = FieldInfo._extract_metadata(annotation)  # pyright: ignore[reportArgumentType]
+            pydantic_field.metadata += annotation_metadata
+            pydantic_field = pydantic_field.merge_field_infos(
+                *[x for x in annotation_metadata if isinstance(x, FieldInfo)],
+                pydantic_field,
+                annotation=pydantic_field.annotation,
+            )
+            pydantic_field.frozen = final or pydantic_field.frozen
+            pydantic_field.init_var = init_var
+            pydantic_field.init = getattr(default, 'init', None)
+            pydantic_field.kw_only = getattr(default, 'kw_only', None)
+            return pydantic_field
+        else:
+            if _typing_extra.is_annotated(annotation):
+                first_arg, *extra_args = typing_extensions.get_args(annotation)
+                field_infos = [a for a in extra_args if isinstance(a, FieldInfo)]
+                field_info = FieldInfo.merge_field_infos(*field_infos, annotation=first_arg, default=default)
+                metadata: list[Any] = []
+                for a in extra_args:
+                    if _typing_extra.is_deprecated_instance(a):
+                        field_info.deprecated = a.message
+                    elif not isinstance(a, FieldInfo):
+                        metadata.append(a)
+                    else:
+                        metadata.extend(a.metadata)
+                field_info.metadata = metadata
+                return field_info
+
+            return FieldInfo(annotation=annotation, default=default, frozen=final or None)  # pyright: ignore[reportArgumentType]

     @staticmethod
-    def merge_field_infos(*field_infos: FieldInfo, **overrides: Any
-        ) ->FieldInfo:
+    def merge_field_infos(*field_infos: FieldInfo, **overrides: Any) -> FieldInfo:
         """Merge `FieldInfo` instances keeping only explicitly set attributes.

         Later `FieldInfo` instances override earlier ones.
@@ -294,10 +424,40 @@ class FieldInfo(_repr.Representation):
         Returns:
             FieldInfo: A merged FieldInfo instance.
         """
-        pass
+        flattened_field_infos: list[FieldInfo] = []
+        for field_info in field_infos:
+            flattened_field_infos.extend(x for x in field_info.metadata if isinstance(x, FieldInfo))
+            flattened_field_infos.append(field_info)
+        field_infos = tuple(flattened_field_infos)
+        if len(field_infos) == 1:
+            # No merging necessary, but we still need to make a copy and apply the overrides
+            field_info = copy(field_infos[0])
+            field_info._attributes_set.update(overrides)
+
+            default_override = overrides.pop('default', PydanticUndefined)
+            if default_override is Ellipsis:
+                default_override = PydanticUndefined
+            if default_override is not PydanticUndefined:
+                field_info.default = default_override
+
+            for k, v in overrides.items():
+                setattr(field_info, k, v)
+            return field_info  # type: ignore
+
+        new_kwargs: dict[str, Any] = {}
+        metadata = {}
+        for field_info in field_infos:
+            new_kwargs.update(field_info._attributes_set)
+            for x in field_info.metadata:
+                if not isinstance(x, FieldInfo):
+                    metadata[type(x)] = x
+        new_kwargs.update(overrides)
+        field_info = FieldInfo(**new_kwargs)
+        field_info.metadata = list(metadata.values())
+        return field_info
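
A small sketch of the merge behaviour (the two `Field(...)` calls and the `default` override are illustrative): later instances win, and constraint metadata is deduplicated by type.

from pydantic import Field
from pydantic.fields import FieldInfo

a = Field(description='first', gt=0)
b = Field(title='second')
merged = FieldInfo.merge_field_infos(a, b, default=5)
print(merged.title, merged.description, merged.default)  # second first 5
print(merged.metadata)                                   # [Gt(gt=0)]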

     @staticmethod
-    def _from_dataclass_field(dc_field: DataclassField[Any]) ->FieldInfo:
+    def _from_dataclass_field(dc_field: DataclassField[Any]) -> FieldInfo:
         """Return a new `FieldInfo` instance from a `dataclasses.Field` instance.

         Args:
@@ -309,11 +469,21 @@ class FieldInfo(_repr.Representation):
         Raises:
             TypeError: If any of the `FieldInfo` kwargs does not match the `dataclass.Field` kwargs.
         """
-        pass
+        default = dc_field.default
+        if default is dataclasses.MISSING:
+            default = PydanticUndefined
+
+        if dc_field.default_factory is dataclasses.MISSING:
+            default_factory: typing.Callable[[], Any] | None = None
+        else:
+            default_factory = dc_field.default_factory
+
+        # use the `Field` function so that incorrect kwargs raise the correct `TypeError`
+        dc_field_metadata = {k: v for k, v in dc_field.metadata.items() if k in _FIELD_ARG_NAMES}
+        return Field(default=default, default_factory=default_factory, repr=dc_field.repr, **dc_field_metadata)

     @staticmethod
-    def _extract_metadata(annotation: (type[Any] | None)) ->tuple[type[Any] |
-        None, list[Any]]:
+    def _extract_metadata(annotation: type[Any] | None) -> tuple[type[Any] | None, list[Any]]:
         """Tries to extract metadata/constraints from an annotation if it uses `Annotated`.

         Args:
@@ -322,10 +492,15 @@ class FieldInfo(_repr.Representation):
         Returns:
             A tuple containing the extracted metadata type and the list of extra arguments.
         """
-        pass
+        if annotation is not None:
+            if _typing_extra.is_annotated(annotation):
+                first_arg, *extra_args = typing_extensions.get_args(annotation)
+                return first_arg, list(extra_args)
+
+        return annotation, []

     @staticmethod
-    def _collect_metadata(kwargs: dict[str, Any]) ->list[Any]:
+    def _collect_metadata(kwargs: dict[str, Any]) -> list[Any]:
         """Collect annotations from kwargs.

         Args:
@@ -335,14 +510,34 @@ class FieldInfo(_repr.Representation):
             A list of metadata objects - a combination of `annotated_types.BaseMetadata` and
                 `PydanticMetadata`.
         """
-        pass
+        metadata: list[Any] = []
+        general_metadata = {}
+        for key, value in list(kwargs.items()):
+            try:
+                marker = FieldInfo.metadata_lookup[key]
+            except KeyError:
+                continue
+
+            del kwargs[key]
+            if value is not None:
+                if marker is None:
+                    general_metadata[key] = value
+                else:
+                    metadata.append(marker(value))
+        if general_metadata:
+            metadata.append(_fields.pydantic_general_metadata(**general_metadata))
+        return metadata
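
A quick sketch of what `_collect_metadata` produces for a few `Field` keyword arguments (the exact repr of the grouped general-metadata object is an internal detail):

from pydantic import Field

fi = Field(min_length=1, max_length=10, pattern=r'^[a-z]+$')
print(fi.metadata)
# roughly: [MinLen(min_length=1), MaxLen(max_length=10), _PydanticGeneralMetadata(pattern='^[a-z]+$')]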

     @property
-    def deprecation_message(self) ->(str | None):
+    def deprecation_message(self) -> str | None:
         """The deprecation message to be emitted, or `None` if not set."""
-        pass
+        if self.deprecated is None:
+            return None
+        if isinstance(self.deprecated, bool):
+            return 'deprecated' if self.deprecated else None
+        return self.deprecated if isinstance(self.deprecated, str) else self.deprecated.message

-    def get_default(self, *, call_default_factory: bool=False) ->Any:
+    def get_default(self, *, call_default_factory: bool = False) -> Any:
         """Get the default value.

         We expose an option for whether to call the default_factory (if present), as calling it may
@@ -355,17 +550,22 @@ class FieldInfo(_repr.Representation):
         Returns:
             The default value, calling the default factory if requested or `None` if not set.
         """
-        pass
+        if self.default_factory is None:
+            return _utils.smart_deepcopy(self.default)
+        elif call_default_factory:
+            return self.default_factory()
+        else:
+            return None
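
A minimal sketch of the `call_default_factory` flag (the `FieldInfo` construction here is illustrative):

from pydantic.fields import FieldInfo

fi = FieldInfo(default_factory=list)
print(fi.get_default())                           # None -- factory present but not called
print(fi.get_default(call_default_factory=True))  # []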

-    def is_required(self) ->bool:
+    def is_required(self) -> bool:
         """Check if the field is required (i.e., does not have a default value or factory).

         Returns:
             `True` if the field is required, `False` otherwise.
         """
-        pass
+        return self.default is PydanticUndefined and self.default_factory is None

-    def rebuild_annotation(self) ->Any:
+    def rebuild_annotation(self) -> Any:
         """Attempts to rebuild the original annotation for use in function signatures.

         If metadata is present, it adds it to the original annotation using
@@ -378,10 +578,13 @@ class FieldInfo(_repr.Representation):
         Returns:
             The rebuilt annotation.
         """
-        pass
+        if not self.metadata:
+            return self.annotation
+        else:
+            # Annotated arguments must be a tuple
+            return typing_extensions.Annotated[(self.annotation, *self.metadata)]  # type: ignore
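
A brief sketch of `rebuild_annotation` with a single constraint (the `FieldInfo` construction is illustrative):

from pydantic.fields import FieldInfo

fi = FieldInfo(annotation=int, gt=0)
print(fi.rebuild_annotation())  # e.g. typing.Annotated[int, Gt(gt=0)]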

-    def apply_typevars_map(self, typevars_map: (dict[Any, Any] | None),
-        types_namespace: (dict[str, Any] | None)) ->None:
+    def apply_typevars_map(self, typevars_map: dict[Any, Any] | None, types_namespace: dict[str, Any] | None) -> None:
         """Apply a `typevars_map` to the annotation.

         This method is used when analyzing parametrized generic types to replace typevars with their concrete types.
@@ -396,12 +599,13 @@ class FieldInfo(_repr.Representation):
             pydantic._internal._generics.replace_types is used for replacing the typevars with
                 their concrete types.
         """
-        pass
+        annotation = _typing_extra.eval_type_lenient(self.annotation, types_namespace)
+        self.annotation = _generics.replace_types(annotation, typevars_map)

-    def __repr_args__(self) ->ReprArgs:
-        yield 'annotation', _repr.PlainRepr(_repr.display_as_type(self.
-            annotation))
+    def __repr_args__(self) -> ReprArgs:
+        yield 'annotation', _repr.PlainRepr(_repr.display_as_type(self.annotation))
         yield 'required', self.is_required()
+
         for s in self.__slots__:
             if s == '_attributes_set':
                 continue
@@ -415,14 +619,12 @@ class FieldInfo(_repr.Representation):
                 continue
             if s == 'validation_alias' and self.validation_alias == self.alias:
                 continue
-            if (s == 'serialization_alias' and self.serialization_alias ==
-                self.alias):
+            if s == 'serialization_alias' and self.serialization_alias == self.alias:
                 continue
             if s == 'default' and self.default is not PydanticUndefined:
                 yield 'default', self.default
             elif s == 'default_factory' and self.default_factory is not None:
-                yield 'default_factory', _repr.PlainRepr(_repr.
-                    display_as_type(self.default_factory))
+                yield 'default_factory', _repr.PlainRepr(_repr.display_as_type(self.default_factory))
             else:
                 value = getattr(self, s)
                 if value is not None and value is not PydanticUndefined:
@@ -433,38 +635,80 @@ class _EmptyKwargs(typing_extensions.TypedDict):
     """This class exists solely to ensure that type checking warns about passing `**extra` in `Field`."""


-_DefaultValues = dict(default=..., default_factory=None, alias=None,
-    alias_priority=None, validation_alias=None, serialization_alias=None,
-    title=None, description=None, examples=None, exclude=None,
-    discriminator=None, json_schema_extra=None, frozen=None,
-    validate_default=None, repr=True, init=None, init_var=None, kw_only=
-    None, pattern=None, strict=None, gt=None, ge=None, lt=None, le=None,
-    multiple_of=None, allow_inf_nan=None, max_digits=None, decimal_places=
-    None, min_length=None, max_length=None, coerce_numbers_to_str=None)
-
-
-def Field(default: Any=PydanticUndefined, *, default_factory: (typing.
-    Callable[[], Any] | None)=_Unset, alias: (str | None)=_Unset,
-    alias_priority: (int | None)=_Unset, validation_alias: (str | AliasPath |
-    AliasChoices | None)=_Unset, serialization_alias: (str | None)=_Unset,
-    title: (str | None)=_Unset, field_title_generator: (typing_extensions.
-    Callable[[str, FieldInfo], str] | None)=_Unset, description: (str |
-    None)=_Unset, examples: (list[Any] | None)=_Unset, exclude: (bool |
-    None)=_Unset, discriminator: (str | types.Discriminator | None)=_Unset,
-    deprecated: (Deprecated | str | bool | None)=_Unset, json_schema_extra:
-    (JsonDict | typing.Callable[[JsonDict], None] | None)=_Unset, frozen: (
-    bool | None)=_Unset, validate_default: (bool | None)=_Unset, repr: bool
-    =_Unset, init: (bool | None)=_Unset, init_var: (bool | None)=_Unset,
-    kw_only: (bool | None)=_Unset, pattern: (str | typing.Pattern[str] |
-    None)=_Unset, strict: (bool | None)=_Unset, coerce_numbers_to_str: (
-    bool | None)=_Unset, gt: (annotated_types.SupportsGt | None)=_Unset, ge:
-    (annotated_types.SupportsGe | None)=_Unset, lt: (annotated_types.
-    SupportsLt | None)=_Unset, le: (annotated_types.SupportsLe | None)=
-    _Unset, multiple_of: (float | None)=_Unset, allow_inf_nan: (bool | None
-    )=_Unset, max_digits: (int | None)=_Unset, decimal_places: (int | None)
-    =_Unset, min_length: (int | None)=_Unset, max_length: (int | None)=
-    _Unset, union_mode: Literal['smart', 'left_to_right']=_Unset, fail_fast:
-    (bool | None)=_Unset, **extra: Unpack[_EmptyKwargs]) ->Any:
+_DefaultValues = dict(
+    default=...,
+    default_factory=None,
+    alias=None,
+    alias_priority=None,
+    validation_alias=None,
+    serialization_alias=None,
+    title=None,
+    description=None,
+    examples=None,
+    exclude=None,
+    discriminator=None,
+    json_schema_extra=None,
+    frozen=None,
+    validate_default=None,
+    repr=True,
+    init=None,
+    init_var=None,
+    kw_only=None,
+    pattern=None,
+    strict=None,
+    gt=None,
+    ge=None,
+    lt=None,
+    le=None,
+    multiple_of=None,
+    allow_inf_nan=None,
+    max_digits=None,
+    decimal_places=None,
+    min_length=None,
+    max_length=None,
+    coerce_numbers_to_str=None,
+)
+
+
+def Field(  # noqa: C901
+    default: Any = PydanticUndefined,
+    *,
+    default_factory: typing.Callable[[], Any] | None = _Unset,
+    alias: str | None = _Unset,
+    alias_priority: int | None = _Unset,
+    validation_alias: str | AliasPath | AliasChoices | None = _Unset,
+    serialization_alias: str | None = _Unset,
+    title: str | None = _Unset,
+    field_title_generator: typing_extensions.Callable[[str, FieldInfo], str] | None = _Unset,
+    description: str | None = _Unset,
+    examples: list[Any] | None = _Unset,
+    exclude: bool | None = _Unset,
+    discriminator: str | types.Discriminator | None = _Unset,
+    deprecated: Deprecated | str | bool | None = _Unset,
+    json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None = _Unset,
+    frozen: bool | None = _Unset,
+    validate_default: bool | None = _Unset,
+    repr: bool = _Unset,
+    init: bool | None = _Unset,
+    init_var: bool | None = _Unset,
+    kw_only: bool | None = _Unset,
+    pattern: str | typing.Pattern[str] | None = _Unset,
+    strict: bool | None = _Unset,
+    coerce_numbers_to_str: bool | None = _Unset,
+    gt: annotated_types.SupportsGt | None = _Unset,
+    ge: annotated_types.SupportsGe | None = _Unset,
+    lt: annotated_types.SupportsLt | None = _Unset,
+    le: annotated_types.SupportsLe | None = _Unset,
+    multiple_of: float | None = _Unset,
+    allow_inf_nan: bool | None = _Unset,
+    max_digits: int | None = _Unset,
+    decimal_places: int | None = _Unset,
+    min_length: int | None = _Unset,
+    max_length: int | None = _Unset,
+    union_mode: Literal['smart', 'left_to_right'] = _Unset,
+    fail_fast: bool | None = _Unset,
+    **extra: Unpack[_EmptyKwargs],
+) -> Any:
     """Usage docs: https://docs.pydantic.dev/2.8/concepts/fields

     Create a field for objects that can be configured.
@@ -529,11 +773,111 @@ def Field(default: Any=PydanticUndefined, *, default_factory: (typing.
         A new [`FieldInfo`][pydantic.fields.FieldInfo]. The return annotation is `Any` so `Field` can be used on
             type-annotated fields without causing a type error.
     """
-    pass
+    # Check deprecated and removed params from V1. This logic should eventually be removed.
+    const = extra.pop('const', None)  # type: ignore
+    if const is not None:
+        raise PydanticUserError('`const` is removed, use `Literal` instead', code='removed-kwargs')
+
+    min_items = extra.pop('min_items', None)  # type: ignore
+    if min_items is not None:
+        warn('`min_items` is deprecated and will be removed, use `min_length` instead', DeprecationWarning)
+        if min_length in (None, _Unset):
+            min_length = min_items  # type: ignore
+
+    max_items = extra.pop('max_items', None)  # type: ignore
+    if max_items is not None:
+        warn('`max_items` is deprecated and will be removed, use `max_length` instead', DeprecationWarning)
+        if max_length in (None, _Unset):
+            max_length = max_items  # type: ignore
+
+    unique_items = extra.pop('unique_items', None)  # type: ignore
+    if unique_items is not None:
+        raise PydanticUserError(
+            (
+                '`unique_items` is removed, use `Set` instead'
+                '(this feature is discussed in https://github.com/pydantic/pydantic-core/issues/296)'
+            ),
+            code='removed-kwargs',
+        )
+
+    allow_mutation = extra.pop('allow_mutation', None)  # type: ignore
+    if allow_mutation is not None:
+        warn('`allow_mutation` is deprecated and will be removed. use `frozen` instead', DeprecationWarning)
+        if allow_mutation is False:
+            frozen = True
+
+    regex = extra.pop('regex', None)  # type: ignore
+    if regex is not None:
+        raise PydanticUserError('`regex` is removed. use `pattern` instead', code='removed-kwargs')
+
+    if extra:
+        warn(
+            'Using extra keyword arguments on `Field` is deprecated and will be removed.'
+            ' Use `json_schema_extra` instead.'
+            f' (Extra keys: {", ".join(k.__repr__() for k in extra.keys())})',
+            DeprecationWarning,
+        )
+        if not json_schema_extra or json_schema_extra is _Unset:
+            json_schema_extra = extra  # type: ignore
+
+    if (
+        validation_alias
+        and validation_alias is not _Unset
+        and not isinstance(validation_alias, (str, AliasChoices, AliasPath))
+    ):
+        raise TypeError('Invalid `validation_alias` type. it should be `str`, `AliasChoices`, or `AliasPath`')
+
+    if serialization_alias in (_Unset, None) and isinstance(alias, str):
+        serialization_alias = alias
+
+    if validation_alias in (_Unset, None):
+        validation_alias = alias
+
+    include = extra.pop('include', None)  # type: ignore
+    if include is not None:
+        warn('`include` is deprecated and does nothing. It will be removed, use `exclude` instead', DeprecationWarning)
+
+    return FieldInfo.from_field(
+        default,
+        default_factory=default_factory,
+        alias=alias,
+        alias_priority=alias_priority,
+        validation_alias=validation_alias,
+        serialization_alias=serialization_alias,
+        title=title,
+        field_title_generator=field_title_generator,
+        description=description,
+        examples=examples,
+        exclude=exclude,
+        discriminator=discriminator,
+        deprecated=deprecated,
+        json_schema_extra=json_schema_extra,
+        frozen=frozen,
+        pattern=pattern,
+        validate_default=validate_default,
+        repr=repr,
+        init=init,
+        init_var=init_var,
+        kw_only=kw_only,
+        coerce_numbers_to_str=coerce_numbers_to_str,
+        strict=strict,
+        gt=gt,
+        ge=ge,
+        lt=lt,
+        le=le,
+        multiple_of=multiple_of,
+        min_length=min_length,
+        max_length=max_length,
+        allow_inf_nan=allow_inf_nan,
+        max_digits=max_digits,
+        decimal_places=decimal_places,
+        union_mode=union_mode,
+        fail_fast=fail_fast,
+    )


 _FIELD_ARG_NAMES = set(inspect.signature(Field).parameters)
-_FIELD_ARG_NAMES.remove('extra')
+_FIELD_ARG_NAMES.remove('extra')  # do not include the varkwargs parameter
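
A hedged usage sketch of `Field` as defined above (the `Item` model and its field names are illustrative):

from pydantic import BaseModel, Field

class Item(BaseModel):
    name: str = Field(alias='itemName', min_length=1)
    price: float = Field(gt=0, description='Unit price in USD')
    tags: list[str] = Field(default_factory=list)

item = Item.model_validate({'itemName': 'widget', 'price': 9.99})
print(item.model_dump(by_alias=True))  # {'itemName': 'widget', 'price': 9.99, 'tags': []}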


 class ModelPrivateAttr(_repr.Representation):
@@ -548,25 +892,28 @@ class ModelPrivateAttr(_repr.Representation):
         default_factory: A callable function that generates the default value of the
             attribute if not provided.
     """
+
     __slots__ = 'default', 'default_factory'

-    def __init__(self, default: Any=PydanticUndefined, *, default_factory:
-        (typing.Callable[[], Any] | None)=None) ->None:
+    def __init__(
+        self, default: Any = PydanticUndefined, *, default_factory: typing.Callable[[], Any] | None = None
+    ) -> None:
         self.default = default
         self.default_factory = default_factory
+
     if not typing.TYPE_CHECKING:
+        # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access

-        def __getattr__(self, item: str) ->Any:
+        def __getattr__(self, item: str) -> Any:
             """This function improves compatibility with custom descriptors by ensuring delegation happens
             as expected when the default value of a private attribute is a descriptor.
             """
             if item in {'__get__', '__set__', '__delete__'}:
                 if hasattr(self.default, item):
                     return getattr(self.default, item)
-            raise AttributeError(
-                f'{type(self).__name__!r} object has no attribute {item!r}')
+            raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}')

-    def __set_name__(self, cls: type[Any], name: str) ->None:
+    def __set_name__(self, cls: type[Any], name: str) -> None:
         """Preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487."""
         if self.default is PydanticUndefined:
             return
@@ -576,7 +923,7 @@ class ModelPrivateAttr(_repr.Representation):
         if callable(set_name):
             set_name(cls, name)

-    def get_default(self) ->Any:
+    def get_default(self) -> Any:
         """Retrieve the default value of the object.

         If `self.default_factory` is `None`, the method will return a deep copy of the `self.default` object.
@@ -586,15 +933,21 @@ class ModelPrivateAttr(_repr.Representation):
         Returns:
             The default value of the object.
         """
-        pass
+        return _utils.smart_deepcopy(self.default) if self.default_factory is None else self.default_factory()

-    def __eq__(self, other: Any) ->bool:
-        return isinstance(other, self.__class__) and (self.default, self.
-            default_factory) == (other.default, other.default_factory)
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, self.__class__) and (self.default, self.default_factory) == (
+            other.default,
+            other.default_factory,
+        )


-def PrivateAttr(default: Any=PydanticUndefined, *, default_factory: (typing
-    .Callable[[], Any] | None)=None, init: Literal[False]=False) ->Any:
+def PrivateAttr(
+    default: Any = PydanticUndefined,
+    *,
+    default_factory: typing.Callable[[], Any] | None = None,
+    init: Literal[False] = False,
+) -> Any:
     """Usage docs: https://docs.pydantic.dev/2.8/concepts/models/#private-model-attributes

     Indicates that an attribute is intended for private use and not handled during normal validation/serialization.
@@ -616,7 +969,13 @@ def PrivateAttr(default: Any=PydanticUndefined, *, default_factory: (typing
     Raises:
         ValueError: If both `default` and `default_factory` are set.
     """
-    pass
+    if default is not PydanticUndefined and default_factory is not None:
+        raise TypeError('cannot specify both default and default_factory')
+
+    return ModelPrivateAttr(
+        default,
+        default_factory=default_factory,
+    )
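
A minimal sketch of `PrivateAttr` usage (the `Connection` model is illustrative):

from pydantic import BaseModel, PrivateAttr

class Connection(BaseModel):
    url: str
    _retries: int = PrivateAttr(default=3)
    _cache: dict = PrivateAttr(default_factory=dict)

conn = Connection(url='https://example.com')
print(conn._retries)      # 3
print(conn.model_dump())  # {'url': 'https://example.com'} -- private attributes are not serialized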


 @dataclasses.dataclass(**_internal_dataclass.slots_true)
@@ -638,14 +997,14 @@ class ComputedFieldInfo:
         json_schema_extra: A dict or callable to provide extra JSON schema properties.
         repr: A boolean indicating whether to include the field in the __repr__ output.
     """
+
     decorator_repr: ClassVar[str] = '@computed_field'
     wrapped_property: property
     return_type: Any
     alias: str | None
     alias_priority: int | None
     title: str | None
-    field_title_generator: typing.Callable[[str, ComputedFieldInfo], str
-        ] | None
+    field_title_generator: typing.Callable[[str, ComputedFieldInfo], str] | None
     description: str | None
     deprecated: Deprecated | str | bool | None
     examples: list[Any] | None
@@ -653,28 +1012,67 @@ class ComputedFieldInfo:
     repr: bool

     @property
-    def deprecation_message(self) ->(str | None):
+    def deprecation_message(self) -> str | None:
         """The deprecation message to be emitted, or `None` if not set."""
-        pass
+        if self.deprecated is None:
+            return None
+        if isinstance(self.deprecated, bool):
+            return 'deprecated' if self.deprecated else None
+        return self.deprecated if isinstance(self.deprecated, str) else self.deprecated.message


-def _wrapped_property_is_private(property_: (cached_property | property)
-    ) ->bool:
+def _wrapped_property_is_private(property_: cached_property | property) -> bool:  # type: ignore
     """Returns true if provided property is private, False otherwise."""
-    pass
+    wrapped_name: str = ''
+
+    if isinstance(property_, property):
+        wrapped_name = getattr(property_.fget, '__name__', '')
+    elif isinstance(property_, cached_property):  # type: ignore
+        wrapped_name = getattr(property_.func, '__name__', '')  # type: ignore

+    return wrapped_name.startswith('_') and not wrapped_name.startswith('__')

+
+# this should really be `property[T], cached_property[T]` but property is not generic unlike cached_property
+# See https://github.com/python/typing/issues/985 and linked issues
 PropertyT = typing.TypeVar('PropertyT')


-def computed_field(func: (PropertyT | None)=None, /, *, alias: (str | None)
-    =None, alias_priority: (int | None)=None, title: (str | None)=None,
-    field_title_generator: (typing.Callable[[str, ComputedFieldInfo], str] |
-    None)=None, description: (str | None)=None, deprecated: (Deprecated |
-    str | bool | None)=None, examples: (list[Any] | None)=None,
-    json_schema_extra: (JsonDict | typing.Callable[[JsonDict], None] | None
-    )=None, repr: (bool | None)=None, return_type: Any=PydanticUndefined) ->(
-    PropertyT | typing.Callable[[PropertyT], PropertyT]):
+@typing.overload
+def computed_field(
+    *,
+    alias: str | None = None,
+    alias_priority: int | None = None,
+    title: str | None = None,
+    field_title_generator: typing.Callable[[str, ComputedFieldInfo], str] | None = None,
+    description: str | None = None,
+    deprecated: Deprecated | str | bool | None = None,
+    examples: list[Any] | None = None,
+    json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None = None,
+    repr: bool = True,
+    return_type: Any = PydanticUndefined,
+) -> typing.Callable[[PropertyT], PropertyT]: ...
+
+
+@typing.overload
+def computed_field(__func: PropertyT) -> PropertyT: ...
+
+
+def computed_field(
+    func: PropertyT | None = None,
+    /,
+    *,
+    alias: str | None = None,
+    alias_priority: int | None = None,
+    title: str | None = None,
+    field_title_generator: typing.Callable[[str, ComputedFieldInfo], str] | None = None,
+    description: str | None = None,
+    deprecated: Deprecated | str | bool | None = None,
+    examples: list[Any] | None = None,
+    json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None = None,
+    repr: bool | None = None,
+    return_type: Any = PydanticUndefined,
+) -> PropertyT | typing.Callable[[PropertyT], PropertyT]:
     """Usage docs: https://docs.pydantic.dev/2.8/concepts/fields#the-computed_field-decorator

     Decorator to include `property` and `cached_property` when serializing models or dataclasses.
@@ -816,4 +1214,42 @@ def computed_field(func: (PropertyT | None)=None, /, *, alias: (str | None)
     Returns:
         A proxy wrapper for the property.
     """
-    pass
+
+    def dec(f: Any) -> Any:
+        nonlocal description, deprecated, return_type, alias_priority
+        unwrapped = _decorators.unwrap_wrapped_function(f)
+
+        if description is None and unwrapped.__doc__:
+            description = inspect.cleandoc(unwrapped.__doc__)
+
+        if deprecated is None and hasattr(unwrapped, '__deprecated__'):
+            deprecated = unwrapped.__deprecated__
+
+        # if the function isn't already decorated with `@property` (or another descriptor), then we wrap it now
+        f = _decorators.ensure_property(f)
+        alias_priority = (alias_priority or 2) if alias is not None else None
+
+        if repr is None:
+            repr_: bool = not _wrapped_property_is_private(property_=f)
+        else:
+            repr_ = repr
+
+        dec_info = ComputedFieldInfo(
+            f,
+            return_type,
+            alias,
+            alias_priority,
+            title,
+            field_title_generator,
+            description,
+            deprecated,
+            examples,
+            json_schema_extra,
+            repr_,
+        )
+        return _decorators.PydanticDescriptorProxy(f, dec_info)
+
+    if func is None:
+        return dec
+    else:
+        return dec(func)
diff --git a/pydantic/functional_serializers.py b/pydantic/functional_serializers.py
index a588b3b52..478c4a98d 100644
--- a/pydantic/functional_serializers.py
+++ b/pydantic/functional_serializers.py
@@ -1,11 +1,15 @@
 """This module contains related classes and functions for serialization."""
+
 from __future__ import annotations
+
 import dataclasses
 from functools import partialmethod
 from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, overload
+
 from pydantic_core import PydanticUndefined, core_schema
 from pydantic_core import core_schema as _core_schema
 from typing_extensions import Annotated, Literal, TypeAlias
+
 from . import PydanticUndefinedAnnotation
 from ._internal import _decorators, _internal_dataclass
 from .annotated_handlers import GetCoreSchemaHandler
@@ -43,13 +47,12 @@ class PlainSerializer:
         when_used: Determines when this serializer should be used. Accepts a string with values `'always'`,
             `'unless-none'`, `'json'`, and `'json-unless-none'`. Defaults to 'always'.
     """
+
     func: core_schema.SerializerFunction
     return_type: Any = PydanticUndefined
-    when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'
-        ] = 'always'
+    when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always'

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
         """Gets the Pydantic core schema.

         Args:
@@ -61,17 +64,18 @@ class PlainSerializer:
         """
         schema = handler(source_type)
         try:
-            return_type = _decorators.get_function_return_type(self.func,
-                self.return_type, handler._get_types_namespace())
+            return_type = _decorators.get_function_return_type(
+                self.func, self.return_type, handler._get_types_namespace()
+            )
         except NameError as e:
             raise PydanticUndefinedAnnotation.from_name_error(e) from e
-        return_schema = (None if return_type is PydanticUndefined else
-            handler.generate_schema(return_type))
-        schema['serialization'
-            ] = core_schema.plain_serializer_function_ser_schema(function=
-            self.func, info_arg=_decorators.inspect_annotated_serializer(
-            self.func, 'plain'), return_schema=return_schema, when_used=
-            self.when_used)
+        return_schema = None if return_type is PydanticUndefined else handler.generate_schema(return_type)
+        schema['serialization'] = core_schema.plain_serializer_function_ser_schema(
+            function=self.func,
+            info_arg=_decorators.inspect_annotated_serializer(self.func, 'plain'),
+            return_schema=return_schema,
+            when_used=self.when_used,
+        )
         return schema


@@ -139,13 +143,12 @@ class WrapSerializer:
         when_used: Determines when this serializer should be used. Accepts a string with values `'always'`,
             `'unless-none'`, `'json'`, and `'json-unless-none'`. Defaults to 'always'.
     """
+
     func: core_schema.WrapSerializerFunction
     return_type: Any = PydanticUndefined
-    when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'
-        ] = 'always'
+    when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always'

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
         """This method is used to get the Pydantic core schema of the class.

         Args:
@@ -157,37 +160,71 @@ class WrapSerializer:
         """
         schema = handler(source_type)
         try:
-            return_type = _decorators.get_function_return_type(self.func,
-                self.return_type, handler._get_types_namespace())
+            return_type = _decorators.get_function_return_type(
+                self.func, self.return_type, handler._get_types_namespace()
+            )
         except NameError as e:
             raise PydanticUndefinedAnnotation.from_name_error(e) from e
-        return_schema = (None if return_type is PydanticUndefined else
-            handler.generate_schema(return_type))
-        schema['serialization'
-            ] = core_schema.wrap_serializer_function_ser_schema(function=
-            self.func, info_arg=_decorators.inspect_annotated_serializer(
-            self.func, 'wrap'), return_schema=return_schema, when_used=self
-            .when_used)
+        return_schema = None if return_type is PydanticUndefined else handler.generate_schema(return_type)
+        schema['serialization'] = core_schema.wrap_serializer_function_ser_schema(
+            function=self.func,
+            info_arg=_decorators.inspect_annotated_serializer(self.func, 'wrap'),
+            return_schema=return_schema,
+            when_used=self.when_used,
+        )
         return schema


 if TYPE_CHECKING:
-    _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any],
-        staticmethod[Any, Any], partialmethod[Any]]
-    _PlainSerializationFunction = Union[_core_schema.SerializerFunction,
-        _PartialClsOrStaticMethod]
-    _WrapSerializationFunction = Union[_core_schema.WrapSerializerFunction,
-        _PartialClsOrStaticMethod]
-    _PlainSerializeMethodType = TypeVar('_PlainSerializeMethodType', bound=
-        _PlainSerializationFunction)
-    _WrapSerializeMethodType = TypeVar('_WrapSerializeMethodType', bound=
-        _WrapSerializationFunction)
-
-
-def field_serializer(*fields: str, mode: Literal['plain', 'wrap']='plain',
-    return_type: Any=PydanticUndefined, when_used: Literal['always',
-    'unless-none', 'json', 'json-unless-none']='always', check_fields: (
-    bool | None)=None) ->Callable[[Any], Any]:
+    _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any]]
+    _PlainSerializationFunction = Union[_core_schema.SerializerFunction, _PartialClsOrStaticMethod]
+    _WrapSerializationFunction = Union[_core_schema.WrapSerializerFunction, _PartialClsOrStaticMethod]
+    _PlainSerializeMethodType = TypeVar('_PlainSerializeMethodType', bound=_PlainSerializationFunction)
+    _WrapSerializeMethodType = TypeVar('_WrapSerializeMethodType', bound=_WrapSerializationFunction)
+
+
+@overload
+def field_serializer(
+    field: str,
+    /,
+    *fields: str,
+    return_type: Any = ...,
+    when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = ...,
+    check_fields: bool | None = ...,
+) -> Callable[[_PlainSerializeMethodType], _PlainSerializeMethodType]: ...
+
+
+@overload
+def field_serializer(
+    field: str,
+    /,
+    *fields: str,
+    mode: Literal['plain'],
+    return_type: Any = ...,
+    when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = ...,
+    check_fields: bool | None = ...,
+) -> Callable[[_PlainSerializeMethodType], _PlainSerializeMethodType]: ...
+
+
+@overload
+def field_serializer(
+    field: str,
+    /,
+    *fields: str,
+    mode: Literal['wrap'],
+    return_type: Any = ...,
+    when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = ...,
+    check_fields: bool | None = ...,
+) -> Callable[[_WrapSerializeMethodType], _WrapSerializeMethodType]: ...
+
+
+def field_serializer(
+    *fields: str,
+    mode: Literal['plain', 'wrap'] = 'plain',
+    return_type: Any = PydanticUndefined,
+    when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always',
+    check_fields: bool | None = None,
+) -> Callable[[Any], Any]:
     """Decorator that enables custom field serialization.

     In the below example, a field of type `set` is used to mitigate duplication. A `field_serializer` is used to serialize the data as a sorted list.
@@ -233,16 +270,46 @@ def field_serializer(*fields: str, mode: Literal['plain', 'wrap']='plain',
     Returns:
         The decorator function.
     """
-    pass
+
+    def dec(
+        f: Callable[..., Any] | staticmethod[Any, Any] | classmethod[Any, Any, Any],
+    ) -> _decorators.PydanticDescriptorProxy[Any]:
+        dec_info = _decorators.FieldSerializerDecoratorInfo(
+            fields=fields,
+            mode=mode,
+            return_type=return_type,
+            when_used=when_used,
+            check_fields=check_fields,
+        )
+        return _decorators.PydanticDescriptorProxy(f, dec_info)
+
+    return dec


 FuncType = TypeVar('FuncType', bound=Callable[..., Any])


-def model_serializer(f: (Callable[..., Any] | None)=None, /, *, mode:
-    Literal['plain', 'wrap']='plain', when_used: Literal['always',
-    'unless-none', 'json', 'json-unless-none']='always', return_type: Any=
-    PydanticUndefined) ->Callable[[Any], Any]:
+@overload
+def model_serializer(__f: FuncType) -> FuncType: ...
+
+
+@overload
+def model_serializer(
+    *,
+    mode: Literal['plain', 'wrap'] = ...,
+    when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always',
+    return_type: Any = ...,
+) -> Callable[[FuncType], FuncType]: ...
+
+
+def model_serializer(
+    f: Callable[..., Any] | None = None,
+    /,
+    *,
+    mode: Literal['plain', 'wrap'] = 'plain',
+    when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always',
+    return_type: Any = PydanticUndefined,
+) -> Callable[[Any], Any]:
     """Decorator that enables custom model serialization.

     This is useful when a model needs to be serialized in a customized manner, allowing for flexibility beyond just specific fields.
@@ -284,12 +351,22 @@ def model_serializer(f: (Callable[..., Any] | None)=None, /, *, mode:
     Returns:
         The decorator function.
     """
-    pass
+
+    def dec(f: Callable[..., Any]) -> _decorators.PydanticDescriptorProxy[Any]:
+        dec_info = _decorators.ModelSerializerDecoratorInfo(mode=mode, return_type=return_type, when_used=when_used)
+        return _decorators.PydanticDescriptorProxy(f, dec_info)
+
+    if f is None:
+        return dec
+    else:
+        return dec(f)  # type: ignore


 AnyType = TypeVar('AnyType')
+
+
 if TYPE_CHECKING:
-    SerializeAsAny = Annotated[AnyType, ...]
+    SerializeAsAny = Annotated[AnyType, ...]  # SerializeAsAny[list[str]] will be treated by type checkers as list[str]
     """Force serialization to ignore whatever is defined in the schema and instead ask the object
     itself how it should be serialized.
     In particular, this means that when model subclasses are serialized, fields present in the subclass
@@ -297,22 +374,22 @@ if TYPE_CHECKING:
     """
 else:

-
     @dataclasses.dataclass(**_internal_dataclass.slots_true)
-    class SerializeAsAny:
-
-        def __class_getitem__(cls, item: Any) ->Any:
+    class SerializeAsAny:  # noqa: D101
+        def __class_getitem__(cls, item: Any) -> Any:
             return Annotated[item, SerializeAsAny()]

-        def __get_pydantic_core_schema__(self, source_type: Any, handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
+        def __get_pydantic_core_schema__(
+            self, source_type: Any, handler: GetCoreSchemaHandler
+        ) -> core_schema.CoreSchema:
             schema = handler(source_type)
             schema_to_update = schema
             while schema_to_update['type'] == 'definitions':
                 schema_to_update = schema_to_update.copy()
                 schema_to_update = schema_to_update['schema']
-            schema_to_update['serialization'
-                ] = core_schema.wrap_serializer_function_ser_schema(lambda
-                x, h: h(x), schema=core_schema.any_schema())
+            schema_to_update['serialization'] = core_schema.wrap_serializer_function_ser_schema(
+                lambda x, h: h(x), schema=core_schema.any_schema()
+            )
             return schema
+
         __hash__ = object.__hash__
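
Taken together, the decorators implemented in this file are applied to model methods; a minimal sketch with invented class and method names:

    from pydantic import BaseModel, field_serializer, model_serializer

    class Tags(BaseModel):
        names: set[str]

        @field_serializer('names')
        def sort_names(self, value: set[str]) -> list[str]:
            return sorted(value)  # serialize the set deterministically

    class Envelope(BaseModel):
        payload: str

        @model_serializer
        def ser_model(self) -> dict:
            return {'payload': self.payload.upper()}  # replace the whole model's serialized form

    print(Tags(names={'b', 'a'}).model_dump())   # {'names': ['a', 'b']}
    print(Envelope(payload='hi').model_dump())   # {'payload': 'HI'}
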
diff --git a/pydantic/functional_validators.py b/pydantic/functional_validators.py
index 92ed2fbb5..b29880fd4 100644
--- a/pydantic/functional_validators.py
+++ b/pydantic/functional_validators.py
@@ -1,21 +1,27 @@
 """This module contains related classes and functions for validation."""
+
 from __future__ import annotations as _annotations
+
 import dataclasses
 import sys
 from functools import partialmethod
 from types import FunctionType
 from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, cast, overload
+
 from pydantic_core import core_schema
 from pydantic_core import core_schema as _core_schema
 from typing_extensions import Annotated, Literal, TypeAlias
+
 from . import GetCoreSchemaHandler as _GetCoreSchemaHandler
 from ._internal import _core_metadata, _decorators, _generics, _internal_dataclass
 from .annotated_handlers import GetCoreSchemaHandler
 from .errors import PydanticUserError
+
 if sys.version_info < (3, 11):
     from typing_extensions import Protocol
 else:
     from typing import Protocol
+
 _inspect_validator = _decorators.inspect_validator
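
The `Annotated` validator wrappers reworked in the hunks below (`BeforeValidator`, `AfterValidator`, `PlainValidator`, `WrapValidator`) are typically composed on a type alias; a hedged sketch with invented names:

    from typing import Annotated
    from pydantic import AfterValidator, BeforeValidator, TypeAdapter

    # coerce the raw input first (before), then transform the validated value (after)
    DoubledInt = Annotated[int, BeforeValidator(lambda v: int(v)), AfterValidator(lambda v: v * 2)]

    print(TypeAdapter(DoubledInt).validate_python('21'))  # 42
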


@@ -61,20 +67,18 @@ class AfterValidator:
             '''
         ```
     """
+
     func: core_schema.NoInfoValidatorFunction | core_schema.WithInfoValidatorFunction

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        _GetCoreSchemaHandler) ->core_schema.CoreSchema:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema:
         schema = handler(source_type)
         info_arg = _inspect_validator(self.func, 'after')
         if info_arg:
             func = cast(core_schema.WithInfoValidatorFunction, self.func)
-            return core_schema.with_info_after_validator_function(func,
-                schema=schema, field_name=handler.field_name)
+            return core_schema.with_info_after_validator_function(func, schema=schema, field_name=handler.field_name)
         else:
             func = cast(core_schema.NoInfoValidatorFunction, self.func)
-            return core_schema.no_info_after_validator_function(func,
-                schema=schema)
+            return core_schema.no_info_after_validator_function(func, schema=schema)


 @dataclasses.dataclass(frozen=True, **_internal_dataclass.slots_true)
@@ -107,20 +111,18 @@ class BeforeValidator:
             #> can only concatenate str (not "int") to str
         ```
     """
+
     func: core_schema.NoInfoValidatorFunction | core_schema.WithInfoValidatorFunction

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        _GetCoreSchemaHandler) ->core_schema.CoreSchema:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema:
         schema = handler(source_type)
         info_arg = _inspect_validator(self.func, 'before')
         if info_arg:
             func = cast(core_schema.WithInfoValidatorFunction, self.func)
-            return core_schema.with_info_before_validator_function(func,
-                schema=schema, field_name=handler.field_name)
+            return core_schema.with_info_before_validator_function(func, schema=schema, field_name=handler.field_name)
         else:
             func = cast(core_schema.NoInfoValidatorFunction, self.func)
-            return core_schema.no_info_before_validator_function(func,
-                schema=schema)
+            return core_schema.no_info_before_validator_function(func, schema=schema)


 @dataclasses.dataclass(frozen=True, **_internal_dataclass.slots_true)
@@ -147,26 +149,32 @@ class PlainValidator:
         #> 2
         ```
     """
+
     func: core_schema.NoInfoValidatorFunction | core_schema.WithInfoValidatorFunction

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        _GetCoreSchemaHandler) ->core_schema.CoreSchema:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema:
+        # Note that for some valid uses of PlainValidator, it is not possible to generate a core schema for the
+        # source_type, so calling `handler(source_type)` will error, which prevents us from generating a proper
+        # serialization schema. To work around this for use cases that will not involve serialization, we simply
+        # catch any PydanticSchemaGenerationError that may be raised while attempting to build the serialization schema
+        # and abort any attempts to handle special serialization.
         from pydantic import PydanticSchemaGenerationError
+
         try:
             schema = handler(source_type)
-            serialization = core_schema.wrap_serializer_function_ser_schema(
-                function=lambda v, h: h(v), schema=schema)
+            serialization = core_schema.wrap_serializer_function_ser_schema(function=lambda v, h: h(v), schema=schema)
         except PydanticSchemaGenerationError:
             serialization = None
+
         info_arg = _inspect_validator(self.func, 'plain')
         if info_arg:
             func = cast(core_schema.WithInfoValidatorFunction, self.func)
-            return core_schema.with_info_plain_validator_function(func,
-                field_name=handler.field_name, serialization=serialization)
+            return core_schema.with_info_plain_validator_function(
+                func, field_name=handler.field_name, serialization=serialization
+            )
         else:
             func = cast(core_schema.NoInfoValidatorFunction, self.func)
-            return core_schema.no_info_plain_validator_function(func,
-                serialization=serialization)
+            return core_schema.no_info_plain_validator_function(func, serialization=serialization)


 @dataclasses.dataclass(frozen=True, **_internal_dataclass.slots_true)
@@ -206,61 +214,90 @@ class WrapValidator:
     #> 2000-01-01 00:00:00
     ```
     """
+
     func: core_schema.NoInfoWrapValidatorFunction | core_schema.WithInfoWrapValidatorFunction

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        _GetCoreSchemaHandler) ->core_schema.CoreSchema:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema:
         schema = handler(source_type)
         info_arg = _inspect_validator(self.func, 'wrap')
         if info_arg:
             func = cast(core_schema.WithInfoWrapValidatorFunction, self.func)
-            return core_schema.with_info_wrap_validator_function(func,
-                schema=schema, field_name=handler.field_name)
+            return core_schema.with_info_wrap_validator_function(func, schema=schema, field_name=handler.field_name)
         else:
             func = cast(core_schema.NoInfoWrapValidatorFunction, self.func)
-            return core_schema.no_info_wrap_validator_function(func, schema
-                =schema)
+            return core_schema.no_info_wrap_validator_function(func, schema=schema)


 if TYPE_CHECKING:

-
     class _OnlyValueValidatorClsMethod(Protocol):
+        def __call__(self, cls: Any, value: Any, /) -> Any: ...

-        def __call__(self, cls: Any, value: Any, /) ->Any:
-            ...
+    class _V2ValidatorClsMethod(Protocol):
+        def __call__(self, cls: Any, value: Any, info: _core_schema.ValidationInfo, /) -> Any: ...

+    class _V2WrapValidatorClsMethod(Protocol):
+        def __call__(
+            self,
+            cls: Any,
+            value: Any,
+            handler: _core_schema.ValidatorFunctionWrapHandler,
+            info: _core_schema.ValidationInfo,
+            /,
+        ) -> Any: ...
+
+    _V2Validator = Union[
+        _V2ValidatorClsMethod,
+        _core_schema.WithInfoValidatorFunction,
+        _OnlyValueValidatorClsMethod,
+        _core_schema.NoInfoValidatorFunction,
+    ]
+
+    _V2WrapValidator = Union[
+        _V2WrapValidatorClsMethod,
+        _core_schema.WithInfoWrapValidatorFunction,
+    ]
+
+    _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any]]

-    class _V2ValidatorClsMethod(Protocol):
+    _V2BeforeAfterOrPlainValidatorType = TypeVar(
+        '_V2BeforeAfterOrPlainValidatorType',
+        _V2Validator,
+        _PartialClsOrStaticMethod,
+    )
+    _V2WrapValidatorType = TypeVar('_V2WrapValidatorType', _V2WrapValidator, _PartialClsOrStaticMethod)

-        def __call__(self, cls: Any, value: Any, info: _core_schema.
-            ValidationInfo, /) ->Any:
-            ...

+@overload
+def field_validator(
+    field: str,
+    /,
+    *fields: str,
+    mode: Literal['before', 'after', 'plain'] = ...,
+    check_fields: bool | None = ...,
+) -> Callable[[_V2BeforeAfterOrPlainValidatorType], _V2BeforeAfterOrPlainValidatorType]: ...
+
+
+@overload
+def field_validator(
+    field: str,
+    /,
+    *fields: str,
+    mode: Literal['wrap'],
+    check_fields: bool | None = ...,
+) -> Callable[[_V2WrapValidatorType], _V2WrapValidatorType]: ...

-    class _V2WrapValidatorClsMethod(Protocol):

-        def __call__(self, cls: Any, value: Any, handler: _core_schema.
-            ValidatorFunctionWrapHandler, info: _core_schema.ValidationInfo, /
-            ) ->Any:
-            ...
-    _V2Validator = Union[_V2ValidatorClsMethod, _core_schema.
-        WithInfoValidatorFunction, _OnlyValueValidatorClsMethod,
-        _core_schema.NoInfoValidatorFunction]
-    _V2WrapValidator = Union[_V2WrapValidatorClsMethod, _core_schema.
-        WithInfoWrapValidatorFunction]
-    _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any],
-        staticmethod[Any, Any], partialmethod[Any]]
-    _V2BeforeAfterOrPlainValidatorType = TypeVar(
-        '_V2BeforeAfterOrPlainValidatorType', _V2Validator,
-        _PartialClsOrStaticMethod)
-    _V2WrapValidatorType = TypeVar('_V2WrapValidatorType', _V2WrapValidator,
-        _PartialClsOrStaticMethod)
 FieldValidatorModes: TypeAlias = Literal['before', 'after', 'wrap', 'plain']


-def field_validator(field: str, /, *fields: str, mode: FieldValidatorModes=
-    'after', check_fields: (bool | None)=None) ->Callable[[Any], Any]:
+def field_validator(
+    field: str,
+    /,
+    *fields: str,
+    mode: FieldValidatorModes = 'after',
+    check_fields: bool | None = None,
+) -> Callable[[Any], Any]:
     """Usage docs: https://docs.pydantic.dev/2.8/concepts/validators/#field-validators

     Decorate methods on the class indicating that they should be used to validate fields.
@@ -317,19 +354,50 @@ def field_validator(field: str, /, *fields: str, mode: FieldValidatorModes=
             - If the args passed to `@field_validator` as fields are not strings.
             - If `@field_validator` applied to instance methods.
     """
-    pass
+    if isinstance(field, FunctionType):
+        raise PydanticUserError(
+            '`@field_validator` should be used with fields and keyword arguments, not bare. '
+            "E.g. usage should be `@validator('<field_name>', ...)`",
+            code='validator-no-fields',
+        )
+    fields = field, *fields
+    if not all(isinstance(field, str) for field in fields):
+        raise PydanticUserError(
+            '`@field_validator` fields should be passed as separate string args. '
+            "E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`",
+            code='validator-invalid-fields',
+        )
+
+    def dec(
+        f: Callable[..., Any] | staticmethod[Any, Any] | classmethod[Any, Any, Any],
+    ) -> _decorators.PydanticDescriptorProxy[Any]:
+        if _decorators.is_instance_method_from_sig(f):
+            raise PydanticUserError(
+                '`@field_validator` cannot be applied to instance methods', code='validator-instance-method'
+            )
+
+        # auto apply the @classmethod decorator
+        f = _decorators.ensure_classmethod_based_on_signature(f)
+
+        dec_info = _decorators.FieldValidatorDecoratorInfo(fields=fields, mode=mode, check_fields=check_fields)
+        return _decorators.PydanticDescriptorProxy(f, dec_info)
+
+    return dec


 _ModelType = TypeVar('_ModelType')
 _ModelTypeCo = TypeVar('_ModelTypeCo', covariant=True)


-class ModelWrapValidatorHandler(_core_schema.ValidatorFunctionWrapHandler,
-    Protocol[_ModelTypeCo]):
+class ModelWrapValidatorHandler(_core_schema.ValidatorFunctionWrapHandler, Protocol[_ModelTypeCo]):
     """@model_validator decorated function handler argument type. This is used when `mode='wrap'`."""

-    def __call__(self, value: Any, outer_location: (str | int | None)=None, /
-        ) ->_ModelTypeCo:
+    def __call__(  # noqa: D102
+        self,
+        value: Any,
+        outer_location: str | int | None = None,
+        /,
+    ) -> _ModelTypeCo:  # pragma: no cover
         ...
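
The handler protocol above is what a `mode='wrap'` model validator receives; a hedged sketch of typical usage (the tuple-coercion behaviour is invented for illustration):

    from typing import Any
    from pydantic import BaseModel, model_validator

    class Point(BaseModel):
        x: int
        y: int

        @model_validator(mode='wrap')
        @classmethod
        def accept_pairs(cls, value: Any, handler) -> 'Point':
            # allow (x, y) tuples as input, then delegate to normal validation via the handler
            if isinstance(value, tuple) and len(value) == 2:
                value = {'x': value[0], 'y': value[1]}
            return handler(value)

    print(Point.model_validate((1, 2)))  # x=1 y=2
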


@@ -338,18 +406,32 @@ class ModelWrapValidatorWithoutInfo(Protocol[_ModelType]):
     This is used when `mode='wrap'` and the function does not have info argument.
     """

-    def __call__(self, cls: type[_ModelType], value: Any, handler:
-        ModelWrapValidatorHandler[_ModelType], /) ->_ModelType:
-        ...
+    def __call__(  # noqa: D102
+        self,
+        cls: type[_ModelType],
+        # this can be a dict, a model instance
+        # or anything else that gets passed to validate_python
+        # thus validators _must_ handle all cases
+        value: Any,
+        handler: ModelWrapValidatorHandler[_ModelType],
+        /,
+    ) -> _ModelType: ...


 class ModelWrapValidator(Protocol[_ModelType]):
     """A @model_validator decorated function signature. This is used when `mode='wrap'`."""

-    def __call__(self, cls: type[_ModelType], value: Any, handler:
-        ModelWrapValidatorHandler[_ModelType], info: _core_schema.
-        ValidationInfo, /) ->_ModelType:
-        ...
+    def __call__(  # noqa: D102
+        self,
+        cls: type[_ModelType],
+        # this can be a dict, a model instance
+        # or anything else that gets passed to validate_python
+        # thus validators _must_ handle all cases
+        value: Any,
+        handler: ModelWrapValidatorHandler[_ModelType],
+        info: _core_schema.ValidationInfo,
+        /,
+    ) -> _ModelType: ...


 class FreeModelBeforeValidatorWithoutInfo(Protocol):
@@ -357,8 +439,14 @@ class FreeModelBeforeValidatorWithoutInfo(Protocol):
     This is used when `mode='before'` and the function does not have info argument.
     """

-    def __call__(self, value: Any, /) ->Any:
-        ...
+    def __call__(  # noqa: D102
+        self,
+        # this can be a dict, a model instance
+        # or anything else that gets passed to validate_python
+        # thus validators _must_ handle all cases
+        value: Any,
+        /,
+    ) -> Any: ...


 class ModelBeforeValidatorWithoutInfo(Protocol):
@@ -366,42 +454,92 @@ class ModelBeforeValidatorWithoutInfo(Protocol):
     This is used when `mode='before'` and the function does not have info argument.
     """

-    def __call__(self, cls: Any, value: Any, /) ->Any:
-        ...
+    def __call__(  # noqa: D102
+        self,
+        cls: Any,
+        # this can be a dict, a model instance
+        # or anything else that gets passed to validate_python
+        # thus validators _must_ handle all cases
+        value: Any,
+        /,
+    ) -> Any: ...


 class FreeModelBeforeValidator(Protocol):
     """A `@model_validator` decorated function signature. This is used when `mode='before'`."""

-    def __call__(self, value: Any, info: _core_schema.ValidationInfo, /) ->Any:
-        ...
+    def __call__(  # noqa: D102
+        self,
+        # this can be a dict, a model instance
+        # or anything else that gets passed to validate_python
+        # thus validators _must_ handle all cases
+        value: Any,
+        info: _core_schema.ValidationInfo,
+        /,
+    ) -> Any: ...


 class ModelBeforeValidator(Protocol):
     """A `@model_validator` decorated function signature. This is used when `mode='before'`."""

-    def __call__(self, cls: Any, value: Any, info: _core_schema.
-        ValidationInfo, /) ->Any:
-        ...
+    def __call__(  # noqa: D102
+        self,
+        cls: Any,
+        # this can be a dict, a model instance
+        # or anything else that gets passed to validate_python
+        # thus validators _must_ handle all cases
+        value: Any,
+        info: _core_schema.ValidationInfo,
+        /,
+    ) -> Any: ...


 ModelAfterValidatorWithoutInfo = Callable[[_ModelType], _ModelType]
 """A `@model_validator` decorated function signature. This is used when `mode='after'` and the function does not
 have info argument.
 """
-ModelAfterValidator = Callable[[_ModelType, _core_schema.ValidationInfo],
-    _ModelType]
+
+ModelAfterValidator = Callable[[_ModelType, _core_schema.ValidationInfo], _ModelType]
 """A `@model_validator` decorated function signature. This is used when `mode='after'`."""
-_AnyModelWrapValidator = Union[ModelWrapValidator[_ModelType],
-    ModelWrapValidatorWithoutInfo[_ModelType]]
-_AnyModeBeforeValidator = Union[FreeModelBeforeValidator,
-    ModelBeforeValidator, FreeModelBeforeValidatorWithoutInfo,
-    ModelBeforeValidatorWithoutInfo]
-_AnyModelAfterValidator = Union[ModelAfterValidator[_ModelType],
-    ModelAfterValidatorWithoutInfo[_ModelType]]

+_AnyModelWrapValidator = Union[ModelWrapValidator[_ModelType], ModelWrapValidatorWithoutInfo[_ModelType]]
+_AnyModeBeforeValidator = Union[
+    FreeModelBeforeValidator, ModelBeforeValidator, FreeModelBeforeValidatorWithoutInfo, ModelBeforeValidatorWithoutInfo
+]
+_AnyModelAfterValidator = Union[ModelAfterValidator[_ModelType], ModelAfterValidatorWithoutInfo[_ModelType]]
+
+
+@overload
+def model_validator(
+    *,
+    mode: Literal['wrap'],
+) -> Callable[
+    [_AnyModelWrapValidator[_ModelType]], _decorators.PydanticDescriptorProxy[_decorators.ModelValidatorDecoratorInfo]
+]: ...
+
+
+@overload
+def model_validator(
+    *,
+    mode: Literal['before'],
+) -> Callable[
+    [_AnyModeBeforeValidator], _decorators.PydanticDescriptorProxy[_decorators.ModelValidatorDecoratorInfo]
+]: ...

-def model_validator(*, mode: Literal['wrap', 'before', 'after']) ->Any:
+
+@overload
+def model_validator(
+    *,
+    mode: Literal['after'],
+) -> Callable[
+    [_AnyModelAfterValidator[_ModelType]], _decorators.PydanticDescriptorProxy[_decorators.ModelValidatorDecoratorInfo]
+]: ...
+
+
+def model_validator(
+    *,
+    mode: Literal['wrap', 'before', 'after'],
+) -> Any:
     """Usage docs: https://docs.pydantic.dev/2.8/concepts/validators/#model-validators

     Decorate model methods for validation purposes.
@@ -445,18 +583,28 @@ def model_validator(*, mode: Literal['wrap', 'before', 'after']) ->Any:
     Returns:
         A decorator that can be used to decorate a function to be used as a model validator.
     """
-    pass
+
+    def dec(f: Any) -> _decorators.PydanticDescriptorProxy[Any]:
+        # auto apply the @classmethod decorator
+        f = _decorators.ensure_classmethod_based_on_signature(f)
+        dec_info = _decorators.ModelValidatorDecoratorInfo(mode=mode)
+        return _decorators.PydanticDescriptorProxy(f, dec_info)
+
+    return dec


 AnyType = TypeVar('AnyType')
+
+
 if TYPE_CHECKING:
-    InstanceOf = Annotated[AnyType, ...]
-else:
+    # If we add configurable attributes to InstanceOf, we'd probably need to stop hiding it from type checkers like this
+    InstanceOf = Annotated[AnyType, ...]  # `InstanceOf[Sequence]` will be recognized by type checkers as `Sequence`

+else:

     @dataclasses.dataclass(**_internal_dataclass.slots_true)
     class InstanceOf:
-        """Generic type for annotating a type that is an instance of a given class.
+        '''Generic type for annotating a type that is an instance of a given class.

         Example:
             ```py
@@ -473,7 +621,7 @@ else:
                 Bar(foo=42)
             except ValidationError as e:
                 print(e)
-                ""\"
+                """
                 [
                 │   {
                 │   │   'type': 'is_instance_of',
@@ -484,36 +632,41 @@ else:
                 │   │   'url': 'https://errors.pydantic.dev/0.38.0/v/is_instance_of'
                 │   }
                 ]
-                ""\"
+                """
             ```
-        """
+        '''

         @classmethod
-        def __class_getitem__(cls, item: AnyType) ->AnyType:
+        def __class_getitem__(cls, item: AnyType) -> AnyType:
             return Annotated[item, cls()]

         @classmethod
-        def __get_pydantic_core_schema__(cls, source: Any, handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
+        def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
             from pydantic import PydanticSchemaGenerationError
-            instance_of_schema = core_schema.is_instance_schema(_generics.
-                get_origin(source) or source)
+
+            # use the generic _origin_ as the second argument to isinstance when appropriate
+            instance_of_schema = core_schema.is_instance_schema(_generics.get_origin(source) or source)
+
             try:
+                # Try to generate the "standard" schema, which will be used when loading from JSON
                 original_schema = handler(source)
             except PydanticSchemaGenerationError:
+                # If that fails, just produce a schema that can validate from python
                 return instance_of_schema
             else:
-                instance_of_schema['serialization'
-                    ] = core_schema.wrap_serializer_function_ser_schema(
-                    function=lambda v, h: h(v), schema=original_schema)
-                return core_schema.json_or_python_schema(python_schema=
-                    instance_of_schema, json_schema=original_schema)
+                # Use the "original" approach to serialization
+                instance_of_schema['serialization'] = core_schema.wrap_serializer_function_ser_schema(
+                    function=lambda v, h: h(v), schema=original_schema
+                )
+                return core_schema.json_or_python_schema(python_schema=instance_of_schema, json_schema=original_schema)
+
         __hash__ = object.__hash__
+
+
 if TYPE_CHECKING:
-    SkipValidation = Annotated[AnyType, ...]
+    SkipValidation = Annotated[AnyType, ...]  # SkipValidation[list[str]] will be treated by type checkers as list[str]
 else:

-
     @dataclasses.dataclass(**_internal_dataclass.slots_true)
     class SkipValidation:
         """If this is applied as an annotation (e.g., via `x: Annotated[int, SkipValidation]`), validation will be
@@ -527,16 +680,18 @@ else:
         annotation applied to a type.
         """

-        def __class_getitem__(cls, item: Any) ->Any:
+        def __class_getitem__(cls, item: Any) -> Any:
             return Annotated[item, SkipValidation()]

         @classmethod
-        def __get_pydantic_core_schema__(cls, source: Any, handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
+        def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
             original_schema = handler(source)
-            metadata = _core_metadata.build_metadata_dict(
-                js_annotation_functions=[lambda _c, h: h(original_schema)])
-            return core_schema.any_schema(metadata=metadata, serialization=
-                core_schema.wrap_serializer_function_ser_schema(function=lambda
-                v, h: h(v), schema=original_schema))
+            metadata = _core_metadata.build_metadata_dict(js_annotation_functions=[lambda _c, h: h(original_schema)])
+            return core_schema.any_schema(
+                metadata=metadata,
+                serialization=core_schema.wrap_serializer_function_ser_schema(
+                    function=lambda v, h: h(v), schema=original_schema
+                ),
+            )
+
         __hash__ = object.__hash__
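
End to end, the validator decorators restored in this file are used like this; a minimal sketch with invented field names:

    from pydantic import BaseModel, field_validator, model_validator

    class Signup(BaseModel):
        name: str
        password: str
        password_repeat: str

        @field_validator('name')
        @classmethod
        def strip_name(cls, v: str) -> str:
            return v.strip()

        @model_validator(mode='after')
        def check_passwords_match(self) -> 'Signup':
            if self.password != self.password_repeat:
                raise ValueError('passwords do not match')
            return self

    print(Signup(name='  ada  ', password='x', password_repeat='x').name)  # ada
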
diff --git a/pydantic/generics.py b/pydantic/generics.py
index 84c33fed7..3f1070d08 100644
--- a/pydantic/generics.py
+++ b/pydantic/generics.py
@@ -1,3 +1,5 @@
 """The `generics` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/json.py b/pydantic/json.py
index 5207cab1e..bcaff9f57 100644
--- a/pydantic/json.py
+++ b/pydantic/json.py
@@ -1,3 +1,5 @@
 """The `json` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/json_schema.py b/pydantic/json_schema.py
index 32ba228d9..63dcde7a3 100644
--- a/pydantic/json_schema.py
+++ b/pydantic/json_schema.py
@@ -8,7 +8,9 @@ In general you shouldn't need to use this module directly; instead, you can use
 [`BaseModel.model_json_schema`][pydantic.BaseModel.model_json_schema] and
 [`TypeAdapter.json_schema`][pydantic.TypeAdapter.json_schema].
 """
+
 from __future__ import annotations as _annotations
+
 import dataclasses
 import inspect
 import math
@@ -18,33 +20,64 @@ from collections import defaultdict
 from copy import deepcopy
 from dataclasses import is_dataclass
 from enum import Enum
-from typing import TYPE_CHECKING, Any, Callable, Counter, Dict, Hashable, Iterable, NewType, Pattern, Sequence, Tuple, TypeVar, Union, cast
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Counter,
+    Dict,
+    Hashable,
+    Iterable,
+    NewType,
+    Pattern,
+    Sequence,
+    Tuple,
+    TypeVar,
+    Union,
+    cast,
+)
+
 import pydantic_core
 from pydantic_core import CoreSchema, PydanticOmit, core_schema, to_jsonable_python
 from pydantic_core.core_schema import ComputedField
 from typing_extensions import Annotated, Literal, TypeAlias, assert_never, deprecated, final
+
 from pydantic.warnings import PydanticDeprecatedSince26
-from ._internal import _config, _core_metadata, _core_utils, _decorators, _internal_dataclass, _mock_val_ser, _schema_generation_shared, _typing_extra
+
+from ._internal import (
+    _config,
+    _core_metadata,
+    _core_utils,
+    _decorators,
+    _internal_dataclass,
+    _mock_val_ser,
+    _schema_generation_shared,
+    _typing_extra,
+)
 from .annotated_handlers import GetJsonSchemaHandler
 from .config import JsonDict, JsonSchemaExtraCallable, JsonValue
 from .errors import PydanticInvalidForJsonSchema, PydanticSchemaGenerationError, PydanticUserError
+
 if TYPE_CHECKING:
     from . import ConfigDict
     from ._internal._core_utils import CoreSchemaField, CoreSchemaOrField
     from ._internal._dataclasses import PydanticDataclass
     from ._internal._schema_generation_shared import GetJsonSchemaFunction
     from .main import BaseModel
-CoreSchemaOrFieldType = Literal[core_schema.CoreSchemaType, core_schema.
-    CoreSchemaFieldType]
+
+
+CoreSchemaOrFieldType = Literal[core_schema.CoreSchemaType, core_schema.CoreSchemaFieldType]
 """
 A type alias for defined schema types that represents a union of
 `core_schema.CoreSchemaType` and
 `core_schema.CoreSchemaFieldType`.
 """
+
 JsonSchemaValue = Dict[str, Any]
 """
 A type alias for a JSON schema value. This is a dictionary of string keys to arbitrary JSON values.
 """
+
 JsonSchemaMode = Literal['validation', 'serialization']
 """
 A type alias that represents the mode of a JSON schema; either 'validation' or 'serialization'.
@@ -54,15 +87,15 @@ computed fields will only be present when serializing, and should not be provide
 validating. This flag provides a way to indicate whether you want the JSON schema required
 for validation inputs, or that will be matched by serialization outputs.
 """
-_MODE_TITLE_MAPPING: dict[JsonSchemaMode, str] = {'validation': 'Input',
-    'serialization': 'Output'}
+
+_MODE_TITLE_MAPPING: dict[JsonSchemaMode, str] = {'validation': 'Input', 'serialization': 'Output'}


 @deprecated(
-    '`update_json_schema` is deprecated, use a simple `my_dict.update(update_dict)` call instead.'
-    , category=None)
-def update_json_schema(schema: JsonSchemaValue, updates: dict[str, Any]
-    ) ->JsonSchemaValue:
+    '`update_json_schema` is deprecated, use a simple `my_dict.update(update_dict)` call instead.',
+    category=None,
+)
+def update_json_schema(schema: JsonSchemaValue, updates: dict[str, Any]) -> JsonSchemaValue:
     """Update a JSON schema in-place by providing a dictionary of updates.

     This function sets the provided key-value pairs in the schema and returns the updated schema.
@@ -74,7 +107,8 @@ def update_json_schema(schema: JsonSchemaValue, updates: dict[str, Any]
     Returns:
         The updated JSON schema.
     """
-    pass
+    schema.update(updates)
+    return schema


 JsonSchemaWarningKind = Literal['skipped-choice', 'non-serializable-default']
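
`JsonSchemaWarningKind` feeds `GenerateJsonSchema.ignored_warning_kinds` (defined further below); customizing warnings or output is done by subclassing the generator and passing it through `model_json_schema`. A hedged sketch, mirroring the `$schema` note in the `generate` method below:

    from pydantic import BaseModel
    from pydantic.json_schema import GenerateJsonSchema

    class QuietGenerator(GenerateJsonSchema):
        # also ignore 'non-serializable-default' warnings, in addition to the default 'skipped-choice'
        ignored_warning_kinds = {'skipped-choice', 'non-serializable-default'}

        def generate(self, schema, mode='validation'):
            json_schema = super().generate(schema, mode=mode)
            json_schema['$schema'] = self.schema_dialect
            return json_schema

    class Item(BaseModel):
        name: str

    print(Item.model_json_schema(schema_generator=QuietGenerator)['$schema'])
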
@@ -94,11 +128,22 @@ class PydanticJsonSchemaWarning(UserWarning):
     """


+# ##### JSON Schema Generation #####
 DEFAULT_REF_TEMPLATE = '#/$defs/{model}'
 """The default format string used to generate reference names."""
+
+# There are three types of references relevant to building JSON schemas:
+#   1. core_schema "ref" values; these are not exposed as part of the JSON schema
+#       * these might look like the fully qualified path of a model, its id, or something similar
 CoreRef = NewType('CoreRef', str)
+#   2. keys of the "definitions" object that will eventually go into the JSON schema
+#       * by default, these look like "MyModel", though may change in the presence of collisions
+#       * eventually, we may want to make it easier to modify the way these names are generated
 DefsRef = NewType('DefsRef', str)
+#   3. the values corresponding to the "$ref" key in the schema
+#       * By default, these look like "#/$defs/MyModel", as in {"$ref": "#/$defs/MyModel"}
 JsonRef = NewType('JsonRef', str)
+
 CoreModeRef = Tuple[CoreRef, JsonSchemaMode]
 JsonSchemaKeyT = TypeVar('JsonSchemaKeyT', bound=Hashable)
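
The JsonRef values described above are what `ref_template` controls in the emitted schema; a hedged sketch (model names invented):

    from pydantic import BaseModel

    class Address(BaseModel):
        city: str

    class User(BaseModel):
        address: Address

    schema = User.model_json_schema(ref_template='#/components/schemas/{model}')
    print(schema['properties']['address']['$ref'])  # #/components/schemas/Address
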

@@ -109,20 +154,79 @@ class _DefinitionsRemapping:
     json_remapping: dict[JsonRef, JsonRef]

     @staticmethod
-    def from_prioritized_choices(prioritized_choices: dict[DefsRef, list[
-        DefsRef]], defs_to_json: dict[DefsRef, JsonRef], definitions: dict[
-        DefsRef, JsonSchemaValue]) ->_DefinitionsRemapping:
+    def from_prioritized_choices(
+        prioritized_choices: dict[DefsRef, list[DefsRef]],
+        defs_to_json: dict[DefsRef, JsonRef],
+        definitions: dict[DefsRef, JsonSchemaValue],
+    ) -> _DefinitionsRemapping:
         """
         This function should produce a remapping that replaces complex DefsRef with the simpler ones from the
         prioritized_choices such that applying the name remapping would result in an equivalent JSON schema.
         """
-        pass
-
-    def remap_json_schema(self, schema: Any) ->Any:
+        # We need to iteratively simplify the definitions until we reach a fixed point.
+        # The reason for this is that outer definitions may reference inner definitions that get simplified
+        # into an equivalent reference, and the outer definitions won't be equivalent until we've simplified
+        # the inner definitions.
+        copied_definitions = deepcopy(definitions)
+        definitions_schema = {'$defs': copied_definitions}
+        for _iter in range(100):  # prevent an infinite loop in the case of a bug, 100 iterations should be enough
+            # For every possible remapped DefsRef, collect all schemas that that DefsRef might be used for:
+            schemas_for_alternatives: dict[DefsRef, list[JsonSchemaValue]] = defaultdict(list)
+            for defs_ref in copied_definitions:
+                alternatives = prioritized_choices[defs_ref]
+                for alternative in alternatives:
+                    schemas_for_alternatives[alternative].append(copied_definitions[defs_ref])
+
+            # Deduplicate the schemas for each alternative; the idea is that we only want to remap to a new DefsRef
+            # if it introduces no ambiguity, i.e., there is only one distinct schema for that DefsRef.
+            for defs_ref, schemas in schemas_for_alternatives.items():
+                schemas_for_alternatives[defs_ref] = _deduplicate_schemas(schemas_for_alternatives[defs_ref])
+
+            # Build the remapping
+            defs_remapping: dict[DefsRef, DefsRef] = {}
+            json_remapping: dict[JsonRef, JsonRef] = {}
+            for original_defs_ref in definitions:
+                alternatives = prioritized_choices[original_defs_ref]
+                # Pick the first alternative that has only one schema, since that means there is no collision
+                remapped_defs_ref = next(x for x in alternatives if len(schemas_for_alternatives[x]) == 1)
+                defs_remapping[original_defs_ref] = remapped_defs_ref
+                json_remapping[defs_to_json[original_defs_ref]] = defs_to_json[remapped_defs_ref]
+            remapping = _DefinitionsRemapping(defs_remapping, json_remapping)
+            new_definitions_schema = remapping.remap_json_schema({'$defs': copied_definitions})
+            if definitions_schema == new_definitions_schema:
+                # We've reached the fixed point
+                return remapping
+            definitions_schema = new_definitions_schema
+
+        raise PydanticInvalidForJsonSchema('Failed to simplify the JSON schema definitions')
+
+    def remap_defs_ref(self, ref: DefsRef) -> DefsRef:
+        return self.defs_remapping.get(ref, ref)
+
+    def remap_json_ref(self, ref: JsonRef) -> JsonRef:
+        return self.json_remapping.get(ref, ref)
+
+    def remap_json_schema(self, schema: Any) -> Any:
         """
         Recursively update the JSON schema replacing all $refs
         """
-        pass
+        if isinstance(schema, str):
+            # Note: this may not really be a JsonRef; we rely on having no collisions between JsonRefs and other strings
+            return self.remap_json_ref(JsonRef(schema))
+        elif isinstance(schema, list):
+            return [self.remap_json_schema(item) for item in schema]
+        elif isinstance(schema, dict):
+            for key, value in schema.items():
+                if key == '$ref' and isinstance(value, str):
+                    schema['$ref'] = self.remap_json_ref(JsonRef(value))
+                elif key == '$defs':
+                    schema['$defs'] = {
+                        self.remap_defs_ref(DefsRef(key)): self.remap_json_schema(value)
+                        for key, value in schema['$defs'].items()
+                    }
+                else:
+                    schema[key] = self.remap_json_schema(value)
+        return schema


 class GenerateJsonSchema:
@@ -157,31 +261,63 @@ class GenerateJsonSchema:
     Raises:
         JsonSchemaError: If the instance of the class is inadvertently re-used after generating a schema.
     """
+
     schema_dialect = 'https://json-schema.org/draft/2020-12/schema'
+
+    # `self.render_warning_message` will do nothing if its argument `kind` is in `ignored_warning_kinds`;
+    # this value can be modified on subclasses to easily control which warnings are emitted
     ignored_warning_kinds: set[JsonSchemaWarningKind] = {'skipped-choice'}

-    def __init__(self, by_alias: bool=True, ref_template: str=
-        DEFAULT_REF_TEMPLATE):
+    def __init__(self, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE):
         self.by_alias = by_alias
         self.ref_template = ref_template
+
         self.core_to_json_refs: dict[CoreModeRef, JsonRef] = {}
         self.core_to_defs_refs: dict[CoreModeRef, DefsRef] = {}
         self.defs_to_core_refs: dict[DefsRef, CoreModeRef] = {}
         self.json_to_defs_refs: dict[JsonRef, DefsRef] = {}
+
         self.definitions: dict[DefsRef, JsonSchemaValue] = {}
-        self._config_wrapper_stack = _config.ConfigWrapperStack(_config.
-            ConfigWrapper({}))
+        self._config_wrapper_stack = _config.ConfigWrapperStack(_config.ConfigWrapper({}))
+
         self._mode: JsonSchemaMode = 'validation'
+
+        # The following includes a mapping of a fully-unique defs ref choice to a list of preferred
+        # alternatives, which are generally simpler, such as only including the class name.
+        # At the end of schema generation, we use these to produce a JSON schema with more human-readable
+        # definitions, which would also work better in a generated OpenAPI client, etc.
         self._prioritized_defsref_choices: dict[DefsRef, list[DefsRef]] = {}
         self._collision_counter: dict[str, int] = defaultdict(int)
         self._collision_index: dict[str, int] = {}
+
         self._schema_type_to_method = self.build_schema_type_to_method()
-        self._core_defs_invalid_for_json_schema: dict[DefsRef,
-            PydanticInvalidForJsonSchema] = {}
+
+        # When we encounter definitions we need to try to build them immediately
+        # so that they are available to schemas that reference them.
+        # But it's possible that the CoreSchema was never going to be used
+        # (e.g. because the CoreSchema that references it short-circuits JSON schema generation without needing
+        #  the reference), so instead of failing altogether if we can't build a definition we
+        # store the error raised and re-throw it if we end up needing that def.
+        self._core_defs_invalid_for_json_schema: dict[DefsRef, PydanticInvalidForJsonSchema] = {}
+
+        # This changes to True after generating a schema, to prevent issues caused by accidental re-use
+        # of a single instance of a schema generator
         self._used = False

-    def build_schema_type_to_method(self) ->dict[CoreSchemaOrFieldType,
-        Callable[[CoreSchemaOrField], JsonSchemaValue]]:
+    @property
+    def _config(self) -> _config.ConfigWrapper:
+        return self._config_wrapper_stack.tail
+
+    @property
+    def mode(self) -> JsonSchemaMode:
+        if self._config.json_schema_mode_override is not None:
+            return self._config.json_schema_mode_override
+        else:
+            return self._mode
+
+    def build_schema_type_to_method(
+        self,
+    ) -> dict[CoreSchemaOrFieldType, Callable[[CoreSchemaOrField], JsonSchemaValue]]:
         """Builds a dictionary mapping fields to methods for generating JSON schemas.

         Returns:
@@ -190,12 +326,24 @@ class GenerateJsonSchema:
         Raises:
             TypeError: If no method has been defined for generating a JSON schema for a given pydantic core schema type.
         """
-        pass
-
-    def generate_definitions(self, inputs: Sequence[tuple[JsonSchemaKeyT,
-        JsonSchemaMode, core_schema.CoreSchema]]) ->tuple[dict[tuple[
-        JsonSchemaKeyT, JsonSchemaMode], JsonSchemaValue], dict[DefsRef,
-        JsonSchemaValue]]:
+        mapping: dict[CoreSchemaOrFieldType, Callable[[CoreSchemaOrField], JsonSchemaValue]] = {}
+        core_schema_types: list[CoreSchemaOrFieldType] = _typing_extra.all_literal_values(
+            CoreSchemaOrFieldType  # type: ignore
+        )
+        for key in core_schema_types:
+            method_name = f"{key.replace('-', '_')}_schema"
+            try:
+                mapping[key] = getattr(self, method_name)
+            except AttributeError as e:  # pragma: no cover
+                raise TypeError(
+                    f'No method for generating JsonSchema for core_schema.type={key!r} '
+                    f'(expected: {type(self).__name__}.{method_name})'
+                ) from e
+        return mapping
+
+    def generate_definitions(
+        self, inputs: Sequence[tuple[JsonSchemaKeyT, JsonSchemaMode, core_schema.CoreSchema]]
+    ) -> tuple[dict[tuple[JsonSchemaKeyT, JsonSchemaMode], JsonSchemaValue], dict[DefsRef, JsonSchemaValue]]:
         """Generates JSON schema definitions from a list of core schemas, pairing the generated definitions with a
         mapping that links the input keys to the definition references.

@@ -218,10 +366,31 @@ class GenerateJsonSchema:
         Raises:
             PydanticUserError: Raised if the JSON schema generator has already been used to generate a JSON schema.
         """
-        pass
+        if self._used:
+            raise PydanticUserError(
+                'This JSON schema generator has already been used to generate a JSON schema. '
+                f'You must create a new instance of {type(self).__name__} to generate a new JSON schema.',
+                code='json-schema-already-used',
+            )
+
+        for key, mode, schema in inputs:
+            self._mode = mode
+            self.generate_inner(schema)
+
+        definitions_remapping = self._build_definitions_remapping()

-    def generate(self, schema: CoreSchema, mode: JsonSchemaMode='validation'
-        ) ->JsonSchemaValue:
+        json_schemas_map: dict[tuple[JsonSchemaKeyT, JsonSchemaMode], DefsRef] = {}
+        for key, mode, schema in inputs:
+            self._mode = mode
+            json_schema = self.generate_inner(schema)
+            json_schemas_map[(key, mode)] = definitions_remapping.remap_json_schema(json_schema)
+
+        json_schema = {'$defs': self.definitions}
+        json_schema = definitions_remapping.remap_json_schema(json_schema)
+        self._used = True
+        return json_schemas_map, _sort_json_schema(json_schema['$defs'])  # type: ignore
+
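For orientation, a minimal sketch (using hypothetical `Cat`/`Dog` models) of how `generate_definitions` is normally reached through the public `models_json_schema` helper, which pairs each input with a `$ref` and returns the shared `$defs` alongside it:

```python
# Minimal sketch, assuming hypothetical Cat/Dog models; models_json_schema
# drives generate_definitions under the hood.
from pydantic import BaseModel
from pydantic.json_schema import models_json_schema

class Cat(BaseModel):
    name: str

class Dog(BaseModel):
    name: str

# key_map pairs each (model, mode) with a '$ref'; defs_schema carries the shared '$defs'
key_map, defs_schema = models_json_schema([(Cat, 'validation'), (Dog, 'validation')])
print(key_map[(Cat, 'validation')])   # e.g. {'$ref': '#/$defs/Cat'}
print(sorted(defs_schema['$defs']))   # e.g. ['Cat', 'Dog']
```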
+    def generate(self, schema: CoreSchema, mode: JsonSchemaMode = 'validation') -> JsonSchemaValue:
         """Generates a JSON schema for a specified schema in a specified mode.

         Args:
@@ -234,9 +403,46 @@ class GenerateJsonSchema:
         Raises:
             PydanticUserError: If the JSON schema generator has already been used to generate a JSON schema.
         """
-        pass
-
-    def generate_inner(self, schema: CoreSchemaOrField) ->JsonSchemaValue:
+        self._mode = mode
+        if self._used:
+            raise PydanticUserError(
+                'This JSON schema generator has already been used to generate a JSON schema. '
+                f'You must create a new instance of {type(self).__name__} to generate a new JSON schema.',
+                code='json-schema-already-used',
+            )
+
+        json_schema: JsonSchemaValue = self.generate_inner(schema)
+        json_ref_counts = self.get_json_ref_counts(json_schema)
+
+        # Remove the top-level $ref if present; note that the _generate method already ensures there are no sibling keys
+        ref = cast(JsonRef, json_schema.get('$ref'))
+        while ref is not None:  # may need to unpack multiple levels
+            ref_json_schema = self.get_schema_from_definitions(ref)
+            if json_ref_counts[ref] > 1 or ref_json_schema is None:
+                # Keep the ref, but use an allOf to remove the top level $ref
+                json_schema = {'allOf': [{'$ref': ref}]}
+            else:
+                # "Unpack" the ref since this is the only reference
+                json_schema = ref_json_schema.copy()  # copy to prevent recursive dict reference
+                json_ref_counts[ref] -= 1
+            ref = cast(JsonRef, json_schema.get('$ref'))
+
+        self._garbage_collect_definitions(json_schema)
+        definitions_remapping = self._build_definitions_remapping()
+
+        if self.definitions:
+            json_schema['$defs'] = self.definitions
+
+        json_schema = definitions_remapping.remap_json_schema(json_schema)
+
+        # For now, we will not set the $schema key. However, if desired, this can be easily added by overriding
+        # this method and adding the following line after a call to super().generate(schema):
+        # json_schema['$schema'] = self.schema_dialect
+
+        self._used = True
+        return _sort_json_schema(json_schema)
+
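As the comment above suggests, adding the `$schema` key is a matter of overriding `generate`; a minimal sketch (the `Item` model and subclass name are illustrative only), plugged in via the documented `schema_generator` parameter:

```python
# Minimal sketch: a GenerateJsonSchema subclass that adds the dialect key.
from pydantic import BaseModel
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaMode

class DialectGenerateJsonSchema(GenerateJsonSchema):
    def generate(self, schema, mode: JsonSchemaMode = 'validation'):
        json_schema = super().generate(schema, mode=mode)
        json_schema['$schema'] = self.schema_dialect  # as suggested in the comment above
        return json_schema

class Item(BaseModel):
    name: str

print(Item.model_json_schema(schema_generator=DialectGenerateJsonSchema)['$schema'])
```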
+    def generate_inner(self, schema: CoreSchemaOrField) -> JsonSchemaValue:  # noqa: C901
         """Generates a JSON schema for a given core schema.

         Args:
@@ -245,9 +451,113 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        # If a schema with the same CoreRef has been handled, just return a reference to it
+        # Note that this assumes that it will _never_ be the case that the same CoreRef is used
+        # on types that should have different JSON schemas
+        if 'ref' in schema:
+            core_ref = CoreRef(schema['ref'])  # type: ignore[typeddict-item]
+            core_mode_ref = (core_ref, self.mode)
+            if core_mode_ref in self.core_to_defs_refs and self.core_to_defs_refs[core_mode_ref] in self.definitions:
+                return {'$ref': self.core_to_json_refs[core_mode_ref]}
+
+        # Generate the JSON schema, accounting for the json_schema_override and core_schema_override
+        metadata_handler = _core_metadata.CoreMetadataHandler(schema)
+
+        def populate_defs(core_schema: CoreSchema, json_schema: JsonSchemaValue) -> JsonSchemaValue:
+            if 'ref' in core_schema:
+                core_ref = CoreRef(core_schema['ref'])  # type: ignore[typeddict-item]
+                defs_ref, ref_json_schema = self.get_cache_defs_ref_schema(core_ref)
+                json_ref = JsonRef(ref_json_schema['$ref'])
+                self.json_to_defs_refs[json_ref] = defs_ref
+                # Replace the schema if it's not a reference to itself
+                # What we want to avoid is having the def be just a ref to itself
+                # which is what would happen if we blindly assigned any schema here,
+                # including one that is just a '$ref' back to this def
+                if json_schema.get('$ref', None) != json_ref:
+                    self.definitions[defs_ref] = json_schema
+                    self._core_defs_invalid_for_json_schema.pop(defs_ref, None)
+                json_schema = ref_json_schema
+            return json_schema
+
+        def convert_to_all_of(json_schema: JsonSchemaValue) -> JsonSchemaValue:
+            if '$ref' in json_schema and len(json_schema.keys()) > 1:
+                # technically you can't have any other keys next to a "$ref"
+                # but it's an easy mistake to make and not hard to correct automatically here
+                json_schema = json_schema.copy()
+                ref = json_schema.pop('$ref')
+                json_schema = {'allOf': [{'$ref': ref}], **json_schema}
+            return json_schema
+
+        def handler_func(schema_or_field: CoreSchemaOrField) -> JsonSchemaValue:
+            """Generate a JSON schema based on the input schema.
+
+            Args:
+                schema_or_field: The core schema to generate a JSON schema from.
+
+            Returns:
+                The generated JSON schema.
+
+            Raises:
+                TypeError: If an unexpected schema type is encountered.
+            """
+            # Generate the core-schema-type-specific bits of the schema generation:
+            json_schema: JsonSchemaValue | None = None
+            if self.mode == 'serialization' and 'serialization' in schema_or_field:
+                ser_schema = schema_or_field['serialization']  # type: ignore
+                json_schema = self.ser_schema(ser_schema)
+            if json_schema is None:
+                if _core_utils.is_core_schema(schema_or_field) or _core_utils.is_core_schema_field(schema_or_field):
+                    generate_for_schema_type = self._schema_type_to_method[schema_or_field['type']]
+                    json_schema = generate_for_schema_type(schema_or_field)
+                else:
+                    raise TypeError(f'Unexpected schema type: schema={schema_or_field}')
+            if _core_utils.is_core_schema(schema_or_field):
+                json_schema = populate_defs(schema_or_field, json_schema)
+                json_schema = convert_to_all_of(json_schema)
+            return json_schema
+
+        current_handler = _schema_generation_shared.GenerateJsonSchemaHandler(self, handler_func)
+
+        for js_modify_function in metadata_handler.metadata.get('pydantic_js_functions', ()):
+
+            def new_handler_func(
+                schema_or_field: CoreSchemaOrField,
+                current_handler: GetJsonSchemaHandler = current_handler,
+                js_modify_function: GetJsonSchemaFunction = js_modify_function,
+            ) -> JsonSchemaValue:
+                json_schema = js_modify_function(schema_or_field, current_handler)
+                if _core_utils.is_core_schema(schema_or_field):
+                    json_schema = populate_defs(schema_or_field, json_schema)
+                original_schema = current_handler.resolve_ref_schema(json_schema)
+                ref = json_schema.pop('$ref', None)
+                if ref and json_schema:
+                    original_schema.update(json_schema)
+                return original_schema
+
+            current_handler = _schema_generation_shared.GenerateJsonSchemaHandler(self, new_handler_func)
+
+        for js_modify_function in metadata_handler.metadata.get('pydantic_js_annotation_functions', ()):
+
+            def new_handler_func(
+                schema_or_field: CoreSchemaOrField,
+                current_handler: GetJsonSchemaHandler = current_handler,
+                js_modify_function: GetJsonSchemaFunction = js_modify_function,
+            ) -> JsonSchemaValue:
+                json_schema = js_modify_function(schema_or_field, current_handler)
+                if _core_utils.is_core_schema(schema_or_field):
+                    json_schema = populate_defs(schema_or_field, json_schema)
+                    json_schema = convert_to_all_of(json_schema)
+                return json_schema
+
+            current_handler = _schema_generation_shared.GenerateJsonSchemaHandler(self, new_handler_func)
+
+        json_schema = current_handler(schema)
+        if _core_utils.is_core_schema(schema):
+            json_schema = populate_defs(schema, json_schema)
+            json_schema = convert_to_all_of(json_schema)
+        return json_schema

-    def any_schema(self, schema: core_schema.AnySchema) ->JsonSchemaValue:
+    # ### Schema generation methods
+    def any_schema(self, schema: core_schema.AnySchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches any value.

         Args:
@@ -256,9 +566,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return {}

-    def none_schema(self, schema: core_schema.NoneSchema) ->JsonSchemaValue:
+    def none_schema(self, schema: core_schema.NoneSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches `None`.

         Args:
@@ -267,9 +577,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return {'type': 'null'}

-    def bool_schema(self, schema: core_schema.BoolSchema) ->JsonSchemaValue:
+    def bool_schema(self, schema: core_schema.BoolSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a bool value.

         Args:
@@ -278,9 +588,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return {'type': 'boolean'}

-    def int_schema(self, schema: core_schema.IntSchema) ->JsonSchemaValue:
+    def int_schema(self, schema: core_schema.IntSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches an int value.

         Args:
@@ -289,9 +599,12 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        json_schema: dict[str, Any] = {'type': 'integer'}
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.numeric)
+        json_schema = {k: v for k, v in json_schema.items() if v not in {math.inf, -math.inf}}
+        return json_schema

-    def float_schema(self, schema: core_schema.FloatSchema) ->JsonSchemaValue:
+    def float_schema(self, schema: core_schema.FloatSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a float value.

         Args:
@@ -300,10 +613,12 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        json_schema: dict[str, Any] = {'type': 'number'}
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.numeric)
+        json_schema = {k: v for k, v in json_schema.items() if v not in {math.inf, -math.inf}}
+        return json_schema

-    def decimal_schema(self, schema: core_schema.DecimalSchema
-        ) ->JsonSchemaValue:
+    def decimal_schema(self, schema: core_schema.DecimalSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a decimal value.

         Args:
@@ -312,9 +627,31 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        json_schema = self.str_schema(core_schema.str_schema())
+        if self.mode == 'validation':
+            multiple_of = schema.get('multiple_of')
+            le = schema.get('le')
+            ge = schema.get('ge')
+            lt = schema.get('lt')
+            gt = schema.get('gt')
+            json_schema = {
+                'anyOf': [
+                    self.float_schema(
+                        core_schema.float_schema(
+                            allow_inf_nan=schema.get('allow_inf_nan'),
+                            multiple_of=None if multiple_of is None else float(multiple_of),
+                            le=None if le is None else float(le),
+                            ge=None if ge is None else float(ge),
+                            lt=None if lt is None else float(lt),
+                            gt=None if gt is None else float(gt),
+                        )
+                    ),
+                    json_schema,
+                ],
+            }
+        return json_schema

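A short example of the resulting schemas for a bare `Decimal` (exact key order may vary between versions):

```python
# Minimal sketch: Decimal is anyOf number/string for validation and a plain
# string for serialization, per the branches above.
from decimal import Decimal
from pydantic import TypeAdapter

ta = TypeAdapter(Decimal)
print(ta.json_schema(mode='validation'))
# e.g. {'anyOf': [{'type': 'number'}, {'type': 'string'}]}
print(ta.json_schema(mode='serialization'))
# e.g. {'type': 'string'}
```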
-    def str_schema(self, schema: core_schema.StringSchema) ->JsonSchemaValue:
+    def str_schema(self, schema: core_schema.StringSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a string value.

         Args:
@@ -323,9 +660,14 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        json_schema = {'type': 'string'}
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.string)
+        if isinstance(json_schema.get('pattern'), Pattern):
+            # TODO: should we add regex flags to the pattern?
+            json_schema['pattern'] = json_schema.get('pattern').pattern  # type: ignore
+        return json_schema

-    def bytes_schema(self, schema: core_schema.BytesSchema) ->JsonSchemaValue:
+    def bytes_schema(self, schema: core_schema.BytesSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a bytes value.

         Args:
@@ -334,9 +676,11 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        json_schema = {'type': 'string', 'format': 'base64url' if self._config.ser_json_bytes == 'base64' else 'binary'}
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.bytes)
+        return json_schema

-    def date_schema(self, schema: core_schema.DateSchema) ->JsonSchemaValue:
+    def date_schema(self, schema: core_schema.DateSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a date value.

         Args:
@@ -345,9 +689,11 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        json_schema = {'type': 'string', 'format': 'date'}
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.date)
+        return json_schema

-    def time_schema(self, schema: core_schema.TimeSchema) ->JsonSchemaValue:
+    def time_schema(self, schema: core_schema.TimeSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a time value.

         Args:
@@ -356,10 +702,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return {'type': 'string', 'format': 'time'}

-    def datetime_schema(self, schema: core_schema.DatetimeSchema
-        ) ->JsonSchemaValue:
+    def datetime_schema(self, schema: core_schema.DatetimeSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a datetime value.

         Args:
@@ -368,10 +713,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return {'type': 'string', 'format': 'date-time'}

-    def timedelta_schema(self, schema: core_schema.TimedeltaSchema
-        ) ->JsonSchemaValue:
+    def timedelta_schema(self, schema: core_schema.TimedeltaSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a timedelta value.

         Args:
@@ -380,10 +724,11 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        if self._config.ser_json_timedelta == 'float':
+            return {'type': 'number'}
+        return {'type': 'string', 'format': 'duration'}

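A small example of the config switch read here (the `Job` model is illustrative only):

```python
# Minimal sketch: ser_json_timedelta='float' switches the schema from an
# ISO-8601 duration string to a plain number of seconds.
from datetime import timedelta
from pydantic import BaseModel, ConfigDict

class Job(BaseModel):
    model_config = ConfigDict(ser_json_timedelta='float')
    ttl: timedelta

print(Job.model_json_schema()['properties']['ttl'])
# e.g. {'title': 'Ttl', 'type': 'number'}
```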
-    def literal_schema(self, schema: core_schema.LiteralSchema
-        ) ->JsonSchemaValue:
+    def literal_schema(self, schema: core_schema.LiteralSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a literal value.

         Args:
@@ -392,9 +737,30 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
-
-    def enum_schema(self, schema: core_schema.EnumSchema) ->JsonSchemaValue:
+        expected = [v.value if isinstance(v, Enum) else v for v in schema['expected']]
+        # jsonify the expected values
+        expected = [to_jsonable_python(v) for v in expected]
+
+        result: dict[str, Any] = {'enum': expected}
+        if len(expected) == 1:
+            result['const'] = expected[0]
+
+        types = {type(e) for e in expected}
+        if types == {str}:
+            result['type'] = 'string'
+        elif types == {int}:
+            result['type'] = 'integer'
+        elif types == {float}:
+            result['type'] = 'numeric'
+        elif types == {bool}:
+            result['type'] = 'boolean'
+        elif types == {list}:
+            result['type'] = 'array'
+        elif types == {type(None)}:
+            result['type'] = 'null'
+        return result
+
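A brief example of the `Literal` handling above, showing the `enum`/`const` output and the inferred `type`:

```python
# Minimal sketch of literal_schema output.
from typing import Literal
from pydantic import TypeAdapter

print(TypeAdapter(Literal['red', 'green']).json_schema())
# e.g. {'enum': ['red', 'green'], 'type': 'string'}
print(TypeAdapter(Literal['red']).json_schema())
# e.g. {'const': 'red', 'enum': ['red'], 'type': 'string'}
```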
+    def enum_schema(self, schema: core_schema.EnumSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches an Enum value.

         Args:
@@ -403,10 +769,36 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
-
-    def is_instance_schema(self, schema: core_schema.IsInstanceSchema
-        ) ->JsonSchemaValue:
+        enum_type = schema['cls']
+        description = None if not enum_type.__doc__ else inspect.cleandoc(enum_type.__doc__)
+        if (
+            description == 'An enumeration.'
+        ):  # This is the default value provided by enum.EnumMeta.__new__; don't use it
+            description = None
+        result: dict[str, Any] = {'title': enum_type.__name__, 'description': description}
+        result = {k: v for k, v in result.items() if v is not None}
+
+        expected = [to_jsonable_python(v.value) for v in schema['members']]
+
+        result['enum'] = expected
+        if len(expected) == 1:
+            result['const'] = expected[0]
+
+        types = {type(e) for e in expected}
+        if issubclass(enum_type, str) or types == {str}:
+            result['type'] = 'string'
+        elif issubclass(enum_type, int) or types == {int}:
+            result['type'] = 'integer'
+        elif issubclass(enum_type, float) or types == {float}:
+            result['type'] = 'numeric'
+        elif types == {bool}:
+            result['type'] = 'boolean'
+        elif types == {list}:
+            result['type'] = 'array'
+
+        return result
+
+    def is_instance_schema(self, schema: core_schema.IsInstanceSchema) -> JsonSchemaValue:
         """Handles JSON schema generation for a core schema that checks if a value is an instance of a class.

         Unless overridden in a subclass, this raises an error.
@@ -417,10 +809,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self.handle_invalid_for_json_schema(schema, f'core_schema.IsInstanceSchema ({schema["cls"]})')

-    def is_subclass_schema(self, schema: core_schema.IsSubclassSchema
-        ) ->JsonSchemaValue:
+    def is_subclass_schema(self, schema: core_schema.IsSubclassSchema) -> JsonSchemaValue:
         """Handles JSON schema generation for a core schema that checks if a value is a subclass of a class.

         For backwards compatibility with v1, this does not raise an error, but can be overridden to change this.
@@ -431,10 +822,10 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        # Note: This is for compatibility with V1; you can override if you want different behavior.
+        return {}

-    def callable_schema(self, schema: core_schema.CallableSchema
-        ) ->JsonSchemaValue:
+    def callable_schema(self, schema: core_schema.CallableSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a callable value.

         Unless overridden in a subclass, this raises an error.
@@ -445,9 +836,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self.handle_invalid_for_json_schema(schema, 'core_schema.CallableSchema')

-    def list_schema(self, schema: core_schema.ListSchema) ->JsonSchemaValue:
+    def list_schema(self, schema: core_schema.ListSchema) -> JsonSchemaValue:
         """Returns a schema that matches a list schema.

         Args:
@@ -456,27 +847,34 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        items_schema = {} if 'items_schema' not in schema else self.generate_inner(schema['items_schema'])
+        json_schema = {'type': 'array', 'items': items_schema}
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.array)
+        return json_schema

-    @deprecated(
-        '`tuple_positional_schema` is deprecated. Use `tuple_schema` instead.',
-        category=None)
+    @deprecated('`tuple_positional_schema` is deprecated. Use `tuple_schema` instead.', category=None)
     @final
-    def tuple_positional_schema(self, schema: core_schema.TupleSchema
-        ) ->JsonSchemaValue:
+    def tuple_positional_schema(self, schema: core_schema.TupleSchema) -> JsonSchemaValue:
         """Replaced by `tuple_schema`."""
-        pass
-
-    @deprecated(
-        '`tuple_variable_schema` is deprecated. Use `tuple_schema` instead.',
-        category=None)
+        warnings.warn(
+            '`tuple_positional_schema` is deprecated. Use `tuple_schema` instead.',
+            PydanticDeprecatedSince26,
+            stacklevel=2,
+        )
+        return self.tuple_schema(schema)
+
+    @deprecated('`tuple_variable_schema` is deprecated. Use `tuple_schema` instead.', category=None)
     @final
-    def tuple_variable_schema(self, schema: core_schema.TupleSchema
-        ) ->JsonSchemaValue:
+    def tuple_variable_schema(self, schema: core_schema.TupleSchema) -> JsonSchemaValue:
         """Replaced by `tuple_schema`."""
-        pass
-
-    def tuple_schema(self, schema: core_schema.TupleSchema) ->JsonSchemaValue:
+        warnings.warn(
+            '`tuple_variable_schema` is deprecated. Use `tuple_schema` instead.',
+            PydanticDeprecatedSince26,
+            stacklevel=2,
+        )
+        return self.tuple_schema(schema)
+
+    def tuple_schema(self, schema: core_schema.TupleSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a tuple schema e.g. `Tuple[int,
         str, bool]` or `Tuple[int, ...]`.

@@ -486,9 +884,32 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        json_schema: JsonSchemaValue = {'type': 'array'}
+        if 'variadic_item_index' in schema:
+            variadic_item_index = schema['variadic_item_index']
+            if variadic_item_index > 0:
+                json_schema['minItems'] = variadic_item_index
+                json_schema['prefixItems'] = [
+                    self.generate_inner(item) for item in schema['items_schema'][:variadic_item_index]
+                ]
+            if variadic_item_index + 1 == len(schema['items_schema']):
+                # if the variadic item is the last item, then represent it faithfully
+                json_schema['items'] = self.generate_inner(schema['items_schema'][variadic_item_index])
+            else:
+                # otherwise, 'items' represents the schema for the variadic
+                # item plus the suffix, so just allow anything for simplicity
+                # for now
+                json_schema['items'] = True
+        else:
+            prefixItems = [self.generate_inner(item) for item in schema['items_schema']]
+            if prefixItems:
+                json_schema['prefixItems'] = prefixItems
+            json_schema['minItems'] = len(prefixItems)
+            json_schema['maxItems'] = len(prefixItems)
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.array)
+        return json_schema

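A short example of the fixed-length vs. variadic branches above:

```python
# Minimal sketch: fixed-length tuples use prefixItems with matching
# min/maxItems; Tuple[int, ...] uses a plain items schema.
from typing import Tuple
from pydantic import TypeAdapter

print(TypeAdapter(Tuple[int, str]).json_schema())
# e.g. {'maxItems': 2, 'minItems': 2,
#       'prefixItems': [{'type': 'integer'}, {'type': 'string'}], 'type': 'array'}
print(TypeAdapter(Tuple[int, ...]).json_schema())
# e.g. {'items': {'type': 'integer'}, 'type': 'array'}
```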
-    def set_schema(self, schema: core_schema.SetSchema) ->JsonSchemaValue:
+    def set_schema(self, schema: core_schema.SetSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a set schema.

         Args:
@@ -497,10 +918,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self._common_set_schema(schema)

-    def frozenset_schema(self, schema: core_schema.FrozenSetSchema
-        ) ->JsonSchemaValue:
+    def frozenset_schema(self, schema: core_schema.FrozenSetSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a frozenset schema.

         Args:
@@ -509,10 +929,15 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self._common_set_schema(schema)
+
+    def _common_set_schema(self, schema: core_schema.SetSchema | core_schema.FrozenSetSchema) -> JsonSchemaValue:
+        items_schema = {} if 'items_schema' not in schema else self.generate_inner(schema['items_schema'])
+        json_schema = {'type': 'array', 'uniqueItems': True, 'items': items_schema}
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.array)
+        return json_schema

-    def generator_schema(self, schema: core_schema.GeneratorSchema
-        ) ->JsonSchemaValue:
+    def generator_schema(self, schema: core_schema.GeneratorSchema) -> JsonSchemaValue:
         """Returns a JSON schema that represents the provided GeneratorSchema.

         Args:
@@ -521,9 +946,12 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        items_schema = {} if 'items_schema' not in schema else self.generate_inner(schema['items_schema'])
+        json_schema = {'type': 'array', 'items': items_schema}
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.array)
+        return json_schema

-    def dict_schema(self, schema: core_schema.DictSchema) ->JsonSchemaValue:
+    def dict_schema(self, schema: core_schema.DictSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a dict schema.

         Args:
@@ -532,10 +960,37 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        json_schema: JsonSchemaValue = {'type': 'object'}
+
+        keys_schema = self.generate_inner(schema['keys_schema']).copy() if 'keys_schema' in schema else {}
+        keys_pattern = keys_schema.pop('pattern', None)
+
+        values_schema = self.generate_inner(schema['values_schema']).copy() if 'values_schema' in schema else {}
+        values_schema.pop('title', None)  # don't give a title to the additionalProperties
+        if values_schema or keys_pattern is not None:  # don't add additionalProperties if it's empty
+            if keys_pattern is None:
+                json_schema['additionalProperties'] = values_schema
+            else:
+                json_schema['patternProperties'] = {keys_pattern: values_schema}
+
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.object)
+        return json_schema

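An example of the two output shapes this method produces, depending on whether the key schema carries a pattern:

```python
# Minimal sketch: value constraints land in additionalProperties; a key
# pattern switches the output to patternProperties.
from typing import Dict
from pydantic import TypeAdapter, constr

print(TypeAdapter(Dict[str, int]).json_schema())
# e.g. {'additionalProperties': {'type': 'integer'}, 'type': 'object'}
print(TypeAdapter(Dict[constr(pattern=r'^x-'), int]).json_schema())
# e.g. {'patternProperties': {'^x-': {'type': 'integer'}}, 'type': 'object'}
```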
-    def function_before_schema(self, schema: core_schema.
-        BeforeValidatorFunctionSchema) ->JsonSchemaValue:
+    def _function_schema(
+        self,
+        schema: _core_utils.AnyFunctionSchema,
+    ) -> JsonSchemaValue:
+        if _core_utils.is_function_with_inner_schema(schema):
+            # This could be wrong if the function's mode is 'before', but in practice will often be right, and when it
+            # isn't, I think it would be hard to automatically infer what the desired schema should be.
+            return self.generate_inner(schema['schema'])
+
+        # function-plain
+        return self.handle_invalid_for_json_schema(
+            schema, f'core_schema.PlainValidatorFunctionSchema ({schema["function"]})'
+        )
+
+    def function_before_schema(self, schema: core_schema.BeforeValidatorFunctionSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a function-before schema.

         Args:
@@ -544,10 +999,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self._function_schema(schema)

-    def function_after_schema(self, schema: core_schema.
-        AfterValidatorFunctionSchema) ->JsonSchemaValue:
+    def function_after_schema(self, schema: core_schema.AfterValidatorFunctionSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a function-after schema.

         Args:
@@ -556,10 +1010,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self._function_schema(schema)

-    def function_plain_schema(self, schema: core_schema.
-        PlainValidatorFunctionSchema) ->JsonSchemaValue:
+    def function_plain_schema(self, schema: core_schema.PlainValidatorFunctionSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a function-plain schema.

         Args:
@@ -568,10 +1021,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self._function_schema(schema)

-    def function_wrap_schema(self, schema: core_schema.
-        WrapValidatorFunctionSchema) ->JsonSchemaValue:
+    def function_wrap_schema(self, schema: core_schema.WrapValidatorFunctionSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a function-wrap schema.

         Args:
@@ -580,10 +1032,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self._function_schema(schema)

-    def default_schema(self, schema: core_schema.WithDefaultSchema
-        ) ->JsonSchemaValue:
+    def default_schema(self, schema: core_schema.WithDefaultSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema with a default value.

         Args:
@@ -592,10 +1043,51 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        json_schema = self.generate_inner(schema['schema'])
+
+        if 'default' not in schema:
+            return json_schema
+        default = schema['default']
+        # Note: if you want to include the value returned by the default_factory,
+        # override this method and replace the code above with:
+        # if 'default' in schema:
+        #     default = schema['default']
+        # elif 'default_factory' in schema:
+        #     default = schema['default_factory']()
+        # else:
+        #     return json_schema
+
+        # we reflect the application of custom plain, no-info serializers to defaults for
+        # json schemas viewed in serialization mode
+        # TODO: improvements along with https://github.com/pydantic/pydantic/issues/8208
+        # TODO: improve type safety here
+        if self.mode == 'serialization':
+            if (
+                (ser_schema := schema['schema'].get('serialization', {}))
+                and (ser_func := ser_schema.get('function'))
+                and ser_schema.get('type') == 'function-plain'  # type: ignore
+                and ser_schema.get('info_arg') is False  # type: ignore
+            ):
+                default = ser_func(default)  # type: ignore
+
+        try:
+            encoded_default = self.encode_default(default)
+        except pydantic_core.PydanticSerializationError:
+            self.emit_warning(
+                'non-serializable-default',
+                f'Default value {default} is not JSON serializable; excluding default from JSON schema',
+            )
+            # Return the inner schema, as though there was no default
+            return json_schema
+
+        if '$ref' in json_schema:
+            # Since reference schemas do not support child keys, we wrap the reference schema in a single-case allOf:
+            return {'allOf': [json_schema], 'default': encoded_default}
+        else:
+            json_schema['default'] = encoded_default
+            return json_schema

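A small example of the common case, where a serializable default is embedded under `default` (the `Settings` model is illustrative only):

```python
# Minimal sketch of default_schema output for a plain field with a default.
from pydantic import BaseModel

class Settings(BaseModel):
    retries: int = 3

print(Settings.model_json_schema()['properties']['retries'])
# e.g. {'default': 3, 'title': 'Retries', 'type': 'integer'}
```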
-    def nullable_schema(self, schema: core_schema.NullableSchema
-        ) ->JsonSchemaValue:
+    def nullable_schema(self, schema: core_schema.NullableSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that allows null values.

         Args:
@@ -604,9 +1096,17 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        null_schema = {'type': 'null'}
+        inner_json_schema = self.generate_inner(schema['schema'])

-    def union_schema(self, schema: core_schema.UnionSchema) ->JsonSchemaValue:
+        if inner_json_schema == null_schema:
+            return null_schema
+        else:
+            # Thanks to the equality check against `null_schema` above, I think 'oneOf' would also be valid here;
+            # I'll use 'anyOf' for now, but it could be changed if it would work better with some external tooling
+            return self.get_flattened_anyof([inner_json_schema, null_schema])
+
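A one-line example of the resulting shape:

```python
# Minimal sketch: Optional types become an anyOf with {'type': 'null'}.
from typing import Optional
from pydantic import TypeAdapter

print(TypeAdapter(Optional[int]).json_schema())
# e.g. {'anyOf': [{'type': 'integer'}, {'type': 'null'}]}
```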
+    def union_schema(self, schema: core_schema.UnionSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that allows values matching any of the given schemas.

         Args:
@@ -615,10 +1115,23 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
-
-    def tagged_union_schema(self, schema: core_schema.TaggedUnionSchema
-        ) ->JsonSchemaValue:
+        generated: list[JsonSchemaValue] = []
+
+        choices = schema['choices']
+        for choice in choices:
+            # choice will be a tuple if an explicit label was provided
+            choice_schema = choice[0] if isinstance(choice, tuple) else choice
+            try:
+                generated.append(self.generate_inner(choice_schema))
+            except PydanticOmit:
+                continue
+            except PydanticInvalidForJsonSchema as exc:
+                self.emit_warning('skipped-choice', exc.message)
+        if len(generated) == 1:
+            return generated[0]
+        return self.get_flattened_anyof(generated)
+
+    def tagged_union_schema(self, schema: core_schema.TaggedUnionSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that allows values matching any of the given schemas, where
         the schemas are tagged with a discriminator field that indicates which schema should be used to validate
         the value.
@@ -629,15 +1142,73 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        generated: dict[str, JsonSchemaValue] = {}
+        for k, v in schema['choices'].items():
+            if isinstance(k, Enum):
+                k = k.value
+            try:
+                # Use str(k) since keys must be strings for json; while not technically correct,
+                # it's the closest that can be represented in valid JSON
+                generated[str(k)] = self.generate_inner(v).copy()
+            except PydanticOmit:
+                continue
+            except PydanticInvalidForJsonSchema as exc:
+                self.emit_warning('skipped-choice', exc.message)
+
+        one_of_choices = _deduplicate_schemas(generated.values())
+        json_schema: JsonSchemaValue = {'oneOf': one_of_choices}
+
+        # This reflects the v1 behavior; TODO: we should make it possible to exclude OpenAPI stuff from the JSON schema
+        openapi_discriminator = self._extract_discriminator(schema, one_of_choices)
+        if openapi_discriminator is not None:
+            json_schema['discriminator'] = {
+                'propertyName': openapi_discriminator,
+                'mapping': {k: v.get('$ref', v) for k, v in generated.items()},
+            }
+
+        return json_schema

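For illustration, a minimal sketch (with hypothetical `Cat`/`Dog` models) of the `oneOf` plus OpenAPI-style `discriminator` output described above:

```python
# Minimal sketch of tagged_union_schema output for a discriminated union.
from typing import Annotated, Literal, Union
from pydantic import BaseModel, Field, TypeAdapter

class Cat(BaseModel):
    pet_type: Literal['cat']

class Dog(BaseModel):
    pet_type: Literal['dog']

Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')]
schema = TypeAdapter(Pet).json_schema()
print(schema['discriminator'])
# e.g. {'propertyName': 'pet_type',
#       'mapping': {'cat': '#/$defs/Cat', 'dog': '#/$defs/Dog'}}
```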
-    def _extract_discriminator(self, schema: core_schema.TaggedUnionSchema,
-        one_of_choices: list[JsonDict]) ->(str | None):
+    def _extract_discriminator(
+        self, schema: core_schema.TaggedUnionSchema, one_of_choices: list[JsonDict]
+    ) -> str | None:
         """Extract a compatible OpenAPI discriminator from the schema and one_of choices that end up in the final
         schema."""
-        pass
-
-    def chain_schema(self, schema: core_schema.ChainSchema) ->JsonSchemaValue:
+        openapi_discriminator: str | None = None
+
+        if isinstance(schema['discriminator'], str):
+            return schema['discriminator']
+
+        if isinstance(schema['discriminator'], list):
+            # If the discriminator is a single item list containing a string, that is equivalent to the string case
+            if len(schema['discriminator']) == 1 and isinstance(schema['discriminator'][0], str):
+                return schema['discriminator'][0]
+            # When an alias is used that is different from the field name, the discriminator will be a list of single
+            # str lists, one for the attribute and one for the actual alias. The logic here will work even if there is
+            # more than one possible attribute, and looks for whether a single alias choice is present as a documented
+            # property on all choices. If so, that property will be used as the OpenAPI discriminator.
+            for alias_path in schema['discriminator']:
+                if not isinstance(alias_path, list):
+                    break  # this means that the discriminator is not a list of alias paths
+                if len(alias_path) != 1:
+                    continue  # this means that the "alias" does not represent a single field
+                alias = alias_path[0]
+                if not isinstance(alias, str):
+                    continue  # this means that the "alias" does not represent a field
+                alias_is_present_on_all_choices = True
+                for choice in one_of_choices:
+                    while '$ref' in choice:
+                        assert isinstance(choice['$ref'], str)
+                        choice = self.get_schema_from_definitions(JsonRef(choice['$ref'])) or {}
+                    properties = choice.get('properties', {})
+                    if not isinstance(properties, dict) or alias not in properties:
+                        alias_is_present_on_all_choices = False
+                        break
+                if alias_is_present_on_all_choices:
+                    openapi_discriminator = alias
+                    break
+        return openapi_discriminator
+
+    def chain_schema(self, schema: core_schema.ChainSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a core_schema.ChainSchema.

         When generating a schema for validation, we return the validation JSON schema for the first step in the chain.
@@ -649,10 +1220,10 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        step_index = 0 if self.mode == 'validation' else -1  # use first step for validation, last for serialization
+        return self.generate_inner(schema['steps'][step_index])

-    def lax_or_strict_schema(self, schema: core_schema.LaxOrStrictSchema
-        ) ->JsonSchemaValue:
+    def lax_or_strict_schema(self, schema: core_schema.LaxOrStrictSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that allows values matching either the lax schema or the
         strict schema.

@@ -662,10 +1233,16 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        # TODO: Need to read the default value off of model config or whatever
+        use_strict = schema.get('strict', False)  # TODO: replace this default False
+        # If your JSON schema fails to generate it is probably
+        # because one of the following two branches failed.
+        if use_strict:
+            return self.generate_inner(schema['strict_schema'])
+        else:
+            return self.generate_inner(schema['lax_schema'])

-    def json_or_python_schema(self, schema: core_schema.JsonOrPythonSchema
-        ) ->JsonSchemaValue:
+    def json_or_python_schema(self, schema: core_schema.JsonOrPythonSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that allows values matching either the JSON schema or the
         Python schema.

@@ -678,10 +1255,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self.generate_inner(schema['json_schema'])

-    def typed_dict_schema(self, schema: core_schema.TypedDictSchema
-        ) ->JsonSchemaValue:
+    def typed_dict_schema(self, schema: core_schema.TypedDictSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a typed dict.

         Args:
@@ -690,10 +1266,88 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        total = schema.get('total', True)
+        named_required_fields: list[tuple[str, bool, CoreSchemaField]] = [
+            (name, self.field_is_required(field, total), field)
+            for name, field in schema['fields'].items()
+            if self.field_is_present(field)
+        ]
+        if self.mode == 'serialization':
+            named_required_fields.extend(self._name_required_computed_fields(schema.get('computed_fields', [])))
+        cls = _get_typed_dict_cls(schema)
+        config = _get_typed_dict_config(cls)
+        with self._config_wrapper_stack.push(config):
+            json_schema = self._named_required_fields_schema(named_required_fields)
+
+        json_schema_extra = config.get('json_schema_extra')
+        extra = schema.get('extra_behavior')
+        if extra is None:
+            extra = config.get('extra', 'ignore')
+
+        if cls is not None:
+            title = config.get('title') or cls.__name__
+            json_schema = self._update_class_schema(json_schema, title, extra, cls, json_schema_extra)
+        else:
+            if extra == 'forbid':
+                json_schema['additionalProperties'] = False
+            elif extra == 'allow':
+                json_schema['additionalProperties'] = True
+
+        return json_schema

-    def typed_dict_field_schema(self, schema: core_schema.TypedDictField
-        ) ->JsonSchemaValue:
+    @staticmethod
+    def _name_required_computed_fields(
+        computed_fields: list[ComputedField],
+    ) -> list[tuple[str, bool, core_schema.ComputedField]]:
+        return [(field['property_name'], True, field) for field in computed_fields]
+
+    def _named_required_fields_schema(
+        self, named_required_fields: Sequence[tuple[str, bool, CoreSchemaField]]
+    ) -> JsonSchemaValue:
+        properties: dict[str, JsonSchemaValue] = {}
+        required_fields: list[str] = []
+        for name, required, field in named_required_fields:
+            if self.by_alias:
+                name = self._get_alias_name(field, name)
+            try:
+                field_json_schema = self.generate_inner(field).copy()
+            except PydanticOmit:
+                continue
+            if 'title' not in field_json_schema and self.field_title_should_be_set(field):
+                title = self.get_title_from_name(name)
+                field_json_schema['title'] = title
+            field_json_schema = self.handle_ref_overrides(field_json_schema)
+            properties[name] = field_json_schema
+            if required:
+                required_fields.append(name)
+
+        json_schema = {'type': 'object', 'properties': properties}
+        if required_fields:
+            json_schema['required'] = required_fields
+        return json_schema
+
+    def _get_alias_name(self, field: CoreSchemaField, name: str) -> str:
+        if field['type'] == 'computed-field':
+            alias: Any = field.get('alias', name)
+        elif self.mode == 'validation':
+            alias = field.get('validation_alias', name)
+        else:
+            alias = field.get('serialization_alias', name)
+        if isinstance(alias, str):
+            name = alias
+        elif isinstance(alias, list):
+            alias = cast('list[str] | str', alias)
+            for path in alias:
+                if isinstance(path, list) and len(path) == 1 and isinstance(path[0], str):
+                    # Use the first valid single-item string path; the code that constructs the alias array
+                    # should ensure the first such item is what belongs in the JSON schema
+                    name = path[0]
+                    break
+        else:
+            assert_never(alias)
+        return name
+
+    def typed_dict_field_schema(self, schema: core_schema.TypedDictField) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a typed dict field.

         Args:
@@ -702,10 +1356,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self.generate_inner(schema['schema'])

-    def dataclass_field_schema(self, schema: core_schema.DataclassField
-        ) ->JsonSchemaValue:
+    def dataclass_field_schema(self, schema: core_schema.DataclassField) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a dataclass field.

         Args:
@@ -714,10 +1367,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self.generate_inner(schema['schema'])

-    def model_field_schema(self, schema: core_schema.ModelField
-        ) ->JsonSchemaValue:
+    def model_field_schema(self, schema: core_schema.ModelField) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a model field.

         Args:
@@ -726,10 +1378,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self.generate_inner(schema['schema'])

-    def computed_field_schema(self, schema: core_schema.ComputedField
-        ) ->JsonSchemaValue:
+    def computed_field_schema(self, schema: core_schema.ComputedField) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a computed field.

         Args:
@@ -738,9 +1389,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self.generate_inner(schema['return_schema'])

-    def model_schema(self, schema: core_schema.ModelSchema) ->JsonSchemaValue:
+    def model_schema(self, schema: core_schema.ModelSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a model.

         Args:
@@ -749,10 +1400,75 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        # We do not use schema['model'].model_json_schema() here
+        # because it could lead to inconsistent refs handling, etc.
+        cls = cast('type[BaseModel]', schema['cls'])
+        config = cls.model_config
+        title = config.get('title')
+
+        with self._config_wrapper_stack.push(config):
+            json_schema = self.generate_inner(schema['schema'])
+
+        json_schema_extra = config.get('json_schema_extra')
+        if cls.__pydantic_root_model__:
+            root_json_schema_extra = cls.model_fields['root'].json_schema_extra
+            if json_schema_extra and root_json_schema_extra:
+                raise ValueError(
+                    '"model_config[\'json_schema_extra\']" and "Field.json_schema_extra" on "RootModel.root"'
+                    ' field must not be set simultaneously'
+                )
+            if root_json_schema_extra:
+                json_schema_extra = root_json_schema_extra
+
+        json_schema = self._update_class_schema(json_schema, title, config.get('extra', None), cls, json_schema_extra)
+
+        return json_schema
+
+    def _update_class_schema(
+        self,
+        json_schema: JsonSchemaValue,
+        title: str | None,
+        extra: Literal['allow', 'ignore', 'forbid'] | None,
+        cls: type[Any],
+        json_schema_extra: JsonDict | JsonSchemaExtraCallable | None,
+    ) -> JsonSchemaValue:
+        if '$ref' in json_schema:
+            schema_to_update = self.get_schema_from_definitions(JsonRef(json_schema['$ref'])) or json_schema
+        else:
+            schema_to_update = json_schema
+
+        if title is not None:
+            # referenced_schema['title'] = title
+            schema_to_update.setdefault('title', title)
+
+        if 'additionalProperties' not in schema_to_update:
+            if extra == 'allow':
+                schema_to_update['additionalProperties'] = True
+            elif extra == 'forbid':
+                schema_to_update['additionalProperties'] = False
+
+        if isinstance(json_schema_extra, (staticmethod, classmethod)):
+            # In older versions of python, this is necessary to ensure staticmethod/classmethods are callable
+            json_schema_extra = json_schema_extra.__get__(cls)
+
+        if isinstance(json_schema_extra, dict):
+            schema_to_update.update(json_schema_extra)
+        elif callable(json_schema_extra):
+            if len(inspect.signature(json_schema_extra).parameters) > 1:
+                json_schema_extra(schema_to_update, cls)  # type: ignore
+            else:
+                json_schema_extra(schema_to_update)  # type: ignore
+        elif json_schema_extra is not None:
+            raise ValueError(
+                f"model_config['json_schema_extra']={json_schema_extra} should be a dict, callable, or None"
+            )
+
+        if hasattr(cls, '__deprecated__'):
+            json_schema['deprecated'] = True
+
+        return json_schema

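A short example of the `json_schema_extra` handling in `_update_class_schema`, using the single-argument callable form (the `Product` model and `add_examples` helper are illustrative only):

```python
# Minimal sketch: json_schema_extra as a one-argument callable that mutates
# the resolved class schema.
from pydantic import BaseModel, ConfigDict

def add_examples(schema: dict) -> None:
    schema['examples'] = [{'name': 'widget'}]

class Product(BaseModel):
    model_config = ConfigDict(json_schema_extra=add_examples)
    name: str

print(Product.model_json_schema()['examples'])
# e.g. [{'name': 'widget'}]
```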
-    def resolve_schema_to_update(self, json_schema: JsonSchemaValue
-        ) ->JsonSchemaValue:
+    def resolve_schema_to_update(self, json_schema: JsonSchemaValue) -> JsonSchemaValue:
         """Resolve a JsonSchemaValue to the non-ref schema if it is a $ref schema.

         Args:
@@ -761,10 +1477,16 @@ class GenerateJsonSchema:
         Returns:
             The resolved schema.
         """
-        pass
+        if '$ref' in json_schema:
+            schema_to_update = self.get_schema_from_definitions(JsonRef(json_schema['$ref']))
+            if schema_to_update is None:
+                raise RuntimeError(f'Cannot update undefined schema for $ref={json_schema["$ref"]}')
+            return self.resolve_schema_to_update(schema_to_update)
+        else:
+            schema_to_update = json_schema
+        return schema_to_update

-    def model_fields_schema(self, schema: core_schema.ModelFieldsSchema
-        ) ->JsonSchemaValue:
+    def model_fields_schema(self, schema: core_schema.ModelFieldsSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a model's fields.

         Args:
@@ -773,9 +1495,21 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        named_required_fields: list[tuple[str, bool, CoreSchemaField]] = [
+            (name, self.field_is_required(field, total=True), field)
+            for name, field in schema['fields'].items()
+            if self.field_is_present(field)
+        ]
+        if self.mode == 'serialization':
+            named_required_fields.extend(self._name_required_computed_fields(schema.get('computed_fields', [])))
+        json_schema = self._named_required_fields_schema(named_required_fields)
+        extras_schema = schema.get('extras_schema', None)
+        if extras_schema is not None:
+            schema_to_update = self.resolve_schema_to_update(json_schema)
+            schema_to_update['additionalProperties'] = self.generate_inner(extras_schema)
+        return json_schema

-    def field_is_present(self, field: CoreSchemaField) ->bool:
+    def field_is_present(self, field: CoreSchemaField) -> bool:
         """Whether the field should be included in the generated JSON schema.

         Args:
@@ -784,11 +1518,20 @@ class GenerateJsonSchema:
         Returns:
             `True` if the field should be included in the generated JSON schema, `False` otherwise.
         """
-        pass
+        if self.mode == 'serialization':
+            # If you still want to include the field in the generated JSON schema,
+            # override this method and return True
+            return not field.get('serialization_exclude')
+        elif self.mode == 'validation':
+            return True
+        else:
+            assert_never(self.mode)

-    def field_is_required(self, field: (core_schema.ModelField |
-        core_schema.DataclassField | core_schema.TypedDictField), total: bool
-        ) ->bool:
+    def field_is_required(
+        self,
+        field: core_schema.ModelField | core_schema.DataclassField | core_schema.TypedDictField,
+        total: bool,
+    ) -> bool:
         """Whether the field should be marked as required in the generated JSON schema.
         (Note that this is irrelevant if the field is not present in the JSON schema.)

@@ -801,10 +1544,15 @@ class GenerateJsonSchema:
         Returns:
             `True` if the field should be marked as required in the generated JSON schema, `False` otherwise.
         """
-        pass
+        if self.mode == 'serialization' and self._config.json_schema_serialization_defaults_required:
+            return not field.get('serialization_exclude')
+        else:
+            if field['type'] == 'typed-dict-field':
+                return field.get('required', total)
+            else:
+                return field['schema']['type'] != 'default'

-    def dataclass_args_schema(self, schema: core_schema.DataclassArgsSchema
-        ) ->JsonSchemaValue:
+    def dataclass_args_schema(self, schema: core_schema.DataclassArgsSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a dataclass's constructor arguments.

         Args:
@@ -813,10 +1561,16 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        named_required_fields: list[tuple[str, bool, CoreSchemaField]] = [
+            (field['name'], self.field_is_required(field, total=True), field)
+            for field in schema['fields']
+            if self.field_is_present(field)
+        ]
+        if self.mode == 'serialization':
+            named_required_fields.extend(self._name_required_computed_fields(schema.get('computed_fields', [])))
+        return self._named_required_fields_schema(named_required_fields)

-    def dataclass_schema(self, schema: core_schema.DataclassSchema
-        ) ->JsonSchemaValue:
+    def dataclass_schema(self, schema: core_schema.DataclassSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a dataclass.

         Args:
@@ -825,10 +1579,28 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        cls = schema['cls']
+        config: ConfigDict = getattr(cls, '__pydantic_config__', cast('ConfigDict', {}))
+        title = config.get('title') or cls.__name__
+
+        with self._config_wrapper_stack.push(config):
+            json_schema = self.generate_inner(schema['schema']).copy()
+
+        json_schema_extra = config.get('json_schema_extra')
+        json_schema = self._update_class_schema(json_schema, title, config.get('extra', None), cls, json_schema_extra)
+
+        # Dataclass-specific handling of description
+        if is_dataclass(cls) and not hasattr(cls, '__pydantic_validator__'):
+            # vanilla dataclass; don't use cls.__doc__ as it will contain the class signature by default
+            description = None
+        else:
+            description = None if cls.__doc__ is None else inspect.cleandoc(cls.__doc__)
+        if description:
+            json_schema['description'] = description
+
+        return json_schema

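A small sketch of the dataclass handling above (assumes pydantic v2; the `User` dataclass is illustrative): the title comes from the dataclass's config (or falls back to the class name), and a pydantic dataclass's docstring becomes the schema description, while a vanilla dataclass's auto-generated signature docstring is ignored.

import pydantic
from pydantic import ConfigDict, TypeAdapter

@pydantic.dataclasses.dataclass(config=ConfigDict(title='User record'))
class User:
    """A user of the system."""

    id: int

schema = TypeAdapter(User).json_schema()
print(schema['title'])        # 'User record'
print(schema['description'])  # 'A user of the system.'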
-    def arguments_schema(self, schema: core_schema.ArgumentsSchema
-        ) ->JsonSchemaValue:
+    def arguments_schema(self, schema: core_schema.ArgumentsSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a function's arguments.

         Args:
@@ -837,11 +1609,37 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
-
-    def kw_arguments_schema(self, arguments: list[core_schema.
-        ArgumentsParameter], var_kwargs_schema: (CoreSchema | None)
-        ) ->JsonSchemaValue:
+        metadata = _core_metadata.CoreMetadataHandler(schema).metadata
+        prefer_positional = metadata.get('pydantic_js_prefer_positional_arguments')
+
+        arguments = schema['arguments_schema']
+        kw_only_arguments = [a for a in arguments if a.get('mode') == 'keyword_only']
+        kw_or_p_arguments = [a for a in arguments if a.get('mode') in {'positional_or_keyword', None}]
+        p_only_arguments = [a for a in arguments if a.get('mode') == 'positional_only']
+        var_args_schema = schema.get('var_args_schema')
+        var_kwargs_schema = schema.get('var_kwargs_schema')
+
+        if prefer_positional:
+            positional_possible = not kw_only_arguments and not var_kwargs_schema
+            if positional_possible:
+                return self.p_arguments_schema(p_only_arguments + kw_or_p_arguments, var_args_schema)
+
+        keyword_possible = not p_only_arguments and not var_args_schema
+        if keyword_possible:
+            return self.kw_arguments_schema(kw_or_p_arguments + kw_only_arguments, var_kwargs_schema)
+
+        if not prefer_positional:
+            positional_possible = not kw_only_arguments and not var_kwargs_schema
+            if positional_possible:
+                return self.p_arguments_schema(p_only_arguments + kw_or_p_arguments, var_args_schema)
+
+        raise PydanticInvalidForJsonSchema(
+            'Unable to generate JSON schema for arguments validator with positional-only and keyword-only arguments'
+        )
+
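A rough sketch of the keyword/positional branching above, driving the generator directly with a hand-built core schema (this touches internal APIs, so the exact output shape may vary between pydantic versions; the parameter names are illustrative). Because there are no positional-only parameters and no *args, the keyword (object) form is chosen.

from pydantic_core import core_schema
from pydantic.json_schema import GenerateJsonSchema

args = core_schema.arguments_schema(
    [
        core_schema.arguments_parameter('x', core_schema.int_schema()),
        core_schema.arguments_parameter('tag', core_schema.str_schema(), mode='keyword_only'),
    ]
)
print(GenerateJsonSchema().generate(args))
# an object schema: properties for 'x' and 'tag', both required, additionalProperties False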
+    def kw_arguments_schema(
+        self, arguments: list[core_schema.ArgumentsParameter], var_kwargs_schema: CoreSchema | None
+    ) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a function's keyword arguments.

         Args:
@@ -850,11 +1648,35 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        properties: dict[str, JsonSchemaValue] = {}
+        required: list[str] = []
+        for argument in arguments:
+            name = self.get_argument_name(argument)
+            argument_schema = self.generate_inner(argument['schema']).copy()
+            argument_schema['title'] = self.get_title_from_name(name)
+            properties[name] = argument_schema
+
+            if argument['schema']['type'] != 'default':
+                # This assumes that if the argument has a default value,
+                # the inner schema must be of type WithDefaultSchema.
+                # I believe this is true, but I am not 100% sure
+                required.append(name)
+
+        json_schema: JsonSchemaValue = {'type': 'object', 'properties': properties}
+        if required:
+            json_schema['required'] = required
+
+        if var_kwargs_schema:
+            additional_properties_schema = self.generate_inner(var_kwargs_schema)
+            if additional_properties_schema:
+                json_schema['additionalProperties'] = additional_properties_schema
+        else:
+            json_schema['additionalProperties'] = False
+        return json_schema

-    def p_arguments_schema(self, arguments: list[core_schema.
-        ArgumentsParameter], var_args_schema: (CoreSchema | None)
-        ) ->JsonSchemaValue:
+    def p_arguments_schema(
+        self, arguments: list[core_schema.ArgumentsParameter], var_args_schema: CoreSchema | None
+    ) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a function's positional arguments.

         Args:
@@ -863,10 +1685,36 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        prefix_items: list[JsonSchemaValue] = []
+        min_items = 0
+
+        for argument in arguments:
+            name = self.get_argument_name(argument)
+
+            argument_schema = self.generate_inner(argument['schema']).copy()
+            argument_schema['title'] = self.get_title_from_name(name)
+            prefix_items.append(argument_schema)
+
+            if argument['schema']['type'] != 'default':
+                # This assumes that if the argument has a default value,
+                # the inner schema must be of type WithDefaultSchema.
+                # I believe this is true, but I am not 100% sure
+                min_items += 1
+
+        json_schema: JsonSchemaValue = {'type': 'array', 'prefixItems': prefix_items}
+        if min_items:
+            json_schema['minItems'] = min_items

-    def get_argument_name(self, argument: core_schema.ArgumentsParameter
-        ) ->str:
+        if var_args_schema:
+            items_schema = self.generate_inner(var_args_schema)
+            if items_schema:
+                json_schema['items'] = items_schema
+        else:
+            json_schema['maxItems'] = len(prefix_items)
+
+        return json_schema
+
+    def get_argument_name(self, argument: core_schema.ArgumentsParameter) -> str:
         """Retrieves the name of an argument.

         Args:
@@ -875,9 +1723,16 @@ class GenerateJsonSchema:
         Returns:
             The name of the argument.
         """
-        pass
+        name = argument['name']
+        if self.by_alias:
+            alias = argument.get('alias')
+            if isinstance(alias, str):
+                name = alias
+            else:
+                pass  # alias is not a plain string (e.g. an alias path), so fall back to the argument name
+        return name

-    def call_schema(self, schema: core_schema.CallSchema) ->JsonSchemaValue:
+    def call_schema(self, schema: core_schema.CallSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a function call.

         Args:
@@ -886,10 +1741,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self.generate_inner(schema['arguments_schema'])

-    def custom_error_schema(self, schema: core_schema.CustomErrorSchema
-        ) ->JsonSchemaValue:
+    def custom_error_schema(self, schema: core_schema.CustomErrorSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a custom error.

         Args:
@@ -898,9 +1752,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return self.generate_inner(schema['schema'])

-    def json_schema(self, schema: core_schema.JsonSchema) ->JsonSchemaValue:
+    def json_schema(self, schema: core_schema.JsonSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a JSON object.

         Args:
@@ -909,9 +1763,15 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        content_core_schema = schema.get('schema') or core_schema.any_schema()
+        content_json_schema = self.generate_inner(content_core_schema)
+        if self.mode == 'validation':
+            return {'type': 'string', 'contentMediaType': 'application/json', 'contentSchema': content_json_schema}
+        else:
+            # self.mode == 'serialization'
+            return content_json_schema

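A brief sketch of the two modes above (assumes pydantic v2; the `Payload` model is illustrative): in validation mode a `Json[...]` field is a JSON-carrying string described via contentMediaType/contentSchema, while in serialization mode the inner schema is used directly.

from pydantic import BaseModel, Json

class Payload(BaseModel):
    data: Json[list[int]]

print(Payload.model_json_schema(mode='validation')['properties']['data'])
# {'contentMediaType': 'application/json', 'contentSchema': {'items': {'type': 'integer'}, 'type': 'array'},
#  'title': 'Data', 'type': 'string'}
print(Payload.model_json_schema(mode='serialization')['properties']['data'])
# {'items': {'type': 'integer'}, 'title': 'Data', 'type': 'array'}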
-    def url_schema(self, schema: core_schema.UrlSchema) ->JsonSchemaValue:
+    def url_schema(self, schema: core_schema.UrlSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a URL.

         Args:
@@ -920,10 +1780,11 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        json_schema = {'type': 'string', 'format': 'uri', 'minLength': 1}
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.string)
+        return json_schema

-    def multi_host_url_schema(self, schema: core_schema.MultiHostUrlSchema
-        ) ->JsonSchemaValue:
+    def multi_host_url_schema(self, schema: core_schema.MultiHostUrlSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a URL that can be used with multiple hosts.

         Args:
@@ -932,9 +1793,12 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        # Note: 'multi-host-uri' is a custom/pydantic-specific format, not part of the JSON Schema spec
+        json_schema = {'type': 'string', 'format': 'multi-host-uri', 'minLength': 1}
+        self.update_with_validations(json_schema, schema, self.ValidationsMapping.string)
+        return json_schema

-    def uuid_schema(self, schema: core_schema.UuidSchema) ->JsonSchemaValue:
+    def uuid_schema(self, schema: core_schema.UuidSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a UUID.

         Args:
@@ -943,10 +1807,9 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        return {'type': 'string', 'format': 'uuid'}

-    def definitions_schema(self, schema: core_schema.DefinitionsSchema
-        ) ->JsonSchemaValue:
+    def definitions_schema(self, schema: core_schema.DefinitionsSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that defines a JSON object with definitions.

         Args:
@@ -955,10 +1818,16 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        for definition in schema['definitions']:
+            try:
+                self.generate_inner(definition)
+            except PydanticInvalidForJsonSchema as e:
+                core_ref: CoreRef = CoreRef(definition['ref'])  # type: ignore
+                self._core_defs_invalid_for_json_schema[self.get_defs_ref((core_ref, self.mode))] = e
+                continue
+        return self.generate_inner(schema['schema'])

-    def definition_ref_schema(self, schema: core_schema.
-        DefinitionReferenceSchema) ->JsonSchemaValue:
+    def definition_ref_schema(self, schema: core_schema.DefinitionReferenceSchema) -> JsonSchemaValue:
         """Generates a JSON schema that matches a schema that references a definition.

         Args:
@@ -967,11 +1836,13 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
+        core_ref = CoreRef(schema['schema_ref'])
+        _, ref_json_schema = self.get_cache_defs_ref_schema(core_ref)
+        return ref_json_schema

-    def ser_schema(self, schema: (core_schema.SerSchema | core_schema.
-        IncExSeqSerSchema | core_schema.IncExDictSerSchema)) ->(JsonSchemaValue
-         | None):
+    def ser_schema(
+        self, schema: core_schema.SerSchema | core_schema.IncExSeqSerSchema | core_schema.IncExDictSerSchema
+    ) -> JsonSchemaValue | None:
         """Generates a JSON schema that matches a schema that defines a serialized object.

         Args:
@@ -980,9 +1851,23 @@ class GenerateJsonSchema:
         Returns:
             The generated JSON schema.
         """
-        pass
-
-    def get_title_from_name(self, name: str) ->str:
+        schema_type = schema['type']
+        if schema_type == 'function-plain' or schema_type == 'function-wrap':
+            # PlainSerializerFunctionSerSchema or WrapSerializerFunctionSerSchema
+            return_schema = schema.get('return_schema')
+            if return_schema is not None:
+                return self.generate_inner(return_schema)
+        elif schema_type == 'format' or schema_type == 'to-string':
+            # FormatSerSchema or ToStringSerSchema
+            return self.str_schema(core_schema.str_schema())
+        elif schema_type == 'model':
+            # ModelSerSchema
+            return self.generate_inner(schema['schema'])
+        return None
+
+    # ### Utility methods
+
+    def get_title_from_name(self, name: str) -> str:
         """Retrieves a title from a name.

         Args:
@@ -991,9 +1876,9 @@ class GenerateJsonSchema:
         Returns:
             The title.
         """
-        pass
+        return name.title().replace('_', ' ')

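A minimal sketch of this customization point (assumes pydantic v2; the class names are illustrative): overriding get_title_from_name in a GenerateJsonSchema subclass changes how field titles are derived from field names.

from pydantic import BaseModel
from pydantic.json_schema import GenerateJsonSchema

class KeepRawNames(GenerateJsonSchema):
    def get_title_from_name(self, name: str) -> str:
        return name  # keep the raw field name instead of 'Title Case'

class Point(BaseModel):
    x_coord: int

schema = Point.model_json_schema(schema_generator=KeepRawNames)
print(schema['properties']['x_coord']['title'])  # 'x_coord'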
-    def field_title_should_be_set(self, schema: CoreSchemaOrField) ->bool:
+    def field_title_should_be_set(self, schema: CoreSchemaOrField) -> bool:
         """Returns true if a field with the given schema should have a title set based on the field name.

         Intuitively, we want this to return true for schemas that wouldn't otherwise provide their own title
@@ -1005,9 +1890,30 @@ class GenerateJsonSchema:
         Returns:
             `True` if the field should have a title set, `False` otherwise.
         """
-        pass
+        if _core_utils.is_core_schema_field(schema):
+            if schema['type'] == 'computed-field':
+                field_schema = schema['return_schema']
+            else:
+                field_schema = schema['schema']
+            return self.field_title_should_be_set(field_schema)
+
+        elif _core_utils.is_core_schema(schema):
+            if schema.get('ref'):  # things with refs, such as models and enums, should not have titles set
+                return False
+            if schema['type'] in {'default', 'nullable', 'definitions'}:
+                return self.field_title_should_be_set(schema['schema'])  # type: ignore[typeddict-item]
+            if _core_utils.is_function_with_inner_schema(schema):
+                return self.field_title_should_be_set(schema['schema'])
+            if schema['type'] == 'definition-ref':
+                # Referenced schemas should not have titles set for the same reason
+                # schemas with refs should not
+                return False
+            return True  # anything else should have title set
+
+        else:
+            raise PydanticInvalidForJsonSchema(f'Unexpected schema type: schema={schema}')  # pragma: no cover

-    def normalize_name(self, name: str) ->str:
+    def normalize_name(self, name: str) -> str:
         """Normalizes a name to be used as a key in a dictionary.

         Args:
@@ -1016,9 +1922,9 @@ class GenerateJsonSchema:
         Returns:
             The normalized name.
         """
-        pass
+        return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name).replace('.', '__')

-    def get_defs_ref(self, core_mode_ref: CoreModeRef) ->DefsRef:
+    def get_defs_ref(self, core_mode_ref: CoreModeRef) -> DefsRef:
         """Override this method to change the way that definitions keys are generated from a core reference.

         Args:
@@ -1027,10 +1933,46 @@ class GenerateJsonSchema:
         Returns:
             The definitions key.
         """
-        pass
-
-    def get_cache_defs_ref_schema(self, core_ref: CoreRef) ->tuple[DefsRef,
-        JsonSchemaValue]:
+        # Split the core ref into "components"; generic origins and arguments are each separate components
+        core_ref, mode = core_mode_ref
+        components = re.split(r'([\][,])', core_ref)
+        # Remove IDs from each component
+        components = [x.rsplit(':', 1)[0] for x in components]
+        core_ref_no_id = ''.join(components)
+        # Remove everything before the last period from each "component"
+        components = [re.sub(r'(?:[^.[\]]+\.)+((?:[^.[\]]+))', r'\1', x) for x in components]
+        short_ref = ''.join(components)
+
+        mode_title = _MODE_TITLE_MAPPING[mode]
+
+        # It is important that the generated defs_ref values be such that at least one choice will not
+        # be generated for any other core_ref. Currently, this should be the case because we include
+        # the id of the source type in the core_ref
+        name = DefsRef(self.normalize_name(short_ref))
+        name_mode = DefsRef(self.normalize_name(short_ref) + f'-{mode_title}')
+        module_qualname = DefsRef(self.normalize_name(core_ref_no_id))
+        module_qualname_mode = DefsRef(f'{module_qualname}-{mode_title}')
+        module_qualname_id = DefsRef(self.normalize_name(core_ref))
+        occurrence_index = self._collision_index.get(module_qualname_id)
+        if occurrence_index is None:
+            self._collision_counter[module_qualname] += 1
+            occurrence_index = self._collision_index[module_qualname_id] = self._collision_counter[module_qualname]
+
+        module_qualname_occurrence = DefsRef(f'{module_qualname}__{occurrence_index}')
+        module_qualname_occurrence_mode = DefsRef(f'{module_qualname_mode}__{occurrence_index}')
+
+        self._prioritized_defsref_choices[module_qualname_occurrence_mode] = [
+            name,
+            name_mode,
+            module_qualname,
+            module_qualname_mode,
+            module_qualname_occurrence,
+            module_qualname_occurrence_mode,
+        ]
+
+        return module_qualname_occurrence_mode
+
+    def get_cache_defs_ref_schema(self, core_ref: CoreRef) -> tuple[DefsRef, JsonSchemaValue]:
         """This method wraps the get_defs_ref method with some cache-lookup/population logic,
         and returns both the produced defs_ref and the JSON schema that will refer to the right definition.

@@ -1040,10 +1982,25 @@ class GenerateJsonSchema:
         Returns:
             A tuple of the definitions reference and the JSON schema that will refer to it.
         """
-        pass
+        core_mode_ref = (core_ref, self.mode)
+        maybe_defs_ref = self.core_to_defs_refs.get(core_mode_ref)
+        if maybe_defs_ref is not None:
+            json_ref = self.core_to_json_refs[core_mode_ref]
+            return maybe_defs_ref, {'$ref': json_ref}
+
+        defs_ref = self.get_defs_ref(core_mode_ref)
+
+        # populate the ref translation mappings
+        self.core_to_defs_refs[core_mode_ref] = defs_ref
+        self.defs_to_core_refs[defs_ref] = core_mode_ref

-    def handle_ref_overrides(self, json_schema: JsonSchemaValue
-        ) ->JsonSchemaValue:
+        json_ref = JsonRef(self.ref_template.format(model=defs_ref))
+        self.core_to_json_refs[core_mode_ref] = json_ref
+        self.json_to_defs_refs[json_ref] = defs_ref
+        ref_json_schema = {'$ref': json_ref}
+        return defs_ref, ref_json_schema
+
+    def handle_ref_overrides(self, json_schema: JsonSchemaValue) -> JsonSchemaValue:
         """It is not valid for a schema with a top-level $ref to have sibling keys.

         During our own schema generation, we treat sibling keys as overrides to the referenced schema,
@@ -1053,9 +2010,44 @@ class GenerateJsonSchema:
         any remain, we transform the schema from a top-level '$ref' to use allOf to move the $ref out of the top level.
         (See bottom of https://swagger.io/docs/specification/using-ref/ for a reference about this behavior)
         """
-        pass
+        if '$ref' in json_schema:
+            # prevent modifications to the input; this copy may be safe to drop if there is significant overhead
+            json_schema = json_schema.copy()
+
+            referenced_json_schema = self.get_schema_from_definitions(JsonRef(json_schema['$ref']))
+            if referenced_json_schema is None:
+                # This can happen when building schemas for models with not-yet-defined references.
+                # It may be a good idea to do a recursive pass at the end of the generation to remove
+                # any redundant override keys.
+                if len(json_schema) > 1:
+                    # Make it an allOf to at least resolve the sibling keys issue
+                    json_schema = json_schema.copy()
+                    json_schema.setdefault('allOf', [])
+                    json_schema['allOf'].append({'$ref': json_schema['$ref']})
+                    del json_schema['$ref']
+
+                return json_schema
+            for k, v in list(json_schema.items()):
+                if k == '$ref':
+                    continue
+                if k in referenced_json_schema and referenced_json_schema[k] == v:
+                    del json_schema[k]  # redundant key
+            if len(json_schema) > 1:
+                # There is a remaining "override" key, so we need to move $ref out of the top level
+                json_ref = JsonRef(json_schema['$ref'])
+                del json_schema['$ref']
+                assert 'allOf' not in json_schema  # this should never happen, but just in case
+                json_schema['allOf'] = [{'$ref': json_ref}]
+
+        return json_schema
+
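A short sketch of the sibling-key handling described in the docstring above (assumes pydantic 2.8, where $ref siblings are not emitted directly; the model names are illustrative): a field-level description on a referenced model is kept by wrapping the $ref in allOf.

from pydantic import BaseModel, Field

class Inner(BaseModel):
    x: int

class Outer(BaseModel):
    inner: Inner = Field(description='a nested model')

print(Outer.model_json_schema()['properties']['inner'])
# {'allOf': [{'$ref': '#/$defs/Inner'}], 'description': 'a nested model'}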
+    def get_schema_from_definitions(self, json_ref: JsonRef) -> JsonSchemaValue | None:
+        def_ref = self.json_to_defs_refs[json_ref]
+        if def_ref in self._core_defs_invalid_for_json_schema:
+            raise self._core_defs_invalid_for_json_schema[def_ref]
+        return self.definitions.get(def_ref, None)

-    def encode_default(self, dft: Any) ->Any:
+    def encode_default(self, dft: Any) -> Any:
         """Encode a default value to a JSON-serializable value.

         This is used to encode default values for fields in the generated JSON schema.
@@ -1066,10 +2058,27 @@ class GenerateJsonSchema:
         Returns:
             The encoded default value.
         """
-        pass
-
-    def update_with_validations(self, json_schema: JsonSchemaValue,
-        core_schema: CoreSchema, mapping: dict[str, str]) ->None:
+        from .type_adapter import TypeAdapter, _type_has_config
+
+        config = self._config
+        try:
+            default = (
+                dft
+                if _type_has_config(type(dft))
+                else TypeAdapter(type(dft), config=config.config_dict).dump_python(dft, mode='json')
+            )
+        except PydanticSchemaGenerationError:
+            raise pydantic_core.PydanticSerializationError(f'Unable to encode default value {dft}')
+
+        return pydantic_core.to_jsonable_python(
+            default,
+            timedelta_mode=config.ser_json_timedelta,
+            bytes_mode=config.ser_json_bytes,
+        )
+
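A tiny sketch of default encoding (assumes pydantic v2; the `Event` model is illustrative): a non-JSON default such as a date is converted to a JSON-compatible value in the generated schema.

from datetime import date

from pydantic import BaseModel

class Event(BaseModel):
    when: date = date(2020, 1, 1)

print(Event.model_json_schema()['properties']['when']['default'])  # '2020-01-01'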
+    def update_with_validations(
+        self, json_schema: JsonSchemaValue, core_schema: CoreSchema, mapping: dict[str, str]
+    ) -> None:
         """Update the json_schema with the corresponding validations specified in the core_schema,
         using the provided mapping to translate keys in core_schema to the appropriate keys for a JSON schema.

@@ -1078,8 +2087,9 @@ class GenerateJsonSchema:
             core_schema: The core schema to get the validations from.
             mapping: A mapping from core_schema attribute names to the corresponding JSON schema attribute names.
         """
-        pass
-
+        for core_key, json_schema_key in mapping.items():
+            if core_key in core_schema:
+                json_schema[json_schema_key] = core_schema[core_key]

     class ValidationsMapping:
         """This class just contains mappings from core_schema attribute names to the corresponding
@@ -1087,27 +2097,88 @@ class GenerateJsonSchema:
         principle override this class in a subclass of GenerateJsonSchema (by inheriting from
         GenerateJsonSchema.ValidationsMapping) to change these mappings.
         """
-        numeric = {'multiple_of': 'multipleOf', 'le': 'maximum', 'ge':
-            'minimum', 'lt': 'exclusiveMaximum', 'gt': 'exclusiveMinimum'}
-        bytes = {'min_length': 'minLength', 'max_length': 'maxLength'}
-        string = {'min_length': 'minLength', 'max_length': 'maxLength',
-            'pattern': 'pattern'}
-        array = {'min_length': 'minItems', 'max_length': 'maxItems'}
-        object = {'min_length': 'minProperties', 'max_length': 'maxProperties'}
-        date = {'le': 'maximum', 'ge': 'minimum', 'lt': 'exclusiveMaximum',
-            'gt': 'exclusiveMinimum'}
-
-    def get_json_ref_counts(self, json_schema: JsonSchemaValue) ->dict[
-        JsonRef, int]:
-        """Get all values corresponding to the key '$ref' anywhere in the json_schema."""
-        pass

-    def emit_warning(self, kind: JsonSchemaWarningKind, detail: str) ->None:
+        numeric = {
+            'multiple_of': 'multipleOf',
+            'le': 'maximum',
+            'ge': 'minimum',
+            'lt': 'exclusiveMaximum',
+            'gt': 'exclusiveMinimum',
+        }
+        bytes = {
+            'min_length': 'minLength',
+            'max_length': 'maxLength',
+        }
+        string = {
+            'min_length': 'minLength',
+            'max_length': 'maxLength',
+            'pattern': 'pattern',
+        }
+        array = {
+            'min_length': 'minItems',
+            'max_length': 'maxItems',
+        }
+        object = {
+            'min_length': 'minProperties',
+            'max_length': 'maxProperties',
+        }
+        date = {
+            'le': 'maximum',
+            'ge': 'minimum',
+            'lt': 'exclusiveMaximum',
+            'gt': 'exclusiveMinimum',
+        }
+
+    def get_flattened_anyof(self, schemas: list[JsonSchemaValue]) -> JsonSchemaValue:
+        members = []
+        for schema in schemas:
+            if len(schema) == 1 and 'anyOf' in schema:
+                members.extend(schema['anyOf'])
+            else:
+                members.append(schema)
+        members = _deduplicate_schemas(members)
+        if len(members) == 1:
+            return members[0]
+        return {'anyOf': members}
+
+    def get_json_ref_counts(self, json_schema: JsonSchemaValue) -> dict[JsonRef, int]:
+        """Get all values corresponding to the key '$ref' anywhere in the json_schema."""
+        json_refs: dict[JsonRef, int] = Counter()
+
+        def _add_json_refs(schema: Any) -> None:
+            if isinstance(schema, dict):
+                if '$ref' in schema:
+                    json_ref = JsonRef(schema['$ref'])
+                    if not isinstance(json_ref, str):
+                        return  # in this case, '$ref' might have been the name of a property
+                    already_visited = json_ref in json_refs
+                    json_refs[json_ref] += 1
+                    if already_visited:
+                        return  # prevent recursion on a definition that was already visited
+                    defs_ref = self.json_to_defs_refs[json_ref]
+                    if defs_ref in self._core_defs_invalid_for_json_schema:
+                        raise self._core_defs_invalid_for_json_schema[defs_ref]
+                    _add_json_refs(self.definitions[defs_ref])
+
+                for v in schema.values():
+                    _add_json_refs(v)
+            elif isinstance(schema, list):
+                for v in schema:
+                    _add_json_refs(v)
+
+        _add_json_refs(json_schema)
+        return json_refs
+
+    def handle_invalid_for_json_schema(self, schema: CoreSchemaOrField, error_info: str) -> JsonSchemaValue:
+        raise PydanticInvalidForJsonSchema(f'Cannot generate a JsonSchema for {error_info}')
+
+    def emit_warning(self, kind: JsonSchemaWarningKind, detail: str) -> None:
         """This method simply emits PydanticJsonSchemaWarnings based on handling in the `render_warning_message` method."""
-        pass
+        message = self.render_warning_message(kind, detail)
+        if message is not None:
+            warnings.warn(message, PydanticJsonSchemaWarning)

-    def render_warning_message(self, kind: JsonSchemaWarningKind, detail: str
-        ) ->(str | None):
+    def render_warning_message(self, kind: JsonSchemaWarningKind, detail: str) -> str | None:
         """This method is responsible for ignoring warnings as desired, and for formatting the warning messages.

         You can override the value of `ignored_warning_kinds` in a subclass of GenerateJsonSchema
@@ -1124,13 +2195,45 @@ class GenerateJsonSchema:
         Returns:
             The formatted warning message, or `None` if no warning should be emitted.
         """
-        pass
-
-
-def model_json_schema(cls: (type[BaseModel] | type[PydanticDataclass]),
-    by_alias: bool=True, ref_template: str=DEFAULT_REF_TEMPLATE,
-    schema_generator: type[GenerateJsonSchema]=GenerateJsonSchema, mode:
-    JsonSchemaMode='validation') ->dict[str, Any]:
+        if kind in self.ignored_warning_kinds:
+            return None
+        return f'{detail} [{kind}]'
+
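A minimal sketch of the warning customization mentioned in the docstring above (assumes pydantic v2; the subclass name is illustrative): extending ignored_warning_kinds silences the corresponding warnings.

from pydantic import BaseModel
from pydantic.json_schema import GenerateJsonSchema

class QuietGenerator(GenerateJsonSchema):
    # also ignore warnings about defaults that cannot be serialized to JSON
    ignored_warning_kinds = {'skipped-choice', 'non-serializable-default'}

class Model(BaseModel):
    x: int = 0

Model.model_json_schema(schema_generator=QuietGenerator)  # warnings of those kinds are suppressed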
+    def _build_definitions_remapping(self) -> _DefinitionsRemapping:
+        defs_to_json: dict[DefsRef, JsonRef] = {}
+        for defs_refs in self._prioritized_defsref_choices.values():
+            for defs_ref in defs_refs:
+                json_ref = JsonRef(self.ref_template.format(model=defs_ref))
+                defs_to_json[defs_ref] = json_ref
+
+        return _DefinitionsRemapping.from_prioritized_choices(
+            self._prioritized_defsref_choices, defs_to_json, self.definitions
+        )
+
+    def _garbage_collect_definitions(self, schema: JsonSchemaValue) -> None:
+        visited_defs_refs: set[DefsRef] = set()
+        unvisited_json_refs = _get_all_json_refs(schema)
+        while unvisited_json_refs:
+            next_json_ref = unvisited_json_refs.pop()
+            next_defs_ref = self.json_to_defs_refs[next_json_ref]
+            if next_defs_ref in visited_defs_refs:
+                continue
+            visited_defs_refs.add(next_defs_ref)
+            unvisited_json_refs.update(_get_all_json_refs(self.definitions[next_defs_ref]))
+
+        self.definitions = {k: v for k, v in self.definitions.items() if k in visited_defs_refs}
+
+
+# ##### Start JSON Schema Generation Functions #####
+
+
+def model_json_schema(
+    cls: type[BaseModel] | type[PydanticDataclass],
+    by_alias: bool = True,
+    ref_template: str = DEFAULT_REF_TEMPLATE,
+    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
+    mode: JsonSchemaMode = 'validation',
+) -> dict[str, Any]:
     """Utility function to generate a JSON Schema for a model.

     Args:
@@ -1147,15 +2250,29 @@ def model_json_schema(cls: (type[BaseModel] | type[PydanticDataclass]),
     Returns:
         The generated JSON Schema.
     """
-    pass
+    from .main import BaseModel
+
+    schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template)

+    if isinstance(cls.__pydantic_core_schema__, _mock_val_ser.MockCoreSchema):
+        cls.__pydantic_core_schema__.rebuild()

-def models_json_schema(models: Sequence[tuple[type[BaseModel] | type[
-    PydanticDataclass], JsonSchemaMode]], *, by_alias: bool=True, title: (
-    str | None)=None, description: (str | None)=None, ref_template: str=
-    DEFAULT_REF_TEMPLATE, schema_generator: type[GenerateJsonSchema]=
-    GenerateJsonSchema) ->tuple[dict[tuple[type[BaseModel] | type[
-    PydanticDataclass], JsonSchemaMode], JsonSchemaValue], JsonSchemaValue]:
+    if cls is BaseModel:
+        raise AttributeError('model_json_schema() must be called on a subclass of BaseModel, not BaseModel itself.')
+
+    assert not isinstance(cls.__pydantic_core_schema__, _mock_val_ser.MockCoreSchema), 'this is a bug! please report it'
+    return schema_generator_instance.generate(cls.__pydantic_core_schema__, mode=mode)
+
+
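A small sketch of ref_template in use (assumes pydantic v2; the model names are illustrative): references follow the template, while the definitions themselves are still collected under '$defs'.

from pydantic import BaseModel
from pydantic.json_schema import model_json_schema

class Inner(BaseModel):
    x: int

class Outer(BaseModel):
    inner: Inner

schema = model_json_schema(Outer, ref_template='#/components/schemas/{model}')
print(schema['properties']['inner'])  # {'$ref': '#/components/schemas/Inner'}
print(list(schema['$defs']))          # ['Inner']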
+def models_json_schema(
+    models: Sequence[tuple[type[BaseModel] | type[PydanticDataclass], JsonSchemaMode]],
+    *,
+    by_alias: bool = True,
+    title: str | None = None,
+    description: str | None = None,
+    ref_template: str = DEFAULT_REF_TEMPLATE,
+    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
+) -> tuple[dict[tuple[type[BaseModel] | type[PydanticDataclass], JsonSchemaMode], JsonSchemaValue], JsonSchemaValue]:
     """Utility function to generate a JSON Schema for multiple models.

     Args:
@@ -1174,11 +2291,64 @@ def models_json_schema(models: Sequence[tuple[type[BaseModel] | type[
             - The second element is a JSON schema containing all definitions referenced in the first returned
                     element, along with the optional title and description keys.
     """
-    pass
+    for cls, _ in models:
+        if isinstance(cls.__pydantic_core_schema__, _mock_val_ser.MockCoreSchema):
+            cls.__pydantic_core_schema__.rebuild()
+
+    instance = schema_generator(by_alias=by_alias, ref_template=ref_template)
+    inputs: list[tuple[type[BaseModel] | type[PydanticDataclass], JsonSchemaMode, CoreSchema]] = [
+        (m, mode, m.__pydantic_core_schema__) for m, mode in models
+    ]
+    json_schemas_map, definitions = instance.generate_definitions(inputs)
+
+    json_schema: dict[str, Any] = {}
+    if definitions:
+        json_schema['$defs'] = definitions
+    if title:
+        json_schema['title'] = title
+    if description:
+        json_schema['description'] = description

+    return json_schemas_map, json_schema

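A short sketch of the multi-model helper above (assumes pydantic v2; the models are illustrative): the first return value maps each (model, mode) pair to a $ref, and the second holds the shared definitions plus the optional title and description.

from pydantic import BaseModel
from pydantic.json_schema import models_json_schema

class Cat(BaseModel):
    name: str

class Dog(BaseModel):
    name: str

schemas_map, combined = models_json_schema([(Cat, 'validation'), (Dog, 'validation')], title='Pets')
print(sorted(combined['$defs']))         # ['Cat', 'Dog']
print(schemas_map[(Cat, 'validation')])  # {'$ref': '#/$defs/Cat'}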
-_HashableJsonValue: TypeAlias = Union[int, float, str, bool, None, Tuple[
-    '_HashableJsonValue', ...], Tuple[Tuple[str, '_HashableJsonValue'], ...]]
+
+# ##### End JSON Schema Generation Functions #####
+
+
+_HashableJsonValue: TypeAlias = Union[
+    int, float, str, bool, None, Tuple['_HashableJsonValue', ...], Tuple[Tuple[str, '_HashableJsonValue'], ...]
+]
+
+
+def _deduplicate_schemas(schemas: Iterable[JsonDict]) -> list[JsonDict]:
+    return list({_make_json_hashable(schema): schema for schema in schemas}.values())
+
+
+def _make_json_hashable(value: JsonValue) -> _HashableJsonValue:
+    if isinstance(value, dict):
+        return tuple(sorted((k, _make_json_hashable(v)) for k, v in value.items()))
+    elif isinstance(value, list):
+        return tuple(_make_json_hashable(v) for v in value)
+    else:
+        return value
+
+
+def _sort_json_schema(value: JsonSchemaValue, parent_key: str | None = None) -> JsonSchemaValue:
+    if isinstance(value, dict):
+        sorted_dict: dict[str, JsonSchemaValue] = {}
+        keys = value.keys()
+        if (parent_key != 'properties') and (parent_key != 'default'):
+            keys = sorted(keys)
+        for key in keys:
+            sorted_dict[key] = _sort_json_schema(value[key], parent_key=key)
+        return sorted_dict
+    elif isinstance(value, list):
+        sorted_list: list[JsonSchemaValue] = []
+        for item in value:  # type: ignore
+            sorted_list.append(_sort_json_schema(item, parent_key))
+        return sorted_list  # type: ignore
+    else:
+        return value


 @dataclasses.dataclass(**_internal_dataclass.slots_true)
@@ -1195,20 +2365,23 @@ class WithJsonSchema:
     If `mode` is set this will only apply to that schema generation mode, allowing you
     to set different json schemas for validation and serialization.
     """
+
     json_schema: JsonSchemaValue | None
     mode: Literal['validation', 'serialization'] | None = None

-    def __get_pydantic_json_schema__(self, core_schema: core_schema.
-        CoreSchema, handler: GetJsonSchemaHandler) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(
+        self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
+    ) -> JsonSchemaValue:
         mode = self.mode or handler.mode
         if mode != handler.mode:
             return handler(core_schema)
         if self.json_schema is None:
+            # This exception is handled in pydantic.json_schema.GenerateJsonSchema._named_required_fields_schema
             raise PydanticOmit
         else:
             return self.json_schema

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(type(self.mode))


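A brief sketch of WithJsonSchema in use (assumes pydantic v2; the model is illustrative): the supplied dict replaces the schema that would otherwise be generated for the annotated type.

from typing import Annotated

from pydantic import BaseModel, WithJsonSchema

class Model(BaseModel):
    value: Annotated[int, WithJsonSchema({'type': 'integer', 'examples': [1, 2, 3]})]

print(Model.model_json_schema()['properties']['value'])
# {'examples': [1, 2, 3], 'title': 'Value', 'type': 'integer'}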
@@ -1222,11 +2395,13 @@ class Examples:
     If `mode` is set this will only apply to that schema generation mode,
     allowing you to add different examples for validation and serialization.
     """
+
     examples: dict[str, Any]
     mode: Literal['validation', 'serialization'] | None = None

-    def __get_pydantic_json_schema__(self, core_schema: core_schema.
-        CoreSchema, handler: GetJsonSchemaHandler) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(
+        self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
+    ) -> JsonSchemaValue:
         mode = self.mode or handler.mode
         json_schema = handler(core_schema)
         if mode != handler.mode:
@@ -1236,21 +2411,37 @@ class Examples:
         json_schema['examples'] = examples
         return json_schema

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(type(self.mode))


-def _get_all_json_refs(item: Any) ->set[JsonRef]:
+def _get_all_json_refs(item: Any) -> set[JsonRef]:
     """Get all the definitions references from a JSON schema."""
-    pass
+    refs: set[JsonRef] = set()
+    stack = [item]
+
+    while stack:
+        current = stack.pop()
+        if isinstance(current, dict):
+            for key, value in current.items():
+                if key == '$ref' and isinstance(value, str):
+                    refs.add(JsonRef(value))
+                elif isinstance(value, dict):
+                    stack.append(value)
+                elif isinstance(value, list):
+                    stack.extend(value)
+        elif isinstance(current, list):
+            stack.extend(current)
+
+    return refs


 AnyType = TypeVar('AnyType')
+
 if TYPE_CHECKING:
     SkipJsonSchema = Annotated[AnyType, ...]
 else:

-
     @dataclasses.dataclass(**_internal_dataclass.slots_true)
     class SkipJsonSchema:
         """Usage docs: https://docs.pydantic.dev/2.8/concepts/json_schema/#skipjsonschema-annotation
@@ -1302,12 +2493,28 @@ else:
             3. The entirety of the `c` field is omitted from the schema.
         """

-        def __class_getitem__(cls, item: AnyType) ->AnyType:
+        def __class_getitem__(cls, item: AnyType) -> AnyType:
             return Annotated[item, cls()]

-        def __get_pydantic_json_schema__(self, core_schema: CoreSchema,
-            handler: GetJsonSchemaHandler) ->JsonSchemaValue:
+        def __get_pydantic_json_schema__(
+            self, core_schema: CoreSchema, handler: GetJsonSchemaHandler
+        ) -> JsonSchemaValue:
             raise PydanticOmit

-        def __hash__(self) ->int:
+        def __hash__(self) -> int:
             return hash(type(self))
+
+
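A compact sketch of SkipJsonSchema (assumes pydantic v2 on Python 3.10+; the model is illustrative): the skipped union member is dropped from the generated schema, leaving only the integer branch.

from pydantic import BaseModel
from pydantic.json_schema import SkipJsonSchema

class Config(BaseModel):
    timeout: int | SkipJsonSchema[None] = None

print(Config.model_json_schema()['properties']['timeout'])
# {'default': None, 'title': 'Timeout', 'type': 'integer'}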
+def _get_typed_dict_cls(schema: core_schema.TypedDictSchema) -> type[Any] | None:
+    metadata = _core_metadata.CoreMetadataHandler(schema).metadata
+    cls = metadata.get('pydantic_typed_dict_cls')
+    return cls
+
+
+def _get_typed_dict_config(cls: type[Any] | None) -> ConfigDict:
+    if cls is not None:
+        try:
+            return _decorators.get_attribute_from_bases(cls, '__pydantic_config__')
+        except AttributeError:
+            pass
+    return {}
diff --git a/pydantic/main.py b/pydantic/main.py
index 0f604ce09..c40c818b4 100644
--- a/pydantic/main.py
+++ b/pydantic/main.py
@@ -1,17 +1,46 @@
 """Logic for creating models."""
+
 from __future__ import annotations as _annotations
+
 import operator
 import sys
 import types
 import typing
 import warnings
 from copy import copy, deepcopy
-from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Generator, Literal, Set, Tuple, TypeVar, Union, cast, overload
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    ClassVar,
+    Dict,
+    Generator,
+    Literal,
+    Set,
+    Tuple,
+    TypeVar,
+    Union,
+    cast,
+    overload,
+)
+
 import pydantic_core
 import typing_extensions
 from pydantic_core import PydanticUndefined
 from typing_extensions import Self, TypeAlias, Unpack
-from ._internal import _config, _decorators, _fields, _forward_ref, _generics, _mock_val_ser, _model_construction, _repr, _typing_extra, _utils
+
+from ._internal import (
+    _config,
+    _decorators,
+    _fields,
+    _forward_ref,
+    _generics,
+    _mock_val_ser,
+    _model_construction,
+    _repr,
+    _typing_extra,
+    _utils,
+)
 from ._migration import getattr_migration
 from .aliases import AliasChoices, AliasPath
 from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
@@ -20,21 +49,32 @@ from .errors import PydanticUndefinedAnnotation, PydanticUserError
 from .json_schema import DEFAULT_REF_TEMPLATE, GenerateJsonSchema, JsonSchemaMode, JsonSchemaValue, model_json_schema
 from .plugin._schema_validator import PluggableSchemaValidator
 from .warnings import PydanticDeprecatedSince20
+
+# Always define certain types that are needed to resolve method type hints/annotations
+# (even when not type checking) via typing.get_type_hints.
 ModelT = TypeVar('ModelT', bound='BaseModel')
 TupleGenerator = Generator[Tuple[str, Any], None, None]
-IncEx: TypeAlias = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any],
-    None]
+# should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope
+IncEx: TypeAlias = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any], None]
+
+
 if TYPE_CHECKING:
     from inspect import Signature
     from pathlib import Path
+
     from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator
+
     from ._internal._utils import AbstractSetIntStr, MappingIntStrAny
     from .deprecated.parse import Protocol as DeprecatedParseProtocol
     from .fields import ComputedFieldInfo, FieldInfo, ModelPrivateAttr
     from .fields import PrivateAttr as _PrivateAttr
 else:
+    # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915
+    # and https://youtrack.jetbrains.com/issue/PY-51428
     DeprecationWarning = PydanticDeprecatedSince20
+
 __all__ = 'BaseModel', 'create_model'
+
 _object_setattr = _model_construction.object_setattr


@@ -66,11 +106,19 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         __pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
         __pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
     """
+
     if TYPE_CHECKING:
+        # Here we provide annotations for the attributes of BaseModel.
+        # Many of these are populated by the metaclass, which is why this section is in a `TYPE_CHECKING` block.
+        # However, for the sake of easy review, we have included type annotations of all class and instance attributes
+        # of `BaseModel` here:
+
+        # Class attributes
         model_config: ClassVar[ConfigDict]
         """
         Configuration for the model, should be a dictionary conforming to [`ConfigDict`][pydantic.config.ConfigDict].
         """
+
         model_fields: ClassVar[dict[str, FieldInfo]]
         """
         Metadata about the fields defined on the model,
@@ -78,47 +126,61 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):

         This replaces `Model.__fields__` from Pydantic V1.
         """
+
         model_computed_fields: ClassVar[dict[str, ComputedFieldInfo]]
         """A dictionary of computed field names and their corresponding `ComputedFieldInfo` objects."""
+
         __class_vars__: ClassVar[set[str]]
         __private_attributes__: ClassVar[dict[str, ModelPrivateAttr]]
         __signature__: ClassVar[Signature]
+
         __pydantic_complete__: ClassVar[bool]
         __pydantic_core_schema__: ClassVar[CoreSchema]
         __pydantic_custom_init__: ClassVar[bool]
         __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos]
-        __pydantic_generic_metadata__: ClassVar[_generics.
-            PydanticGenericMetadata]
+        __pydantic_generic_metadata__: ClassVar[_generics.PydanticGenericMetadata]
         __pydantic_parent_namespace__: ClassVar[dict[str, Any] | None]
         __pydantic_post_init__: ClassVar[None | Literal['model_post_init']]
         __pydantic_root_model__: ClassVar[bool]
         __pydantic_serializer__: ClassVar[SchemaSerializer]
-        __pydantic_validator__: ClassVar[SchemaValidator |
-            PluggableSchemaValidator]
+        __pydantic_validator__: ClassVar[SchemaValidator | PluggableSchemaValidator]
+
+        # Instance attributes
         __pydantic_extra__: dict[str, Any] | None = _PrivateAttr()
         __pydantic_fields_set__: set[str] = _PrivateAttr()
         __pydantic_private__: dict[str, Any] | None = _PrivateAttr()
+
     else:
+        # `model_fields` and `__pydantic_decorators__` must be set for
+        # pydantic._internal._generate_schema.GenerateSchema.model_schema to work for a plain BaseModel annotation
         model_fields = {}
         model_computed_fields = {}
+
         __pydantic_decorators__ = _decorators.DecoratorInfos()
         __pydantic_parent_namespace__ = None
+        # Prevent `BaseModel` from being instantiated directly:
         __pydantic_core_schema__ = _mock_val_ser.MockCoreSchema(
-            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly'
-            , code='base-model-instantiated')
+            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
+            code='base-model-instantiated',
+        )
         __pydantic_validator__ = _mock_val_ser.MockValSer(
-            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly'
-            , val_or_ser='validator', code='base-model-instantiated')
+            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
+            val_or_ser='validator',
+            code='base-model-instantiated',
+        )
         __pydantic_serializer__ = _mock_val_ser.MockValSer(
-            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly'
-            , val_or_ser='serializer', code='base-model-instantiated')
-    __slots__ = ('__dict__', '__pydantic_fields_set__',
-        '__pydantic_extra__', '__pydantic_private__')
+            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
+            val_or_ser='serializer',
+            code='base-model-instantiated',
+        )
+
+    __slots__ = '__dict__', '__pydantic_fields_set__', '__pydantic_extra__', '__pydantic_private__'
+
     model_config = ConfigDict()
     __pydantic_complete__ = False
     __pydantic_root_model__ = False

-    def __init__(self, /, **data: Any) ->None:
+    def __init__(self, /, **data: Any) -> None:  # type: ignore
         """Create a new model by parsing and validating input data from keyword arguments.

         Raises [`ValidationError`][pydantic_core.ValidationError] if the input data cannot be
@@ -126,32 +188,34 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):

         `self` is explicitly positional-only to allow `self` as a field name.
         """
+        # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
         __tracebackhide__ = True
         self.__pydantic_validator__.validate_python(data, self_instance=self)
-    __init__.__pydantic_base_init__ = True
+
+    # The following line sets a flag that we use to determine when `__init__` gets overridden by the user
+    __init__.__pydantic_base_init__ = True  # pyright: ignore[reportFunctionMemberAccess]

     @property
-    def model_extra(self) ->(dict[str, Any] | None):
+    def model_extra(self) -> dict[str, Any] | None:
         """Get extra fields set during validation.

         Returns:
             A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
         """
-        pass
+        return self.__pydantic_extra__

     @property
-    def model_fields_set(self) ->set[str]:
+    def model_fields_set(self) -> set[str]:
         """Returns the set of fields that have been explicitly set on this model instance.

         Returns:
             A set of strings representing the fields that have been set,
                 i.e. that were not filled from defaults.
         """
-        pass
+        return self.__pydantic_fields_set__

     @classmethod
-    def model_construct(cls, _fields_set: (set[str] | None)=None, **values: Any
-        ) ->Self:
+    def model_construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self:  # noqa: C901
         """Creates a new instance of the `Model` class with validated data.

         Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data.
@@ -171,10 +235,67 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         Returns:
             A new instance of the `Model` class with validated data.
         """
-        pass
+        m = cls.__new__(cls)
+        fields_values: dict[str, Any] = {}
+        fields_set = set()
+
+        for name, field in cls.model_fields.items():
+            if field.alias is not None and field.alias in values:
+                fields_values[name] = values.pop(field.alias)
+                fields_set.add(name)
+
+            if (name not in fields_set) and (field.validation_alias is not None):
+                validation_aliases: list[str | AliasPath] = (
+                    field.validation_alias.choices
+                    if isinstance(field.validation_alias, AliasChoices)
+                    else [field.validation_alias]
+                )
+
+                for alias in validation_aliases:
+                    if isinstance(alias, str) and alias in values:
+                        fields_values[name] = values.pop(alias)
+                        fields_set.add(name)
+                        break
+                    elif isinstance(alias, AliasPath):
+                        value = alias.search_dict_for_path(values)
+                        if value is not PydanticUndefined:
+                            fields_values[name] = value
+                            fields_set.add(name)
+                            break
+
+            if name not in fields_set:
+                if name in values:
+                    fields_values[name] = values.pop(name)
+                    fields_set.add(name)
+                elif not field.is_required():
+                    fields_values[name] = field.get_default(call_default_factory=True)
+        if _fields_set is None:
+            _fields_set = fields_set
+
+        _extra: dict[str, Any] | None = (
+            {k: v for k, v in values.items()} if cls.model_config.get('extra') == 'allow' else None
+        )
+        _object_setattr(m, '__dict__', fields_values)
+        _object_setattr(m, '__pydantic_fields_set__', _fields_set)
+        if not cls.__pydantic_root_model__:
+            _object_setattr(m, '__pydantic_extra__', _extra)
+
+        if cls.__pydantic_post_init__:
+            m.model_post_init(None)
+            # update private attributes with values set
+            if hasattr(m, '__pydantic_private__') and m.__pydantic_private__ is not None:
+                for k, v in values.items():
+                    if k in m.__private_attributes__:
+                        m.__pydantic_private__[k] = v
+
+        elif not cls.__pydantic_root_model__:
+            # Note: if there are any private attributes, cls.__pydantic_post_init__ would exist
+            # Since it doesn't, that means that `__pydantic_private__` should be set to None
+            _object_setattr(m, '__pydantic_private__', None)

-    def model_copy(self, *, update: (dict[str, Any] | None)=None, deep:
-        bool=False) ->Self:
+        return m
+
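A quick sketch of model_construct (assumes pydantic v2; the model is illustrative): values are taken as-is with no validation, missing fields are filled from defaults, and only explicitly supplied names end up in model_fields_set.

from pydantic import BaseModel

class User(BaseModel):
    id: int
    active: bool = True

u = User.model_construct(id='not validated')  # no validation or coercion happens here
print(u.id, u.active)      # not validated True
print(u.model_fields_set)  # {'id'}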
+    def model_copy(self, *, update: dict[str, Any] | None = None, deep: bool = False) -> Self:
         """Usage docs: https://docs.pydantic.dev/2.8/concepts/serialization/#model_copy

         Returns a copy of the model.
@@ -187,14 +308,36 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         Returns:
             New model instance.
         """
-        pass
-
-    def model_dump(self, *, mode: (Literal['json', 'python'] | str)=
-        'python', include: IncEx=None, exclude: IncEx=None, context: (Any |
-        None)=None, by_alias: bool=False, exclude_unset: bool=False,
-        exclude_defaults: bool=False, exclude_none: bool=False, round_trip:
-        bool=False, warnings: (bool | Literal['none', 'warn', 'error'])=
-        True, serialize_as_any: bool=False) ->dict[str, Any]:
+        copied = self.__deepcopy__() if deep else self.__copy__()
+        if update:
+            if self.model_config.get('extra') == 'allow':
+                for k, v in update.items():
+                    if k in self.model_fields:
+                        copied.__dict__[k] = v
+                    else:
+                        if copied.__pydantic_extra__ is None:
+                            copied.__pydantic_extra__ = {}
+                        copied.__pydantic_extra__[k] = v
+            else:
+                copied.__dict__.update(update)
+            copied.__pydantic_fields_set__.update(update.keys())
+        return copied
+
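A quick sketch of model_copy (assumes pydantic v2; the model is illustrative): update= replaces values without re-validating them, and deep=True copies nested containers instead of sharing them.

from pydantic import BaseModel

class Settings(BaseModel):
    retries: int
    tags: list[str] = []

s = Settings(retries=3, tags=['a'])
s2 = s.model_copy(update={'retries': 5})
print(s2.retries, s2.tags)                     # 5 ['a']
print(s.model_copy(deep=True).tags is s.tags)  # False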
+    def model_dump(
+        self,
+        *,
+        mode: Literal['json', 'python'] | str = 'python',
+        include: IncEx = None,
+        exclude: IncEx = None,
+        context: Any | None = None,
+        by_alias: bool = False,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+        round_trip: bool = False,
+        warnings: bool | Literal['none', 'warn', 'error'] = True,
+        serialize_as_any: bool = False,
+    ) -> dict[str, Any]:
         """Usage docs: https://docs.pydantic.dev/2.8/concepts/serialization/#modelmodel_dump

         Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
@@ -218,14 +361,36 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         Returns:
             A dictionary representation of the model.
         """
-        pass
-
-    def model_dump_json(self, *, indent: (int | None)=None, include: IncEx=
-        None, exclude: IncEx=None, context: (Any | None)=None, by_alias:
-        bool=False, exclude_unset: bool=False, exclude_defaults: bool=False,
-        exclude_none: bool=False, round_trip: bool=False, warnings: (bool |
-        Literal['none', 'warn', 'error'])=True, serialize_as_any: bool=False
-        ) ->str:
+        return self.__pydantic_serializer__.to_python(
+            self,
+            mode=mode,
+            by_alias=by_alias,
+            include=include,
+            exclude=exclude,
+            context=context,
+            exclude_unset=exclude_unset,
+            exclude_defaults=exclude_defaults,
+            exclude_none=exclude_none,
+            round_trip=round_trip,
+            warnings=warnings,
+            serialize_as_any=serialize_as_any,
+        )
+
+    def model_dump_json(
+        self,
+        *,
+        indent: int | None = None,
+        include: IncEx = None,
+        exclude: IncEx = None,
+        context: Any | None = None,
+        by_alias: bool = False,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+        round_trip: bool = False,
+        warnings: bool | Literal['none', 'warn', 'error'] = True,
+        serialize_as_any: bool = False,
+    ) -> str:
         """Usage docs: https://docs.pydantic.dev/2.8/concepts/serialization/#modelmodel_dump_json

         Generates a JSON representation of the model using Pydantic's `to_json` method.
@@ -247,13 +412,29 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         Returns:
             A JSON string representation of the model.
         """
-        pass
+        return self.__pydantic_serializer__.to_json(
+            self,
+            indent=indent,
+            include=include,
+            exclude=exclude,
+            context=context,
+            by_alias=by_alias,
+            exclude_unset=exclude_unset,
+            exclude_defaults=exclude_defaults,
+            exclude_none=exclude_none,
+            round_trip=round_trip,
+            warnings=warnings,
+            serialize_as_any=serialize_as_any,
+        ).decode()
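
# A minimal sketch of `model_dump_json`, assuming an illustrative `Item` model; note that the
# serializer returns bytes, which the method decodes to `str`.
from pydantic import BaseModel

class Item(BaseModel):
    sku: str
    qty: int = 1

assert Item(sku='a').model_dump_json() == '{"sku":"a","qty":1}'   # compact by default
print(Item(sku='a').model_dump_json(indent=2))                    # pretty-printed JSON string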

     @classmethod
-    def model_json_schema(cls, by_alias: bool=True, ref_template: str=
-        DEFAULT_REF_TEMPLATE, schema_generator: type[GenerateJsonSchema]=
-        GenerateJsonSchema, mode: JsonSchemaMode='validation') ->dict[str, Any
-        ]:
+    def model_json_schema(
+        cls,
+        by_alias: bool = True,
+        ref_template: str = DEFAULT_REF_TEMPLATE,
+        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
+        mode: JsonSchemaMode = 'validation',
+    ) -> dict[str, Any]:
         """Generates a JSON schema for a model class.

         Args:
@@ -266,10 +447,12 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         Returns:
             The JSON schema for the given model class.
         """
-        pass
+        return model_json_schema(
+            cls, by_alias=by_alias, ref_template=ref_template, schema_generator=schema_generator, mode=mode
+        )
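
# A minimal sketch of `model_json_schema`, assuming an illustrative `Pet` model.
from pydantic import BaseModel

class Pet(BaseModel):
    name: str

schema = Pet.model_json_schema()
assert schema['type'] == 'object'
assert 'name' in schema['properties'] and schema['required'] == ['name']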

     @classmethod
-    def model_parametrized_name(cls, params: tuple[type[Any], ...]) ->str:
+    def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
         """Compute the class name for parametrizations of generic classes.

         This method can be overridden to achieve a custom naming scheme for generic BaseModels.
@@ -285,18 +468,31 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         Raises:
             TypeError: Raised when trying to generate concrete names for non-generic models.
         """
-        pass
+        if not issubclass(cls, typing.Generic):
+            raise TypeError('Concrete names should only be generated for generic models.')
+
+        # Any strings received should represent forward references, so we handle them specially below.
+        # If we eventually move toward wrapping them in a ForwardRef in __class_getitem__ in the future,
+        # we may be able to remove this special case.
+        param_names = [param if isinstance(param, str) else _repr.display_as_type(param) for param in params]
+        params_component = ', '.join(param_names)
+        return f'{cls.__name__}[{params_component}]'
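
# A minimal sketch of the default parametrized naming, assuming an illustrative generic `Wrapper` model.
from typing import Generic, TypeVar
from pydantic import BaseModel

T = TypeVar('T')

class Wrapper(BaseModel, Generic[T]):
    value: T

assert Wrapper.model_parametrized_name((int,)) == 'Wrapper[int]'
assert Wrapper[int].__name__ == 'Wrapper[int]'   # __class_getitem__ uses this name for the submodel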

-    def model_post_init(self, __context: Any) ->None:
+    def model_post_init(self, __context: Any) -> None:
         """Override this method to perform additional initialization after `__init__` and `model_construct`.
         This is useful if you want to do some validation that requires the entire model to be initialized.
         """
         pass

     @classmethod
-    def model_rebuild(cls, *, force: bool=False, raise_errors: bool=True,
-        _parent_namespace_depth: int=2, _types_namespace: (dict[str, Any] |
-        None)=None) ->(bool | None):
+    def model_rebuild(
+        cls,
+        *,
+        force: bool = False,
+        raise_errors: bool = True,
+        _parent_namespace_depth: int = 2,
+        _types_namespace: dict[str, Any] | None = None,
+    ) -> bool | None:
         """Try to rebuild the pydantic-core schema for the model.

         This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
@@ -312,12 +508,47 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
             Returns `None` if the schema is already "complete" and rebuilding was not required.
             If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`.
         """
-        pass
+        if not force and cls.__pydantic_complete__:
+            return None
+        else:
+            if '__pydantic_core_schema__' in cls.__dict__:
+                delattr(cls, '__pydantic_core_schema__')  # delete cached value to ensure full rebuild happens
+            if _types_namespace is not None:
+                types_namespace: dict[str, Any] | None = _types_namespace.copy()
+            else:
+                if _parent_namespace_depth > 0:
+                    frame_parent_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth) or {}
+                    cls_parent_ns = (
+                        _model_construction.unpack_lenient_weakvaluedict(cls.__pydantic_parent_namespace__) or {}
+                    )
+                    types_namespace = {**cls_parent_ns, **frame_parent_ns}
+                    cls.__pydantic_parent_namespace__ = _model_construction.build_lenient_weakvaluedict(types_namespace)
+                else:
+                    types_namespace = _model_construction.unpack_lenient_weakvaluedict(
+                        cls.__pydantic_parent_namespace__
+                    )
+
+                types_namespace = _typing_extra.get_cls_types_namespace(cls, types_namespace)
+
+            # manually override defer_build so complete_model_class doesn't skip building the model again
+            config = {**cls.model_config, 'defer_build': False}
+            return _model_construction.complete_model_class(
+                cls,
+                cls.__name__,
+                _config.ConfigWrapper(config, check=False),
+                raise_errors=raise_errors,
+                types_namespace=types_namespace,
+            )
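
# A minimal sketch of `model_rebuild` resolving a forward reference defined after the model;
# the `Node`/`Label` models are illustrative assumptions.
from pydantic import BaseModel

class Node(BaseModel):
    label: 'Label'   # unresolved at class-creation time, so the schema build is deferred

class Label(BaseModel):
    text: str

assert Node.model_rebuild() is True    # rebuild succeeds now that `Label` exists in the caller's namespace
assert Node.model_rebuild() is None    # schema already complete, nothing to do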

     @classmethod
-    def model_validate(cls, obj: Any, *, strict: (bool | None)=None,
-        from_attributes: (bool | None)=None, context: (Any | None)=None
-        ) ->Self:
+    def model_validate(
+        cls,
+        obj: Any,
+        *,
+        strict: bool | None = None,
+        from_attributes: bool | None = None,
+        context: Any | None = None,
+    ) -> Self:
         """Validate a pydantic model instance.

         Args:
@@ -332,11 +563,20 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         Returns:
             The validated model instance.
         """
-        pass
+        # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
+        __tracebackhide__ = True
+        return cls.__pydantic_validator__.validate_python(
+            obj, strict=strict, from_attributes=from_attributes, context=context
+        )
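
# A minimal sketch of `model_validate`, assuming an illustrative `Account` model.
from pydantic import BaseModel, ValidationError

class Account(BaseModel):
    id: int
    name: str

acct = Account.model_validate({'id': '1', 'name': 'ann'})   # lax mode coerces '1' -> 1
assert acct.id == 1
try:
    Account.model_validate({'id': '1', 'name': 'ann'}, strict=True)
except ValidationError:
    pass   # strict mode rejects the string-typed id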

     @classmethod
-    def model_validate_json(cls, json_data: (str | bytes | bytearray), *,
-        strict: (bool | None)=None, context: (Any | None)=None) ->Self:
+    def model_validate_json(
+        cls,
+        json_data: str | bytes | bytearray,
+        *,
+        strict: bool | None = None,
+        context: Any | None = None,
+    ) -> Self:
         """Usage docs: https://docs.pydantic.dev/2.8/concepts/json/#json-parsing

         Validate the given JSON data against the Pydantic model.
@@ -352,11 +592,18 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         Raises:
             ValueError: If `json_data` is not a JSON string.
         """
-        pass
+        # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
+        __tracebackhide__ = True
+        return cls.__pydantic_validator__.validate_json(json_data, strict=strict, context=context)
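
# A minimal sketch of `model_validate_json`, which parses and validates JSON in a single step;
# the `Event` model is an illustrative assumption.
from pydantic import BaseModel

class Event(BaseModel):
    kind: str
    count: int = 0

evt = Event.model_validate_json('{"kind": "click", "count": 2}')
assert evt.kind == 'click' and evt.count == 2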

     @classmethod
-    def model_validate_strings(cls, obj: Any, *, strict: (bool | None)=None,
-        context: (Any | None)=None) ->Self:
+    def model_validate_strings(
+        cls,
+        obj: Any,
+        *,
+        strict: bool | None = None,
+        context: Any | None = None,
+    ) -> Self:
         """Validate the given object with string data against the Pydantic model.

         Args:
@@ -367,11 +614,12 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         Returns:
             The validated Pydantic model.
         """
-        pass
+        # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
+        __tracebackhide__ = True
+        return cls.__pydantic_validator__.validate_strings(obj, strict=strict, context=context)
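
# A minimal sketch of `model_validate_strings`, which validates string-only input such as
# environment variables or query parameters; the `Job` model is an illustrative assumption.
from datetime import date
from pydantic import BaseModel

class Job(BaseModel):
    when: date
    retries: int

job = Job.model_validate_strings({'when': '2024-01-01', 'retries': '3'})
assert job.retries == 3 and job.when == date(2024, 1, 1)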

     @classmethod
-    def __get_pydantic_core_schema__(cls, source: type[BaseModel], handler:
-        GetCoreSchemaHandler, /) ->CoreSchema:
+    def __get_pydantic_core_schema__(cls, source: type[BaseModel], handler: GetCoreSchemaHandler, /) -> CoreSchema:
         """Hook into generating the model's CoreSchema.

         Args:
@@ -382,16 +630,25 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         Returns:
             A `pydantic-core` `CoreSchema`.
         """
+        # Only use the cached value from this _exact_ class; we don't want one from a parent class
+        # This is why we check `cls.__dict__` and don't use `cls.__pydantic_core_schema__` or similar.
         schema = cls.__dict__.get('__pydantic_core_schema__')
-        if schema is not None and not isinstance(schema, _mock_val_ser.
-            MockCoreSchema):
+        if schema is not None and not isinstance(schema, _mock_val_ser.MockCoreSchema):
+            # Due to the way generic classes are built, it's possible that an invalid schema may be temporarily
+            # set on generic classes. I think we could resolve this to ensure that we get proper schema caching
+            # for generics, but for simplicity for now, we just always rebuild if the class has a generic origin.
             if not cls.__pydantic_generic_metadata__['origin']:
                 return cls.__pydantic_core_schema__
+
         return handler(source)

     @classmethod
-    def __get_pydantic_json_schema__(cls, core_schema: CoreSchema, handler:
-        GetJsonSchemaHandler, /) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(
+        cls,
+        core_schema: CoreSchema,
+        handler: GetJsonSchemaHandler,
+        /,
+    ) -> JsonSchemaValue:
         """Hook into generating the model's JSON schema.

         Args:
@@ -412,7 +669,7 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         return handler(core_schema)

     @classmethod
-    def __pydantic_init_subclass__(cls, **kwargs: Any) ->None:
+    def __pydantic_init_subclass__(cls, **kwargs: Any) -> None:
         """This is intended to behave just like `__init_subclass__`, but is called by `ModelMetaclass`
         only after the class is actually fully initialized. In particular, attributes like `model_fields` will
         be present when this is called.
@@ -430,186 +687,205 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         """
         pass

-    def __class_getitem__(cls, typevar_values: (type[Any] | tuple[type[Any],
-        ...])) ->(type[BaseModel] | _forward_ref.PydanticRecursiveRef):
+    def __class_getitem__(
+        cls, typevar_values: type[Any] | tuple[type[Any], ...]
+    ) -> type[BaseModel] | _forward_ref.PydanticRecursiveRef:
         cached = _generics.get_cached_generic_type_early(cls, typevar_values)
         if cached is not None:
             return cached
+
         if cls is BaseModel:
-            raise TypeError(
-                'Type parameters should be placed on typing.Generic, not BaseModel'
-                )
+            raise TypeError('Type parameters should be placed on typing.Generic, not BaseModel')
         if not hasattr(cls, '__parameters__'):
-            raise TypeError(
-                f'{cls} cannot be parametrized because it does not inherit from typing.Generic'
-                )
-        if not cls.__pydantic_generic_metadata__['parameters'
-            ] and typing.Generic not in cls.__bases__:
+            raise TypeError(f'{cls} cannot be parametrized because it does not inherit from typing.Generic')
+        if not cls.__pydantic_generic_metadata__['parameters'] and typing.Generic not in cls.__bases__:
             raise TypeError(f'{cls} is not a generic class')
+
         if not isinstance(typevar_values, tuple):
-            typevar_values = typevar_values,
+            typevar_values = (typevar_values,)
         _generics.check_parameters_count(cls, typevar_values)
-        typevars_map: dict[_typing_extra.TypeVarType, type[Any]] = dict(zip
-            (cls.__pydantic_generic_metadata__['parameters'], typevar_values))
-        if _utils.all_identical(typevars_map.keys(), typevars_map.values()
-            ) and typevars_map:
-            submodel = cls
+
+        # Build map from generic typevars to passed params
+        typevars_map: dict[_typing_extra.TypeVarType, type[Any]] = dict(
+            zip(cls.__pydantic_generic_metadata__['parameters'], typevar_values)
+        )
+
+        if _utils.all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map:
+            submodel = cls  # if arguments are equal to parameters it's the same object
             _generics.set_cached_generic_type(cls, typevar_values, submodel)
         else:
             parent_args = cls.__pydantic_generic_metadata__['args']
             if not parent_args:
                 args = typevar_values
             else:
-                args = tuple(_generics.replace_types(arg, typevars_map) for
-                    arg in parent_args)
+                args = tuple(_generics.replace_types(arg, typevars_map) for arg in parent_args)
+
             origin = cls.__pydantic_generic_metadata__['origin'] or cls
             model_name = origin.model_parametrized_name(args)
-            params = tuple({param: None for param in _generics.
-                iter_contained_typevars(typevars_map.values())})
-            with _generics.generic_recursion_self_type(origin, args
-                ) as maybe_self_type:
+            params = tuple(
+                {param: None for param in _generics.iter_contained_typevars(typevars_map.values())}
+            )  # use dict as ordered set
+
+            with _generics.generic_recursion_self_type(origin, args) as maybe_self_type:
                 if maybe_self_type is not None:
                     return maybe_self_type
-                cached = _generics.get_cached_generic_type_late(cls,
-                    typevar_values, origin, args)
+
+                cached = _generics.get_cached_generic_type_late(cls, typevar_values, origin, args)
                 if cached is not None:
                     return cached
+
+                # Attempt to rebuild the origin in case new types have been defined
                 try:
+                    # depth 3 gets you above this __class_getitem__ call
                     origin.model_rebuild(_parent_namespace_depth=3)
                 except PydanticUndefinedAnnotation:
+                    # It's okay if it fails, it just means there are still undefined types
+                    # that could be evaluated later.
+                    # TODO: Make sure validation fails if there are still undefined types, perhaps using MockValidator
                     pass
-                submodel = _generics.create_generic_submodel(model_name,
-                    origin, args, params)
-                _generics.set_cached_generic_type(cls, typevar_values,
-                    submodel, origin, args)
+
+                submodel = _generics.create_generic_submodel(model_name, origin, args, params)
+
+                # Update cache
+                _generics.set_cached_generic_type(cls, typevar_values, submodel, origin, args)
+
         return submodel
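
# A minimal sketch of the parametrization and caching behaviour above, assuming an illustrative
# generic `Box` model.
from typing import Generic, TypeVar
from pydantic import BaseModel

T = TypeVar('T')

class Box(BaseModel, Generic[T]):
    item: T

IntBox = Box[int]          # builds a concrete submodel and caches it
assert Box[int] is IntBox  # the second lookup is served from the generic-type cache
assert Box[T] is Box       # parameters identical to the typevars return the class itself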

-    def __copy__(self) ->Self:
+    def __copy__(self) -> Self:
         """Returns a shallow copy of the model."""
         cls = type(self)
         m = cls.__new__(cls)
         _object_setattr(m, '__dict__', copy(self.__dict__))
         _object_setattr(m, '__pydantic_extra__', copy(self.__pydantic_extra__))
-        _object_setattr(m, '__pydantic_fields_set__', copy(self.
-            __pydantic_fields_set__))
-        if not hasattr(self, '__pydantic_private__'
-            ) or self.__pydantic_private__ is None:
+        _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__))
+
+        if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None:
             _object_setattr(m, '__pydantic_private__', None)
         else:
-            _object_setattr(m, '__pydantic_private__', {k: v for k, v in
-                self.__pydantic_private__.items() if v is not
-                PydanticUndefined})
+            _object_setattr(
+                m,
+                '__pydantic_private__',
+                {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined},
+            )
+
         return m

-    def __deepcopy__(self, memo: (dict[int, Any] | None)=None) ->Self:
+    def __deepcopy__(self, memo: dict[int, Any] | None = None) -> Self:
         """Returns a deep copy of the model."""
         cls = type(self)
         m = cls.__new__(cls)
         _object_setattr(m, '__dict__', deepcopy(self.__dict__, memo=memo))
-        _object_setattr(m, '__pydantic_extra__', deepcopy(self.
-            __pydantic_extra__, memo=memo))
-        _object_setattr(m, '__pydantic_fields_set__', copy(self.
-            __pydantic_fields_set__))
-        if not hasattr(self, '__pydantic_private__'
-            ) or self.__pydantic_private__ is None:
+        _object_setattr(m, '__pydantic_extra__', deepcopy(self.__pydantic_extra__, memo=memo))
+        # This next line doesn't need a deepcopy because __pydantic_fields_set__ is a set[str],
+        # and attempting a deepcopy would be marginally slower.
+        _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__))
+
+        if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None:
             _object_setattr(m, '__pydantic_private__', None)
         else:
-            _object_setattr(m, '__pydantic_private__', deepcopy({k: v for k,
-                v in self.__pydantic_private__.items() if v is not
-                PydanticUndefined}, memo=memo))
+            _object_setattr(
+                m,
+                '__pydantic_private__',
+                deepcopy({k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}, memo=memo),
+            )
+
         return m
+
     if not TYPE_CHECKING:
+        # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access
+        # The same goes for __setattr__ and __delattr__, see: https://github.com/pydantic/pydantic/issues/8643

-        def __getattr__(self, item: str) ->Any:
-            private_attributes = object.__getattribute__(self,
-                '__private_attributes__')
+        def __getattr__(self, item: str) -> Any:
+            private_attributes = object.__getattribute__(self, '__private_attributes__')
             if item in private_attributes:
                 attribute = private_attributes[item]
                 if hasattr(attribute, '__get__'):
-                    return attribute.__get__(self, type(self))
+                    return attribute.__get__(self, type(self))  # type: ignore
+
                 try:
-                    return self.__pydantic_private__[item]
+                    # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items
+                    return self.__pydantic_private__[item]  # type: ignore
                 except KeyError as exc:
-                    raise AttributeError(
-                        f'{type(self).__name__!r} object has no attribute {item!r}'
-                        ) from exc
+                    raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc
             else:
+                # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized.
+                # See `BaseModel.__repr_args__` for more details
                 try:
-                    pydantic_extra = object.__getattribute__(self,
-                        '__pydantic_extra__')
+                    pydantic_extra = object.__getattribute__(self, '__pydantic_extra__')
                 except AttributeError:
                     pydantic_extra = None
+
                 if pydantic_extra:
                     try:
                         return pydantic_extra[item]
                     except KeyError as exc:
-                        raise AttributeError(
-                            f'{type(self).__name__!r} object has no attribute {item!r}'
-                            ) from exc
-                elif hasattr(self.__class__, item):
-                    return super().__getattribute__(item)
+                        raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc
                 else:
-                    raise AttributeError(
-                        f'{type(self).__name__!r} object has no attribute {item!r}'
-                        )
+                    if hasattr(self.__class__, item):
+                        return super().__getattribute__(item)  # Raises AttributeError if appropriate
+                    else:
+                        # this is the current error
+                        raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}')

-        def __setattr__(self, name: str, value: Any) ->None:
+        def __setattr__(self, name: str, value: Any) -> None:
             if name in self.__class_vars__:
                 raise AttributeError(
-                    f'{name!r} is a ClassVar of `{self.__class__.__name__}` and cannot be set on an instance. If you want to set a value on the class, use `{self.__class__.__name__}.{name} = value`.'
-                    )
+                    f'{name!r} is a ClassVar of `{self.__class__.__name__}` and cannot be set on an instance. '
+                    f'If you want to set a value on the class, use `{self.__class__.__name__}.{name} = value`.'
+                )
             elif not _fields.is_valid_field_name(name):
-                if (self.__pydantic_private__ is None or name not in self.
-                    __private_attributes__):
+                if self.__pydantic_private__ is None or name not in self.__private_attributes__:
                     _object_setattr(self, name, value)
                 else:
                     attribute = self.__private_attributes__[name]
                     if hasattr(attribute, '__set__'):
-                        attribute.__set__(self, value)
+                        attribute.__set__(self, value)  # type: ignore
                     else:
                         self.__pydantic_private__[name] = value
                 return
+
             self._check_frozen(name, value)
+
             attr = getattr(self.__class__, name, None)
             if isinstance(attr, property):
                 attr.__set__(self, value)
             elif self.model_config.get('validate_assignment', None):
-                self.__pydantic_validator__.validate_assignment(self, name,
-                    value)
-            elif self.model_config.get('extra'
-                ) != 'allow' and name not in self.model_fields:
-                raise ValueError(
-                    f'"{self.__class__.__name__}" object has no field "{name}"'
-                    )
-            elif self.model_config.get('extra'
-                ) == 'allow' and name not in self.model_fields:
+                self.__pydantic_validator__.validate_assignment(self, name, value)
+            elif self.model_config.get('extra') != 'allow' and name not in self.model_fields:
+                # TODO - matching error
+                raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"')
+            elif self.model_config.get('extra') == 'allow' and name not in self.model_fields:
                 if self.model_extra and name in self.model_extra:
-                    self.__pydantic_extra__[name] = value
+                    self.__pydantic_extra__[name] = value  # type: ignore
                 else:
                     try:
                         getattr(self, name)
                     except AttributeError:
-                        self.__pydantic_extra__[name] = value
+                        # attribute does not already exist on instance, so put it in extra
+                        self.__pydantic_extra__[name] = value  # type: ignore
                     else:
+                        # attribute _does_ already exist on instance, and was not in extra, so update it
                         _object_setattr(self, name, value)
             else:
                 self.__dict__[name] = value
                 self.__pydantic_fields_set__.add(name)

-        def __delattr__(self, item: str) ->Any:
+        def __delattr__(self, item: str) -> Any:
             if item in self.__private_attributes__:
                 attribute = self.__private_attributes__[item]
                 if hasattr(attribute, '__delete__'):
-                    attribute.__delete__(self)
+                    attribute.__delete__(self)  # type: ignore
                     return
+
                 try:
-                    del self.__pydantic_private__[item]
+                    # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items
+                    del self.__pydantic_private__[item]  # type: ignore
                     return
                 except KeyError as exc:
-                    raise AttributeError(
-                        f'{type(self).__name__!r} object has no attribute {item!r}'
-                        ) from exc
+                    raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc
+
             self._check_frozen(item, None)
+
             if item in self.model_fields:
                 object.__delattr__(self, item)
             elif self.__pydantic_extra__ is not None and item in self.__pydantic_extra__:
@@ -618,60 +894,100 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
                 try:
                     object.__delattr__(self, item)
                 except AttributeError:
-                    raise AttributeError(
-                        f'{type(self).__name__!r} object has no attribute {item!r}'
-                        )
+                    raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}')

-    def __getstate__(self) ->dict[Any, Any]:
+    def _check_frozen(self, name: str, value: Any) -> None:
+        if self.model_config.get('frozen', None):
+            typ = 'frozen_instance'
+        elif getattr(self.model_fields.get(name), 'frozen', False):
+            typ = 'frozen_field'
+        else:
+            return
+        error: pydantic_core.InitErrorDetails = {
+            'type': typ,
+            'loc': (name,),
+            'input': value,
+        }
+        raise pydantic_core.ValidationError.from_exception_data(self.__class__.__name__, [error])
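
# A minimal sketch of the frozen-instance check: assigning to a frozen model raises a ValidationError
# whose error type matches the `typ` computed above; the `Settings` model is an illustrative assumption.
from pydantic import BaseModel, ConfigDict, ValidationError

class Settings(BaseModel):
    model_config = ConfigDict(frozen=True)
    value: int

s = Settings(value=1)
try:
    s.value = 2
except ValidationError as exc:
    assert exc.errors()[0]['type'] == 'frozen_instance'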
+
+    def __getstate__(self) -> dict[Any, Any]:
         private = self.__pydantic_private__
         if private:
-            private = {k: v for k, v in private.items() if v is not
-                PydanticUndefined}
-        return {'__dict__': self.__dict__, '__pydantic_extra__': self.
-            __pydantic_extra__, '__pydantic_fields_set__': self.
-            __pydantic_fields_set__, '__pydantic_private__': private}
-
-    def __setstate__(self, state: dict[Any, Any]) ->None:
-        _object_setattr(self, '__pydantic_fields_set__', state.get(
-            '__pydantic_fields_set__', {}))
-        _object_setattr(self, '__pydantic_extra__', state.get(
-            '__pydantic_extra__', {}))
-        _object_setattr(self, '__pydantic_private__', state.get(
-            '__pydantic_private__', {}))
+            private = {k: v for k, v in private.items() if v is not PydanticUndefined}
+        return {
+            '__dict__': self.__dict__,
+            '__pydantic_extra__': self.__pydantic_extra__,
+            '__pydantic_fields_set__': self.__pydantic_fields_set__,
+            '__pydantic_private__': private,
+        }
+
+    def __setstate__(self, state: dict[Any, Any]) -> None:
+        _object_setattr(self, '__pydantic_fields_set__', state.get('__pydantic_fields_set__', {}))
+        _object_setattr(self, '__pydantic_extra__', state.get('__pydantic_extra__', {}))
+        _object_setattr(self, '__pydantic_private__', state.get('__pydantic_private__', {}))
         _object_setattr(self, '__dict__', state.get('__dict__', {}))
+
     if not TYPE_CHECKING:

-        def __eq__(self, other: Any) ->bool:
+        def __eq__(self, other: Any) -> bool:
             if isinstance(other, BaseModel):
-                self_type = self.__pydantic_generic_metadata__['origin'
-                    ] or self.__class__
-                other_type = other.__pydantic_generic_metadata__['origin'
-                    ] or other.__class__
-                if not (self_type == other_type and getattr(self,
-                    '__pydantic_private__', None) == getattr(other,
-                    '__pydantic_private__', None) and self.
-                    __pydantic_extra__ == other.__pydantic_extra__):
+                # When comparing instances of generic types for equality, as long as all field values are equal,
+                # only require their generic origin types to be equal, rather than exact type equality.
+                # This prevents headaches like MyGeneric(x=1) != MyGeneric[Any](x=1).
+                self_type = self.__pydantic_generic_metadata__['origin'] or self.__class__
+                other_type = other.__pydantic_generic_metadata__['origin'] or other.__class__
+
+                # Perform common checks first
+                if not (
+                    self_type == other_type
+                    and getattr(self, '__pydantic_private__', None) == getattr(other, '__pydantic_private__', None)
+                    and self.__pydantic_extra__ == other.__pydantic_extra__
+                ):
                     return False
+
+                # We only want to compare pydantic fields but ignoring fields is costly.
+                # We'll perform a fast check first, and fallback only when needed
+                # See GH-7444 and GH-7825 for rationale and a performance benchmark
+
+                # First, do the fast (and sometimes faulty) __dict__ comparison
                 if self.__dict__ == other.__dict__:
+                    # If the check above passes, then pydantic fields are equal, we can return early
                     return True
+
+                # We don't want to trigger unnecessary costly filtering of __dict__ on all unequal objects, so we return
+                # early if there are no keys to ignore (we would just return False later on anyway)
                 model_fields = type(self).model_fields.keys()
-                if self.__dict__.keys(
-                    ) <= model_fields and other.__dict__.keys(
-                    ) <= model_fields:
+                if self.__dict__.keys() <= model_fields and other.__dict__.keys() <= model_fields:
                     return False
-                getter = operator.itemgetter(*model_fields
-                    ) if model_fields else lambda _: _utils._SENTINEL
+
+                # If we reach here, there are non-pydantic-fields keys, mapped to unequal values, that we need to ignore
+                # Resort to costly filtering of the __dict__ objects
+                # We use operator.itemgetter because it is much faster than dict comprehensions
+                # NOTE: Contrary to standard Python classes and instances, when the model class has a default value for an
+                # attribute and the model instance doesn't have a corresponding attribute, accessing the missing attribute
+                # raises an error in BaseModel.__getattr__ instead of returning the class attribute
+                # So we can use operator.itemgetter() instead of operator.attrgetter()
+                getter = operator.itemgetter(*model_fields) if model_fields else lambda _: _utils._SENTINEL
                 try:
                     return getter(self.__dict__) == getter(other.__dict__)
                 except KeyError:
+                    # In rare cases (such as when using the deprecated BaseModel.copy() method),
+                    # the __dict__ may not contain all model fields, which is how we can get here.
+                    # getter(self.__dict__) is much faster than any 'safe' method that accounts
+                    # for missing keys, and wrapping it in a `try` doesn't slow things down much
+                    # in the common case.
                     self_fields_proxy = _utils.SafeGetItemProxy(self.__dict__)
-                    other_fields_proxy = _utils.SafeGetItemProxy(other.__dict__
-                        )
-                    return getter(self_fields_proxy) == getter(
-                        other_fields_proxy)
+                    other_fields_proxy = _utils.SafeGetItemProxy(other.__dict__)
+                    return getter(self_fields_proxy) == getter(other_fields_proxy)
+
+            # other instance is not a BaseModel
             else:
-                return NotImplemented
+                return NotImplemented  # delegate to the other item in the comparison
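
# A minimal sketch of the generic-aware equality described above, assuming an illustrative `Pair` model.
from typing import Generic, TypeVar
from pydantic import BaseModel

T = TypeVar('T')

class Pair(BaseModel, Generic[T]):
    a: T

assert Pair[int](a=1) == Pair(a=1)   # same generic origin and equal fields compare equal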
+
     if TYPE_CHECKING:
+        # We put `__init_subclass__` in a TYPE_CHECKING block because, even though we want the type-checking benefits
+        # described in the signature of `__init_subclass__` below, we don't want to modify the default behavior of
+        # subclass initialization.

         def __init_subclass__(cls, **kwargs: Unpack[ConfigDict]):
             """This signature is included purely to help type-checkers check arguments to class declaration, which
@@ -696,65 +1012,251 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
                 *after* the class is fully initialized.
             """

-    def __iter__(self) ->TupleGenerator:
+    def __iter__(self) -> TupleGenerator:
         """So `dict(model)` works."""
-        yield from [(k, v) for k, v in self.__dict__.items() if not k.
-            startswith('_')]
+        yield from [(k, v) for (k, v) in self.__dict__.items() if not k.startswith('_')]
         extra = self.__pydantic_extra__
         if extra:
             yield from extra.items()

-    def __repr__(self) ->str:
-        return f"{self.__repr_name__()}({self.__repr_str__(', ')})"
+    def __repr__(self) -> str:
+        return f'{self.__repr_name__()}({self.__repr_str__(", ")})'

-    def __repr_args__(self) ->_repr.ReprArgs:
+    def __repr_args__(self) -> _repr.ReprArgs:
         for k, v in self.__dict__.items():
             field = self.model_fields.get(k)
             if field and field.repr:
                 yield k, v
+
+        # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized.
+        # This can happen if a `ValidationError` is raised during initialization and the instance's
+        # repr is generated as part of the exception handling. Therefore, we use `getattr` here
+        # with a fallback, even though the type hints indicate the attribute will always be present.
         try:
-            pydantic_extra = object.__getattribute__(self, '__pydantic_extra__'
-                )
+            pydantic_extra = object.__getattribute__(self, '__pydantic_extra__')
         except AttributeError:
             pydantic_extra = None
+
         if pydantic_extra is not None:
             yield from ((k, v) for k, v in pydantic_extra.items())
-        yield from ((k, getattr(self, k)) for k, v in self.
-            model_computed_fields.items() if v.repr)
+        yield from ((k, getattr(self, k)) for k, v in self.model_computed_fields.items() if v.repr)
+
+    # take logic from `_repr.Representation` without the side effects of inheritance, see #5740
     __repr_name__ = _repr.Representation.__repr_name__
     __repr_str__ = _repr.Representation.__repr_str__
     __pretty__ = _repr.Representation.__pretty__
     __rich_repr__ = _repr.Representation.__rich_repr__

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return self.__repr_str__(' ')

+    # ##### Deprecated methods from v1 #####
     @property
     @typing_extensions.deprecated(
-        'The `__fields__` attribute is deprecated, use `model_fields` instead.'
-        , category=None)
-    def __fields__(self) ->dict[str, FieldInfo]:
+        'The `__fields__` attribute is deprecated, use `model_fields` instead.', category=None
+    )
+    def __fields__(self) -> dict[str, FieldInfo]:
         warnings.warn(
-            'The `__fields__` attribute is deprecated, use `model_fields` instead.'
-            , category=PydanticDeprecatedSince20)
+            'The `__fields__` attribute is deprecated, use `model_fields` instead.', category=PydanticDeprecatedSince20
+        )
         return self.model_fields

     @property
     @typing_extensions.deprecated(
-        'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.'
-        , category=None)
-    def __fields_set__(self) ->set[str]:
+        'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.',
+        category=None,
+    )
+    def __fields_set__(self) -> set[str]:
         warnings.warn(
-            'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.'
-            , category=PydanticDeprecatedSince20)
+            'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.',
+            category=PydanticDeprecatedSince20,
+        )
         return self.__pydantic_fields_set__

+    @typing_extensions.deprecated('The `dict` method is deprecated; use `model_dump` instead.', category=None)
+    def dict(  # noqa: D102
+        self,
+        *,
+        include: IncEx = None,
+        exclude: IncEx = None,
+        by_alias: bool = False,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+    ) -> Dict[str, Any]:  # noqa UP006
+        warnings.warn('The `dict` method is deprecated; use `model_dump` instead.', category=PydanticDeprecatedSince20)
+        return self.model_dump(
+            include=include,
+            exclude=exclude,
+            by_alias=by_alias,
+            exclude_unset=exclude_unset,
+            exclude_defaults=exclude_defaults,
+            exclude_none=exclude_none,
+        )
+
+    @typing_extensions.deprecated('The `json` method is deprecated; use `model_dump_json` instead.', category=None)
+    def json(  # noqa: D102
+        self,
+        *,
+        include: IncEx = None,
+        exclude: IncEx = None,
+        by_alias: bool = False,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+        encoder: Callable[[Any], Any] | None = PydanticUndefined,  # type: ignore[assignment]
+        models_as_dict: bool = PydanticUndefined,  # type: ignore[assignment]
+        **dumps_kwargs: Any,
+    ) -> str:
+        warnings.warn(
+            'The `json` method is deprecated; use `model_dump_json` instead.', category=PydanticDeprecatedSince20
+        )
+        if encoder is not PydanticUndefined:
+            raise TypeError('The `encoder` argument is no longer supported; use field serializers instead.')
+        if models_as_dict is not PydanticUndefined:
+            raise TypeError('The `models_as_dict` argument is no longer supported; use a model serializer instead.')
+        if dumps_kwargs:
+            raise TypeError('`dumps_kwargs` keyword arguments are no longer supported.')
+        return self.model_dump_json(
+            include=include,
+            exclude=exclude,
+            by_alias=by_alias,
+            exclude_unset=exclude_unset,
+            exclude_defaults=exclude_defaults,
+            exclude_none=exclude_none,
+        )
+
+    @classmethod
+    @typing_extensions.deprecated('The `parse_obj` method is deprecated; use `model_validate` instead.', category=None)
+    def parse_obj(cls, obj: Any) -> Self:  # noqa: D102
+        warnings.warn(
+            'The `parse_obj` method is deprecated; use `model_validate` instead.', category=PydanticDeprecatedSince20
+        )
+        return cls.model_validate(obj)
+
+    @classmethod
+    @typing_extensions.deprecated(
+        'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, '
+        'otherwise load the data then use `model_validate` instead.',
+        category=None,
+    )
+    def parse_raw(  # noqa: D102
+        cls,
+        b: str | bytes,
+        *,
+        content_type: str | None = None,
+        encoding: str = 'utf8',
+        proto: DeprecatedParseProtocol | None = None,
+        allow_pickle: bool = False,
+    ) -> Self:  # pragma: no cover
+        warnings.warn(
+            'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, '
+            'otherwise load the data then use `model_validate` instead.',
+            category=PydanticDeprecatedSince20,
+        )
+        from .deprecated import parse
+
+        try:
+            obj = parse.load_str_bytes(
+                b,
+                proto=proto,
+                content_type=content_type,
+                encoding=encoding,
+                allow_pickle=allow_pickle,
+            )
+        except (ValueError, TypeError) as exc:
+            import json
+
+            # try to match V1
+            if isinstance(exc, UnicodeDecodeError):
+                type_str = 'value_error.unicodedecode'
+            elif isinstance(exc, json.JSONDecodeError):
+                type_str = 'value_error.jsondecode'
+            elif isinstance(exc, ValueError):
+                type_str = 'value_error'
+            else:
+                type_str = 'type_error'
+
+            # ctx is missing here, but since we've added `input` to the error, we're not pretending it's the same
+            error: pydantic_core.InitErrorDetails = {
+                # The type: ignore on the next line is to ignore the requirement of LiteralString
+                'type': pydantic_core.PydanticCustomError(type_str, str(exc)),  # type: ignore
+                'loc': ('__root__',),
+                'input': b,
+            }
+            raise pydantic_core.ValidationError.from_exception_data(cls.__name__, [error])
+        return cls.model_validate(obj)
+
+    @classmethod
+    @typing_extensions.deprecated(
+        'The `parse_file` method is deprecated; load the data from file, then if your data is JSON '
+        'use `model_validate_json`, otherwise `model_validate` instead.',
+        category=None,
+    )
+    def parse_file(  # noqa: D102
+        cls,
+        path: str | Path,
+        *,
+        content_type: str | None = None,
+        encoding: str = 'utf8',
+        proto: DeprecatedParseProtocol | None = None,
+        allow_pickle: bool = False,
+    ) -> Self:
+        warnings.warn(
+            'The `parse_file` method is deprecated; load the data from file, then if your data is JSON '
+            'use `model_validate_json`, otherwise `model_validate` instead.',
+            category=PydanticDeprecatedSince20,
+        )
+        from .deprecated import parse
+
+        obj = parse.load_file(
+            path,
+            proto=proto,
+            content_type=content_type,
+            encoding=encoding,
+            allow_pickle=allow_pickle,
+        )
+        return cls.parse_obj(obj)
+
+    @classmethod
+    @typing_extensions.deprecated(
+        'The `from_orm` method is deprecated; set '
+        "`model_config['from_attributes']=True` and use `model_validate` instead.",
+        category=None,
+    )
+    def from_orm(cls, obj: Any) -> Self:  # noqa: D102
+        warnings.warn(
+            'The `from_orm` method is deprecated; set '
+            "`model_config['from_attributes']=True` and use `model_validate` instead.",
+            category=PydanticDeprecatedSince20,
+        )
+        if not cls.model_config.get('from_attributes', None):
+            raise PydanticUserError(
+                'You must set the config attribute `from_attributes=True` to use from_orm', code=None
+            )
+        return cls.model_validate(obj)
+
+    @classmethod
+    @typing_extensions.deprecated('The `construct` method is deprecated; use `model_construct` instead.', category=None)
+    def construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self:  # noqa: D102
+        warnings.warn(
+            'The `construct` method is deprecated; use `model_construct` instead.', category=PydanticDeprecatedSince20
+        )
+        return cls.model_construct(_fields_set=_fields_set, **values)
+
     @typing_extensions.deprecated(
-        'The `copy` method is deprecated; use `model_copy` instead. See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.'
-        , category=None)
-    def copy(self, *, include: (AbstractSetIntStr | MappingIntStrAny | None
-        )=None, exclude: (AbstractSetIntStr | MappingIntStrAny | None)=None,
-        update: (Dict[str, Any] | None)=None, deep: bool=False) ->Self:
+        'The `copy` method is deprecated; use `model_copy` instead. '
+        'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
+        category=None,
+    )
+    def copy(
+        self,
+        *,
+        include: AbstractSetIntStr | MappingIntStrAny | None = None,
+        exclude: AbstractSetIntStr | MappingIntStrAny | None = None,
+        update: Dict[str, Any] | None = None,  # noqa UP006
+        deep: bool = False,
+    ) -> Self:  # pragma: no cover
         """Returns a copy of the model.

         !!! warning "Deprecated"
@@ -777,15 +1279,197 @@ class BaseModel(metaclass=_model_construction.ModelMetaclass):
         Returns:
             A copy of the model with included, excluded and updated fields as specified.
         """
-        pass
+        warnings.warn(
+            'The `copy` method is deprecated; use `model_copy` instead. '
+            'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
+            category=PydanticDeprecatedSince20,
+        )
+        from .deprecated import copy_internals
+
+        values = dict(
+            copy_internals._iter(
+                self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False
+            ),
+            **(update or {}),
+        )
+        if self.__pydantic_private__ is None:
+            private = None
+        else:
+            private = {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}
+
+        if self.__pydantic_extra__ is None:
+            extra: dict[str, Any] | None = None
+        else:
+            extra = self.__pydantic_extra__.copy()
+            for k in list(self.__pydantic_extra__):
+                if k not in values:  # k was in the exclude
+                    extra.pop(k)
+            for k in list(values):
+                if k in self.__pydantic_extra__:  # k must have come from extra
+                    extra[k] = values.pop(k)
+
+        # new `__pydantic_fields_set__` can have unset optional fields with a set value in `update` kwarg
+        if update:
+            fields_set = self.__pydantic_fields_set__ | update.keys()
+        else:
+            fields_set = set(self.__pydantic_fields_set__)
+
+        # removing excluded fields from `__pydantic_fields_set__`
+        if exclude:
+            fields_set -= set(exclude)

+        return copy_internals._copy_and_set_values(self, values, fields_set, extra, private, deep=deep)

-def create_model(model_name: str, /, *, __config__: (ConfigDict | None)=
-    None, __doc__: (str | None)=None, __base__: (type[ModelT] | tuple[type[
-    ModelT], ...] | None)=None, __module__: (str | None)=None,
-    __validators__: (dict[str, Callable[..., Any]] | None)=None,
-    __cls_kwargs__: (dict[str, Any] | None)=None, __slots__: (tuple[str,
-    ...] | None)=None, **field_definitions: Any) ->type[ModelT]:
+    @classmethod
+    @typing_extensions.deprecated('The `schema` method is deprecated; use `model_json_schema` instead.', category=None)
+    def schema(  # noqa: D102
+        cls, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE
+    ) -> Dict[str, Any]:  # noqa UP006
+        warnings.warn(
+            'The `schema` method is deprecated; use `model_json_schema` instead.', category=PydanticDeprecatedSince20
+        )
+        return cls.model_json_schema(by_alias=by_alias, ref_template=ref_template)
+
+    @classmethod
+    @typing_extensions.deprecated(
+        'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.',
+        category=None,
+    )
+    def schema_json(  # noqa: D102
+        cls, *, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE, **dumps_kwargs: Any
+    ) -> str:  # pragma: no cover
+        warnings.warn(
+            'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.',
+            category=PydanticDeprecatedSince20,
+        )
+        import json
+
+        from .deprecated.json import pydantic_encoder
+
+        return json.dumps(
+            cls.model_json_schema(by_alias=by_alias, ref_template=ref_template),
+            default=pydantic_encoder,
+            **dumps_kwargs,
+        )
+
+    @classmethod
+    @typing_extensions.deprecated('The `validate` method is deprecated; use `model_validate` instead.', category=None)
+    def validate(cls, value: Any) -> Self:  # noqa: D102
+        warnings.warn(
+            'The `validate` method is deprecated; use `model_validate` instead.', category=PydanticDeprecatedSince20
+        )
+        return cls.model_validate(value)
+
+    @classmethod
+    @typing_extensions.deprecated(
+        'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.',
+        category=None,
+    )
+    def update_forward_refs(cls, **localns: Any) -> None:  # noqa: D102
+        warnings.warn(
+            'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.',
+            category=PydanticDeprecatedSince20,
+        )
+        if localns:  # pragma: no cover
+            raise TypeError('`localns` arguments are no longer accepted.')
+        cls.model_rebuild(force=True)
+
+    @typing_extensions.deprecated(
+        'The private method `_iter` will be removed and should no longer be used.', category=None
+    )
+    def _iter(self, *args: Any, **kwargs: Any) -> Any:
+        warnings.warn(
+            'The private method `_iter` will be removed and should no longer be used.',
+            category=PydanticDeprecatedSince20,
+        )
+        from .deprecated import copy_internals
+
+        return copy_internals._iter(self, *args, **kwargs)
+
+    @typing_extensions.deprecated(
+        'The private method `_copy_and_set_values` will be removed and should no longer be used.',
+        category=None,
+    )
+    def _copy_and_set_values(self, *args: Any, **kwargs: Any) -> Any:
+        warnings.warn(
+            'The private method `_copy_and_set_values` will be removed and should no longer be used.',
+            category=PydanticDeprecatedSince20,
+        )
+        from .deprecated import copy_internals
+
+        return copy_internals._copy_and_set_values(self, *args, **kwargs)
+
+    @classmethod
+    @typing_extensions.deprecated(
+        'The private method `_get_value` will be removed and should no longer be used.',
+        category=None,
+    )
+    def _get_value(cls, *args: Any, **kwargs: Any) -> Any:
+        warnings.warn(
+            'The private method `_get_value` will be removed and should no longer be used.',
+            category=PydanticDeprecatedSince20,
+        )
+        from .deprecated import copy_internals
+
+        return copy_internals._get_value(cls, *args, **kwargs)
+
+    @typing_extensions.deprecated(
+        'The private method `_calculate_keys` will be removed and should no longer be used.',
+        category=None,
+    )
+    def _calculate_keys(self, *args: Any, **kwargs: Any) -> Any:
+        warnings.warn(
+            'The private method `_calculate_keys` will be removed and should no longer be used.',
+            category=PydanticDeprecatedSince20,
+        )
+        from .deprecated import copy_internals
+
+        return copy_internals._calculate_keys(self, *args, **kwargs)
+
+
+@overload
+def create_model(
+    model_name: str,
+    /,
+    *,
+    __config__: ConfigDict | None = None,
+    __doc__: str | None = None,
+    __base__: None = None,
+    __module__: str = __name__,
+    __validators__: dict[str, Callable[..., Any]] | None = None,
+    __cls_kwargs__: dict[str, Any] | None = None,
+    **field_definitions: Any,
+) -> type[BaseModel]: ...
+
+
+@overload
+def create_model(
+    model_name: str,
+    /,
+    *,
+    __config__: ConfigDict | None = None,
+    __doc__: str | None = None,
+    __base__: type[ModelT] | tuple[type[ModelT], ...],
+    __module__: str = __name__,
+    __validators__: dict[str, Callable[..., Any]] | None = None,
+    __cls_kwargs__: dict[str, Any] | None = None,
+    **field_definitions: Any,
+) -> type[ModelT]: ...
+
+
+def create_model(  # noqa: C901
+    model_name: str,
+    /,
+    *,
+    __config__: ConfigDict | None = None,
+    __doc__: str | None = None,
+    __base__: type[ModelT] | tuple[type[ModelT], ...] | None = None,
+    __module__: str | None = None,
+    __validators__: dict[str, Callable[..., Any]] | None = None,
+    __cls_kwargs__: dict[str, Any] | None = None,
+    __slots__: tuple[str, ...] | None = None,
+    **field_definitions: Any,
+) -> type[ModelT]:
     """Usage docs: https://docs.pydantic.dev/2.8/concepts/models/#dynamic-model-creation

     Dynamically creates and returns a new Pydantic model, in other words, `create_model` dynamically creates a
@@ -813,7 +1497,84 @@ def create_model(model_name: str, /, *, __config__: (ConfigDict | None)=
     Raises:
         PydanticUserError: If `__base__` and `__config__` are both passed.
     """
-    pass
+    if __slots__ is not None:
+        # __slots__ will be ignored from here on
+        warnings.warn('__slots__ should not be passed to create_model', RuntimeWarning)
+
+    if __base__ is not None:
+        if __config__ is not None:
+            raise PydanticUserError(
+                'to avoid confusion `__config__` and `__base__` cannot be used together',
+                code='create-model-config-base',
+            )
+        if not isinstance(__base__, tuple):
+            __base__ = (__base__,)
+    else:
+        __base__ = (cast('type[ModelT]', BaseModel),)
+
+    __cls_kwargs__ = __cls_kwargs__ or {}
+
+    fields = {}
+    annotations = {}
+
+    for f_name, f_def in field_definitions.items():
+        if not _fields.is_valid_field_name(f_name):
+            warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning)
+        if isinstance(f_def, tuple):
+            f_def = cast('tuple[str, Any]', f_def)
+            try:
+                f_annotation, f_value = f_def
+            except ValueError as e:
+                raise PydanticUserError(
+                    'Field definitions should be a `(<type>, <default>)`.',
+                    code='create-model-field-definitions',
+                ) from e
+
+        elif _typing_extra.is_annotated(f_def):
+            (f_annotation, f_value, *_) = typing_extensions.get_args(
+                f_def
+            )  # the first two inputs are expected from Annotated; refer to https://docs.python.org/3/library/typing.html#typing.Annotated
+            from .fields import FieldInfo
+
+            if not isinstance(f_value, FieldInfo):
+                raise PydanticUserError(
+                    'Field definitions should be an Annotated[<type>, <FieldInfo>]',
+                    code='create-model-field-definitions',
+                )
+
+        else:
+            f_annotation, f_value = None, f_def
+
+        if f_annotation:
+            annotations[f_name] = f_annotation
+        fields[f_name] = f_value
+
+    if __module__ is None:
+        f = sys._getframe(1)
+        __module__ = f.f_globals['__name__']
+
+    namespace: dict[str, Any] = {'__annotations__': annotations, '__module__': __module__}
+    if __doc__:
+        namespace.update({'__doc__': __doc__})
+    if __validators__:
+        namespace.update(__validators__)
+    namespace.update(fields)
+    if __config__:
+        namespace['model_config'] = _config.ConfigWrapper(__config__).config_dict
+    resolved_bases = types.resolve_bases(__base__)
+    meta, ns, kwds = types.prepare_class(model_name, resolved_bases, kwds=__cls_kwargs__)
+    if resolved_bases is not __base__:
+        ns['__orig_bases__'] = __base__
+    namespace.update(ns)
+
+    return meta(
+        model_name,
+        resolved_bases,
+        namespace,
+        __pydantic_reset_parent_namespace__=False,
+        _create_model_module=__module__,
+        **kwds,
+    )


 __getattr__ = getattr_migration(__name__)
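
A minimal usage sketch of the `create_model` implementation above (illustrative only, not part of the diff; model and field names are made up). It exercises the two annotated field-definition forms the loop above accepts, a `(<type>, <default>)` tuple and an `Annotated[<type>, <FieldInfo>]` value; as the PydanticUserError branch enforces, `__base__` and `__config__` cannot be combined.

    from typing import Annotated

    from pydantic import Field, create_model

    # Tuple form: (<type>, <default>); pass `...` as the default to make the field required.
    # Annotated form: the metadata after the type must be a FieldInfo (e.g. the result of Field()).
    DynamicModel = create_model(
        'DynamicModel',
        name=(str, 'unnamed'),
        score=Annotated[int, Field(ge=0)],
    )

    print(DynamicModel(score=5))
    #> name='unnamed' score=5
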
diff --git a/pydantic/mypy.py b/pydantic/mypy.py
index 03f2edc6a..93e29d5c6 100644
--- a/pydantic/mypy.py
+++ b/pydantic/mypy.py
@@ -1,52 +1,118 @@
 """This module includes classes and functions designed specifically for use with the mypy plugin."""
+
 from __future__ import annotations
+
 import sys
 from configparser import ConfigParser
 from typing import Any, Callable, Iterator
+
 from mypy.errorcodes import ErrorCode
 from mypy.expandtype import expand_type, expand_type_by_instance
-from mypy.nodes import ARG_NAMED, ARG_NAMED_OPT, ARG_OPT, ARG_POS, ARG_STAR2, INVARIANT, MDEF, Argument, AssignmentStmt, Block, CallExpr, ClassDef, Context, Decorator, DictExpr, EllipsisExpr, Expression, FuncDef, IfStmt, JsonDict, MemberExpr, NameExpr, PassStmt, PlaceholderNode, RefExpr, Statement, StrExpr, SymbolTableNode, TempNode, TypeAlias, TypeInfo, Var
+from mypy.nodes import (
+    ARG_NAMED,
+    ARG_NAMED_OPT,
+    ARG_OPT,
+    ARG_POS,
+    ARG_STAR2,
+    INVARIANT,
+    MDEF,
+    Argument,
+    AssignmentStmt,
+    Block,
+    CallExpr,
+    ClassDef,
+    Context,
+    Decorator,
+    DictExpr,
+    EllipsisExpr,
+    Expression,
+    FuncDef,
+    IfStmt,
+    JsonDict,
+    MemberExpr,
+    NameExpr,
+    PassStmt,
+    PlaceholderNode,
+    RefExpr,
+    Statement,
+    StrExpr,
+    SymbolTableNode,
+    TempNode,
+    TypeAlias,
+    TypeInfo,
+    Var,
+)
 from mypy.options import Options
-from mypy.plugin import CheckerPluginInterface, ClassDefContext, FunctionContext, MethodContext, Plugin, ReportConfigContext, SemanticAnalyzerPluginInterface
+from mypy.plugin import (
+    CheckerPluginInterface,
+    ClassDefContext,
+    FunctionContext,
+    MethodContext,
+    Plugin,
+    ReportConfigContext,
+    SemanticAnalyzerPluginInterface,
+)
 from mypy.plugins import dataclasses
-from mypy.plugins.common import deserialize_and_fixup_type
+from mypy.plugins.common import (
+    deserialize_and_fixup_type,
+)
 from mypy.semanal import set_callable_name
 from mypy.server.trigger import make_wildcard_trigger
 from mypy.state import state
 from mypy.typeops import map_type_from_supertype
-from mypy.types import AnyType, CallableType, Instance, NoneType, Overloaded, Type, TypeOfAny, TypeType, TypeVarType, UnionType, get_proper_type
+from mypy.types import (
+    AnyType,
+    CallableType,
+    Instance,
+    NoneType,
+    Overloaded,
+    Type,
+    TypeOfAny,
+    TypeType,
+    TypeVarType,
+    UnionType,
+    get_proper_type,
+)
 from mypy.typevars import fill_typevars
 from mypy.util import get_unique_redefinition_name
 from mypy.version import __version__ as mypy_version
+
 from pydantic._internal import _fields
 from pydantic.version import parse_mypy_version
+
 try:
-    from mypy.types import TypeVarDef
-except ImportError:
+    from mypy.types import TypeVarDef  # type: ignore[attr-defined]
+except ImportError:  # pragma: no cover
+    # Backward-compatible with TypeVarDef from Mypy 0.930.
     from mypy.types import TypeVarType as TypeVarDef
+
 CONFIGFILE_KEY = 'pydantic-mypy'
 METADATA_KEY = 'pydantic-mypy-metadata'
 BASEMODEL_FULLNAME = 'pydantic.main.BaseModel'
 BASESETTINGS_FULLNAME = 'pydantic_settings.main.BaseSettings'
 ROOT_MODEL_FULLNAME = 'pydantic.root_model.RootModel'
-MODEL_METACLASS_FULLNAME = (
-    'pydantic._internal._model_construction.ModelMetaclass')
+MODEL_METACLASS_FULLNAME = 'pydantic._internal._model_construction.ModelMetaclass'
 FIELD_FULLNAME = 'pydantic.fields.Field'
 DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass'
 MODEL_VALIDATOR_FULLNAME = 'pydantic.functional_validators.model_validator'
-DECORATOR_FULLNAMES = {'pydantic.functional_validators.field_validator',
+DECORATOR_FULLNAMES = {
+    'pydantic.functional_validators.field_validator',
     'pydantic.functional_validators.model_validator',
     'pydantic.functional_serializers.serializer',
     'pydantic.functional_serializers.model_serializer',
     'pydantic.deprecated.class_validators.validator',
-    'pydantic.deprecated.class_validators.root_validator'}
+    'pydantic.deprecated.class_validators.root_validator',
+}
+
+
 MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version)
-BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930
-    ) else '__builtins__'
+BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930) else '__builtins__'
+
+# Increment version if plugin changes and mypy caches should be invalidated
 __version__ = 2


-def plugin(version: str) ->type[Plugin]:
+def plugin(version: str) -> type[Plugin]:
     """`version` is the mypy version string.

     We might want to use this to print a warning if the mypy version being used is
@@ -58,62 +124,79 @@ def plugin(version: str) ->type[Plugin]:
     Return:
         The Pydantic mypy plugin type.
     """
-    pass
+    return PydanticPlugin


 class PydanticPlugin(Plugin):
     """The Pydantic mypy plugin."""

-    def __init__(self, options: Options) ->None:
+    def __init__(self, options: Options) -> None:
         self.plugin_config = PydanticPluginConfig(options)
         self._plugin_data = self.plugin_config.to_data()
         super().__init__(options)

-    def get_base_class_hook(self, fullname: str) ->(Callable[[
-        ClassDefContext], bool] | None):
+    def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], bool] | None:
         """Update Pydantic model class."""
-        pass
-
-    def get_metaclass_hook(self, fullname: str) ->(Callable[[
-        ClassDefContext], None] | None):
+        sym = self.lookup_fully_qualified(fullname)
+        if sym and isinstance(sym.node, TypeInfo):  # pragma: no branch
+            # No branching may occur if the mypy cache has not been cleared
+            if any(base.fullname == BASEMODEL_FULLNAME for base in sym.node.mro):
+                return self._pydantic_model_class_maker_callback
+        return None
+
+    def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
         """Update Pydantic `ModelMetaclass` definition."""
-        pass
+        if fullname == MODEL_METACLASS_FULLNAME:
+            return self._pydantic_model_metaclass_marker_callback
+        return None

-    def get_function_hook(self, fullname: str) ->(Callable[[FunctionContext
-        ], Type] | None):
+    def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
         """Adjust the return type of the `Field` function."""
-        pass
+        sym = self.lookup_fully_qualified(fullname)
+        if sym and sym.fullname == FIELD_FULLNAME:
+            return self._pydantic_field_callback
+        return None

-    def get_method_hook(self, fullname: str) ->(Callable[[MethodContext],
-        Type] | None):
+    def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
         """Adjust return type of `from_orm` method call."""
-        pass
+        if fullname.endswith('.from_orm'):
+            return from_attributes_callback
+        return None

-    def get_class_decorator_hook(self, fullname: str) ->(Callable[[
-        ClassDefContext], None] | None):
+    def get_class_decorator_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
         """Mark pydantic.dataclasses as dataclass.

         Mypy version 1.1.1 added support for `@dataclass_transform` decorator.
         """
-        pass
+        if fullname == DATACLASS_FULLNAME and MYPY_VERSION_TUPLE < (1, 1):
+            return dataclasses.dataclass_class_maker_callback  # type: ignore[return-value]
+        return None

-    def report_config_data(self, ctx: ReportConfigContext) ->dict[str, Any]:
+    def report_config_data(self, ctx: ReportConfigContext) -> dict[str, Any]:
         """Return all plugin config data.

         Used by mypy to determine if cache needs to be discarded.
         """
-        pass
+        return self._plugin_data
+
+    def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> bool:
+        transformer = PydanticModelTransformer(ctx.cls, ctx.reason, ctx.api, self.plugin_config)
+        return transformer.transform()

-    def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext
-        ) ->None:
+    def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None:
         """Reset dataclass_transform_spec attribute of ModelMetaclass.

         Let the plugin handle it. This behavior can be disabled
         if 'debug_dataclass_transform' is set to True, for testing purposes.
         """
-        pass
+        if self.plugin_config.debug_dataclass_transform:
+            return
+        info_metaclass = ctx.cls.info.declared_metaclass
+        assert info_metaclass, "callback not passed from 'get_metaclass_hook'"
+        if getattr(info_metaclass.type, 'dataclass_transform_spec', None):
+            info_metaclass.type.dataclass_transform_spec = None

-    def _pydantic_field_callback(self, ctx: FunctionContext) ->Type:
+    def _pydantic_field_callback(self, ctx: FunctionContext) -> Type:
         """Extract the type of the `default` argument from the Field function, and use it as the return type.

         In particular:
@@ -121,7 +204,45 @@ class PydanticPlugin(Plugin):
         * Output an error if both are specified.
         * Retrieve the type of the argument which is specified, and use it as return type for the function.
         """
-        pass
+        default_any_type = ctx.default_return_type
+
+        assert ctx.callee_arg_names[0] == 'default', '"default" is no longer first argument in Field()'
+        assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer second argument in Field()'
+        default_args = ctx.args[0]
+        default_factory_args = ctx.args[1]
+
+        if default_args and default_factory_args:
+            error_default_and_default_factory_specified(ctx.api, ctx.context)
+            return default_any_type
+
+        if default_args:
+            default_type = ctx.arg_types[0][0]
+            default_arg = default_args[0]
+
+            # Fallback to default Any type if the field is required
+            if not isinstance(default_arg, EllipsisExpr):
+                return default_type
+
+        elif default_factory_args:
+            default_factory_type = ctx.arg_types[1][0]
+
+            # Functions which use `ParamSpec` can be overloaded, exposing the callable's types as a parameter
+            # Pydantic calls the default factory without any argument, so we retrieve the first item
+            if isinstance(default_factory_type, Overloaded):
+                default_factory_type = default_factory_type.items[0]
+
+            if isinstance(default_factory_type, CallableType):
+                ret_type = default_factory_type.ret_type
+                # mypy doesn't think `ret_type` has `args` (though it arguably should);
+                # use getattr in case this varies by mypy version
+                args = getattr(ret_type, 'args', None)
+                if args:
+                    if all(isinstance(arg, TypeVarType) for arg in args):
+                        # Looks like the default factory is a type like `list` or `dict`, replace all args with `Any`
+                        ret_type.args = tuple(default_any_type for _ in args)  # type: ignore[attr-defined]
+                return ret_type
+
+        return default_any_type
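
An illustrative sketch (not part of the diff) of what `_pydantic_field_callback` provides: the return type of a `Field()` call is taken from its `default` argument, or from the return type of `default_factory` (with bare generic factories such as `list` widened to `Any` parameters), so mypy can compare it against the field annotation.

    from pydantic import BaseModel, Field

    class Example(BaseModel):  # hypothetical model, for illustration only
        count: int = Field(default=0)                  # Field(...) is inferred as int here
        tags: list[str] = Field(default_factory=list)  # factory return type becomes list[Any]
        # bad: str = Field(default=0)                  # a mismatch like this becomes visible to mypy
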


 class PydanticPluginConfig:
@@ -134,50 +255,79 @@ class PydanticPluginConfig:
         debug_dataclass_transform: Whether to not reset `dataclass_transform_spec` attribute
             of `ModelMetaclass` for testing purposes.
     """
-    __slots__ = ('init_forbid_extra', 'init_typed',
-        'warn_required_dynamic_aliases', 'debug_dataclass_transform')
+
+    __slots__ = (
+        'init_forbid_extra',
+        'init_typed',
+        'warn_required_dynamic_aliases',
+        'debug_dataclass_transform',
+    )
     init_forbid_extra: bool
     init_typed: bool
     warn_required_dynamic_aliases: bool
-    debug_dataclass_transform: bool
+    debug_dataclass_transform: bool  # undocumented

-    def __init__(self, options: Options) ->None:
-        if options.config_file is None:
+    def __init__(self, options: Options) -> None:
+        if options.config_file is None:  # pragma: no cover
             return
+
         toml_config = parse_toml(options.config_file)
         if toml_config is not None:
             config = toml_config.get('tool', {}).get('pydantic-mypy', {})
             for key in self.__slots__:
                 setting = config.get(key, False)
                 if not isinstance(setting, bool):
-                    raise ValueError(
-                        f'Configuration value must be a boolean for key: {key}'
-                        )
+                    raise ValueError(f'Configuration value must be a boolean for key: {key}')
                 setattr(self, key, setting)
         else:
             plugin_config = ConfigParser()
             plugin_config.read(options.config_file)
             for key in self.__slots__:
-                setting = plugin_config.getboolean(CONFIGFILE_KEY, key,
-                    fallback=False)
+                setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False)
                 setattr(self, key, setting)

-    def to_data(self) ->dict[str, Any]:
+    def to_data(self) -> dict[str, Any]:
         """Returns a dict of config names to their values."""
-        pass
+        return {key: getattr(self, key) for key in self.__slots__}
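
The two code paths above (TOML via `parse_toml`, otherwise `ConfigParser` with `CONFIGFILE_KEY`) correspond to the usual ways of configuring the plugin. A sketch of both layouts follows; every setting defaults to false when omitted:

    # pyproject.toml
    [tool.mypy]
    plugins = ["pydantic.mypy"]

    [tool.pydantic-mypy]
    init_forbid_extra = true
    init_typed = true
    warn_required_dynamic_aliases = true

    # mypy.ini (or setup.cfg)
    [mypy]
    plugins = pydantic.mypy

    [pydantic-mypy]
    init_forbid_extra = True
    init_typed = True
    warn_required_dynamic_aliases = True
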


-def from_attributes_callback(ctx: MethodContext) ->Type:
+def from_attributes_callback(ctx: MethodContext) -> Type:
     """Raise an error if from_attributes is not enabled."""
-    pass
+    model_type: Instance
+    ctx_type = ctx.type
+    if isinstance(ctx_type, TypeType):
+        ctx_type = ctx_type.item
+    if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance):
+        model_type = ctx_type.ret_type  # called on the class
+    elif isinstance(ctx_type, Instance):
+        model_type = ctx_type  # called on an instance (unusual, but still valid)
+    else:  # pragma: no cover
+        detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})'
+        error_unexpected_behavior(detail, ctx.api, ctx.context)
+        return ctx.default_return_type
+    pydantic_metadata = model_type.type.metadata.get(METADATA_KEY)
+    if pydantic_metadata is None:
+        return ctx.default_return_type
+    from_attributes = pydantic_metadata.get('config', {}).get('from_attributes')
+    if from_attributes is not True:
+        error_from_attributes(model_type.type.name, ctx.api, ctx.context)
+    return ctx.default_return_type
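
A small sketch (not part of the diff) of what this callback enforces: calling the deprecated `from_orm` classmethod is only accepted when the model's stored config has `from_attributes=True`; otherwise `error_from_attributes` reports it.

    from pydantic import BaseModel, ConfigDict

    class Source:  # hypothetical ORM-style object, for illustration only
        id = 1

    class WithAttributes(BaseModel):
        model_config = ConfigDict(from_attributes=True)
        id: int

    class WithoutAttributes(BaseModel):
        id: int

    WithAttributes.from_orm(Source())     # accepted by the plugin (from_orm itself is deprecated at runtime)
    WithoutAttributes.from_orm(Source())  # flagged: "WithoutAttributes" does not have from_attributes=True
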


 class PydanticModelField:
     """Based on mypy.plugins.dataclasses.DataclassAttribute."""

-    def __init__(self, name: str, alias: (str | None), has_dynamic_alias:
-        bool, has_default: bool, line: int, column: int, type: (Type | None
-        ), info: TypeInfo):
+    def __init__(
+        self,
+        name: str,
+        alias: str | None,
+        has_dynamic_alias: bool,
+        has_default: bool,
+        line: int,
+        column: int,
+        type: Type | None,
+        info: TypeInfo,
+    ):
         self.name = name
         self.alias = alias
         self.has_dynamic_alias = has_dynamic_alias
@@ -187,41 +337,96 @@ class PydanticModelField:
         self.type = type
         self.info = info

-    def to_argument(self, current_info: TypeInfo, typed: bool,
-        force_optional: bool, use_alias: bool, api:
-        SemanticAnalyzerPluginInterface, force_typevars_invariant: bool
-        ) ->Argument:
+    def to_argument(
+        self,
+        current_info: TypeInfo,
+        typed: bool,
+        force_optional: bool,
+        use_alias: bool,
+        api: SemanticAnalyzerPluginInterface,
+        force_typevars_invariant: bool,
+    ) -> Argument:
         """Based on mypy.plugins.dataclasses.DataclassAttribute.to_argument."""
-        pass
-
-    def expand_type(self, current_info: TypeInfo, api:
-        SemanticAnalyzerPluginInterface, force_typevars_invariant: bool=False
-        ) ->(Type | None):
+        variable = self.to_var(current_info, api, use_alias, force_typevars_invariant)
+        type_annotation = self.expand_type(current_info, api) if typed else AnyType(TypeOfAny.explicit)
+        return Argument(
+            variable=variable,
+            type_annotation=type_annotation,
+            initializer=None,
+            kind=ARG_NAMED_OPT if force_optional or self.has_default else ARG_NAMED,
+        )
+
+    def expand_type(
+        self, current_info: TypeInfo, api: SemanticAnalyzerPluginInterface, force_typevars_invariant: bool = False
+    ) -> Type | None:
         """Based on mypy.plugins.dataclasses.DataclassAttribute.expand_type."""
-        pass
-
-    def to_var(self, current_info: TypeInfo, api:
-        SemanticAnalyzerPluginInterface, use_alias: bool,
-        force_typevars_invariant: bool=False) ->Var:
+        # The getattr below is used to prevent errors in legacy versions of mypy without this attribute
+        if force_typevars_invariant:
+            # In some cases, mypy will emit an error "Cannot use a covariant type variable as a parameter"
+            # To prevent that, we add an option to replace typevars with invariant ones while building certain
+            # method signatures (in particular, `__init__`). There may be a better way to do this; if this causes
+            # us problems in the future, we should look into why the dataclasses plugin doesn't have this issue.
+            if isinstance(self.type, TypeVarType):
+                modified_type = self.type.copy_modified()
+                modified_type.variance = INVARIANT
+                self.type = modified_type
+
+        if self.type is not None and getattr(self.info, 'self_type', None) is not None:
+            # In general, it is not safe to call `expand_type()` during semantic analysis,
+            # however this plugin is called very late, so all types should be fully ready.
+            # Also, it is tricky to avoid eager expansion of Self types here (e.g. because
+            # we serialize attributes).
+            with state.strict_optional_set(api.options.strict_optional):
+                filled_with_typevars = fill_typevars(current_info)
+                if force_typevars_invariant:
+                    for arg in filled_with_typevars.args:
+                        if isinstance(arg, TypeVarType):
+                            arg.variance = INVARIANT
+                return expand_type(self.type, {self.info.self_type.id: filled_with_typevars})
+        return self.type
+
+    def to_var(
+        self,
+        current_info: TypeInfo,
+        api: SemanticAnalyzerPluginInterface,
+        use_alias: bool,
+        force_typevars_invariant: bool = False,
+    ) -> Var:
         """Based on mypy.plugins.dataclasses.DataclassAttribute.to_var."""
-        pass
+        if use_alias and self.alias is not None:
+            name = self.alias
+        else:
+            name = self.name

-    def serialize(self) ->JsonDict:
+        return Var(name, self.expand_type(current_info, api, force_typevars_invariant))
+
+    def serialize(self) -> JsonDict:
         """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
-        pass
+        assert self.type
+        return {
+            'name': self.name,
+            'alias': self.alias,
+            'has_dynamic_alias': self.has_dynamic_alias,
+            'has_default': self.has_default,
+            'line': self.line,
+            'column': self.column,
+            'type': self.type.serialize(),
+        }

     @classmethod
-    def deserialize(cls, info: TypeInfo, data: JsonDict, api:
-        SemanticAnalyzerPluginInterface) ->PydanticModelField:
+    def deserialize(cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface) -> PydanticModelField:
         """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize."""
-        pass
+        data = data.copy()
+        typ = deserialize_and_fixup_type(data.pop('type'), api)
+        return cls(type=typ, info=info, **data)

-    def expand_typevar_from_subtype(self, sub_type: TypeInfo, api:
-        SemanticAnalyzerPluginInterface) ->None:
+    def expand_typevar_from_subtype(self, sub_type: TypeInfo, api: SemanticAnalyzerPluginInterface) -> None:
         """Expands type vars in the context of a subtype when an attribute is inherited
         from a generic super type.
         """
-        pass
+        if self.type is not None:
+            with state.strict_optional_set(api.options.strict_optional):
+                self.type = map_type_from_supertype(self.type, sub_type, self.info)


 class PydanticModelClassVar:
@@ -237,13 +442,16 @@ class PydanticModelClassVar:
         self.name = name

     @classmethod
-    def deserialize(cls, data: JsonDict) ->PydanticModelClassVar:
+    def deserialize(cls, data: JsonDict) -> PydanticModelClassVar:
         """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize."""
-        pass
+        data = data.copy()
+        return cls(**data)

-    def serialize(self) ->JsonDict:
+    def serialize(self) -> JsonDict:
         """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
-        pass
+        return {
+            'name': self.name,
+        }


 class PydanticModelTransformer:
@@ -252,18 +460,29 @@ class PydanticModelTransformer:
     Attributes:
         tracked_config_fields: A set of field configs that the plugin has to track their value.
     """
-    tracked_config_fields: set[str] = {'extra', 'frozen', 'from_attributes',
-        'populate_by_name', 'alias_generator'}

-    def __init__(self, cls: ClassDef, reason: (Expression | Statement), api:
-        SemanticAnalyzerPluginInterface, plugin_config: PydanticPluginConfig
-        ) ->None:
+    tracked_config_fields: set[str] = {
+        'extra',
+        'frozen',
+        'from_attributes',
+        'populate_by_name',
+        'alias_generator',
+    }
+
+    def __init__(
+        self,
+        cls: ClassDef,
+        reason: Expression | Statement,
+        api: SemanticAnalyzerPluginInterface,
+        plugin_config: PydanticPluginConfig,
+    ) -> None:
         self._cls = cls
         self._reason = reason
         self._api = api
+
         self.plugin_config = plugin_config

-    def transform(self) ->bool:
+    def transform(self) -> bool:
         """Configures the BaseModel subclass according to the plugin settings.

         In particular:
@@ -273,9 +492,34 @@ class PydanticModelTransformer:
         * freezes the class if frozen = True
         * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses
         """
-        pass
-
-    def adjust_decorator_signatures(self) ->None:
+        info = self._cls.info
+        is_root_model = any(ROOT_MODEL_FULLNAME in base.fullname for base in info.mro[:-1])
+        config = self.collect_config()
+        fields, class_vars = self.collect_fields_and_class_vars(config, is_root_model)
+        if fields is None or class_vars is None:
+            # Some definitions are not ready. We need another pass.
+            return False
+        for field in fields:
+            if field.type is None:
+                return False
+
+        is_settings = any(base.fullname == BASESETTINGS_FULLNAME for base in info.mro[:-1])
+        self.add_initializer(fields, config, is_settings, is_root_model)
+        if not is_root_model:
+            self.add_model_construct_method(fields, config, is_settings)
+        self.set_frozen(fields, self._api, frozen=config.frozen is True)
+
+        self.adjust_decorator_signatures()
+
+        info.metadata[METADATA_KEY] = {
+            'fields': {field.name: field.serialize() for field in fields},
+            'class_vars': {class_var.name: class_var.serialize() for class_var in class_vars},
+            'config': config.get_values_dict(),
+        }
+
+        return True
+
+    def adjust_decorator_signatures(self) -> None:
         """When we decorate a function `f` with `pydantic.validator(...)`, `pydantic.field_validator`
         or `pydantic.serializer(...)`, mypy sees `f` as a regular method taking a `self` instance,
         even though pydantic internally wraps `f` with `classmethod` if necessary.
@@ -283,22 +527,183 @@ class PydanticModelTransformer:
         Teach mypy this by marking any function whose outermost decorator is a `validator()`,
         `field_validator()` or `serializer()` call as a `classmethod`.
         """
-        pass
+        for name, sym in self._cls.info.names.items():
+            if isinstance(sym.node, Decorator):
+                first_dec = sym.node.original_decorators[0]
+                if (
+                    isinstance(first_dec, CallExpr)
+                    and isinstance(first_dec.callee, NameExpr)
+                    and first_dec.callee.fullname in DECORATOR_FULLNAMES
+                    # @model_validator(mode="after") is an exception, it expects a regular method
+                    and not (
+                        first_dec.callee.fullname == MODEL_VALIDATOR_FULLNAME
+                        and any(
+                            first_dec.arg_names[i] == 'mode' and isinstance(arg, StrExpr) and arg.value == 'after'
+                            for i, arg in enumerate(first_dec.args)
+                        )
+                    )
+                ):
+                    # TODO: Only do this if the first argument of the decorated function is `cls`
+                    sym.node.func.is_class = True
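
For context, an illustrative example (not part of the diff) of the signatures this adjustment covers: a function whose outermost decorator is a `field_validator(...)` call is marked as a classmethod for mypy, while `@model_validator(mode='after')` is deliberately excluded and stays a regular method.

    from pydantic import BaseModel, field_validator, model_validator

    class Example(BaseModel):  # hypothetical model, for illustration only
        x: int

        @field_validator('x')
        def double(cls, v: int) -> int:  # treated as a classmethod by the plugin
            return v * 2

        @model_validator(mode='after')
        def check(self) -> 'Example':  # mode='after' validators remain instance methods
            return self
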

-    def collect_config(self) ->ModelConfigData:
+    def collect_config(self) -> ModelConfigData:  # noqa: C901 (ignore complexity)
         """Collects the values of the config attributes that are used by the plugin, accounting for parent classes."""
-        pass
-
-    def collect_fields_and_class_vars(self, model_config: ModelConfigData,
-        is_root_model: bool) ->tuple[list[PydanticModelField] | None, list[
-        PydanticModelClassVar] | None]:
+        cls = self._cls
+        config = ModelConfigData()
+
+        has_config_kwargs = False
+        has_config_from_namespace = False
+
+        # Handle `class MyModel(BaseModel, <name>=<expr>, ...):`
+        for name, expr in cls.keywords.items():
+            config_data = self.get_config_update(name, expr)
+            if config_data:
+                has_config_kwargs = True
+                config.update(config_data)
+
+        # Handle `model_config`
+        stmt: Statement | None = None
+        for stmt in cls.defs.body:
+            if not isinstance(stmt, (AssignmentStmt, ClassDef)):
+                continue
+
+            if isinstance(stmt, AssignmentStmt):
+                lhs = stmt.lvalues[0]
+                if not isinstance(lhs, NameExpr) or lhs.name != 'model_config':
+                    continue
+
+                if isinstance(stmt.rvalue, CallExpr):  # calls to `dict` or `ConfigDict`
+                    for arg_name, arg in zip(stmt.rvalue.arg_names, stmt.rvalue.args):
+                        if arg_name is None:
+                            continue
+                        config.update(self.get_config_update(arg_name, arg, lax_extra=True))
+                elif isinstance(stmt.rvalue, DictExpr):  # dict literals
+                    for key_expr, value_expr in stmt.rvalue.items:
+                        if not isinstance(key_expr, StrExpr):
+                            continue
+                        config.update(self.get_config_update(key_expr.value, value_expr))
+
+            elif isinstance(stmt, ClassDef):
+                if stmt.name != 'Config':  # 'deprecated' Config-class
+                    continue
+                for substmt in stmt.defs.body:
+                    if not isinstance(substmt, AssignmentStmt):
+                        continue
+                    lhs = substmt.lvalues[0]
+                    if not isinstance(lhs, NameExpr):
+                        continue
+                    config.update(self.get_config_update(lhs.name, substmt.rvalue))
+
+            if has_config_kwargs:
+                self._api.fail(
+                    'Specifying config in two places is ambiguous, use either Config attribute or class kwargs',
+                    cls,
+                )
+                break
+
+            has_config_from_namespace = True
+
+        if has_config_kwargs or has_config_from_namespace:
+            if (
+                stmt
+                and config.has_alias_generator
+                and not config.populate_by_name
+                and self.plugin_config.warn_required_dynamic_aliases
+            ):
+                error_required_dynamic_aliases(self._api, stmt)
+
+        for info in cls.info.mro[1:]:  # 0 is the current class
+            if METADATA_KEY not in info.metadata:
+                continue
+
+            # Each class depends on the set of fields in its ancestors
+            self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname))
+            for name, value in info.metadata[METADATA_KEY]['config'].items():
+                config.setdefault(name, value)
+        return config
+
+    def collect_fields_and_class_vars(
+        self, model_config: ModelConfigData, is_root_model: bool
+    ) -> tuple[list[PydanticModelField] | None, list[PydanticModelClassVar] | None]:
         """Collects the fields for the model, accounting for parent classes."""
-        pass
-
-    def collect_field_or_class_var_from_stmt(self, stmt: AssignmentStmt,
-        model_config: ModelConfigData, class_vars: dict[str,
-        PydanticModelClassVar]) ->(PydanticModelField |
-        PydanticModelClassVar | None):
+        cls = self._cls
+
+        # First, collect fields and ClassVars belonging to any class in the MRO, ignoring duplicates.
+        #
+        # We iterate through the MRO in reverse because attrs defined in the parent must appear
+        # earlier in the attributes list than attrs defined in the child. See:
+        # https://docs.python.org/3/library/dataclasses.html#inheritance
+        #
+        # However, we also want fields defined in the subtype to override ones defined
+        # in the parent. We can implement this via a dict without disrupting the attr order
+        # because dicts preserve insertion order in Python 3.7+.
+        found_fields: dict[str, PydanticModelField] = {}
+        found_class_vars: dict[str, PydanticModelClassVar] = {}
+        for info in reversed(cls.info.mro[1:-1]):  # 0 is the current class, -2 is BaseModel, -1 is object
+            # if BASEMODEL_METADATA_TAG_KEY in info.metadata and BASEMODEL_METADATA_KEY not in info.metadata:
+            #     # We haven't processed the base class yet. Need another pass.
+            #     return None, None
+            if METADATA_KEY not in info.metadata:
+                continue
+
+            # Each class depends on the set of attributes in its dataclass ancestors.
+            self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname))
+
+            for name, data in info.metadata[METADATA_KEY]['fields'].items():
+                field = PydanticModelField.deserialize(info, data, self._api)
+                # (The following comment comes directly from the dataclasses plugin)
+                # TODO: We shouldn't be performing type operations during the main
+                #       semantic analysis pass, since some TypeInfo attributes might
+                #       still be in flux. This should be performed in a later phase.
+                field.expand_typevar_from_subtype(cls.info, self._api)
+                found_fields[name] = field
+
+                sym_node = cls.info.names.get(name)
+                if sym_node and sym_node.node and not isinstance(sym_node.node, Var):
+                    self._api.fail(
+                        'BaseModel field may only be overridden by another field',
+                        sym_node.node,
+                    )
+            # Collect ClassVars
+            for name, data in info.metadata[METADATA_KEY]['class_vars'].items():
+                found_class_vars[name] = PydanticModelClassVar.deserialize(data)
+
+        # Second, collect fields and ClassVars belonging to the current class.
+        current_field_names: set[str] = set()
+        current_class_vars_names: set[str] = set()
+        for stmt in self._get_assignment_statements_from_block(cls.defs):
+            maybe_field = self.collect_field_or_class_var_from_stmt(stmt, model_config, found_class_vars)
+            if isinstance(maybe_field, PydanticModelField):
+                lhs = stmt.lvalues[0]
+                if is_root_model and lhs.name != 'root':
+                    error_extra_fields_on_root_model(self._api, stmt)
+                else:
+                    current_field_names.add(lhs.name)
+                    found_fields[lhs.name] = maybe_field
+            elif isinstance(maybe_field, PydanticModelClassVar):
+                lhs = stmt.lvalues[0]
+                current_class_vars_names.add(lhs.name)
+                found_class_vars[lhs.name] = maybe_field
+
+        return list(found_fields.values()), list(found_class_vars.values())
+
+    def _get_assignment_statements_from_if_statement(self, stmt: IfStmt) -> Iterator[AssignmentStmt]:
+        for body in stmt.body:
+            if not body.is_unreachable:
+                yield from self._get_assignment_statements_from_block(body)
+        if stmt.else_body is not None and not stmt.else_body.is_unreachable:
+            yield from self._get_assignment_statements_from_block(stmt.else_body)
+
+    def _get_assignment_statements_from_block(self, block: Block) -> Iterator[AssignmentStmt]:
+        for stmt in block.body:
+            if isinstance(stmt, AssignmentStmt):
+                yield stmt
+            elif isinstance(stmt, IfStmt):
+                yield from self._get_assignment_statements_from_if_statement(stmt)
+
+    def collect_field_or_class_var_from_stmt(  # noqa: C901
+        self, stmt: AssignmentStmt, model_config: ModelConfigData, class_vars: dict[str, PydanticModelClassVar]
+    ) -> PydanticModelField | PydanticModelClassVar | None:
         """Get pydantic model field from statement.

         Args:
@@ -309,189 +714,600 @@ class PydanticModelTransformer:
         Returns:
             A pydantic model field if it could find the field in statement. Otherwise, `None`.
         """
-        pass
-
-    def _infer_dataclass_attr_init_type(self, sym: SymbolTableNode, name:
-        str, context: Context) ->(Type | None):
+        cls = self._cls
+
+        lhs = stmt.lvalues[0]
+        if not isinstance(lhs, NameExpr) or not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config':
+            return None
+
+        if not stmt.new_syntax:
+            if (
+                isinstance(stmt.rvalue, CallExpr)
+                and isinstance(stmt.rvalue.callee, CallExpr)
+                and isinstance(stmt.rvalue.callee.callee, NameExpr)
+                and stmt.rvalue.callee.callee.fullname in DECORATOR_FULLNAMES
+            ):
+                # This is a (possibly-reused) validator or serializer, not a field
+                # In particular, it looks something like: my_validator = validator('my_field')(f)
+                # Eventually, we may want to attempt to respect model_config['ignored_types']
+                return None
+
+            if lhs.name in class_vars:
+                # Class vars are not fields and are not required to be annotated
+                return None
+
+            # The assignment does not have an annotation, and it's not anything else we recognize
+            error_untyped_fields(self._api, stmt)
+            return None
+
+        lhs = stmt.lvalues[0]
+        if not isinstance(lhs, NameExpr):
+            return None
+
+        if not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config':
+            return None
+
+        sym = cls.info.names.get(lhs.name)
+        if sym is None:  # pragma: no cover
+            # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation)
+            # This is the same logic used in the dataclasses plugin
+            return None
+
+        node = sym.node
+        if isinstance(node, PlaceholderNode):  # pragma: no cover
+            # See the PlaceholderNode docstring for more detail about how this can occur
+            # Basically, it is an edge case when dealing with complex import logic
+
+            # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does..
+            return None
+
+        if isinstance(node, TypeAlias):
+            self._api.fail(
+                'Type aliases inside BaseModel definitions are not supported at runtime',
+                node,
+            )
+            # Skip processing this node. This doesn't match the runtime behaviour,
+            # but the only alternative would be to modify the SymbolTable,
+            # and it's a little hairy to do that in a plugin.
+            return None
+
+        if not isinstance(node, Var):  # pragma: no cover
+            # Don't know if this edge case still happens with the `is_valid_field` check above
+            # but better safe than sorry
+
+            # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does..
+            return None
+
+        # x: ClassVar[int] is not a field
+        if node.is_classvar:
+            return PydanticModelClassVar(lhs.name)
+
+        # x: InitVar[int] is not supported in BaseModel
+        node_type = get_proper_type(node.type)
+        if isinstance(node_type, Instance) and node_type.type.fullname == 'dataclasses.InitVar':
+            self._api.fail(
+                'InitVar is not supported in BaseModel',
+                node,
+            )
+
+        has_default = self.get_has_default(stmt)
+
+        if sym.type is None and node.is_final and node.is_inferred:
+            # This follows the logic from the dataclasses plugin. The following comment is taken verbatim:
+            #
+            # This is a special case, assignment like x: Final = 42 is classified
+            # annotated above, but mypy strips the `Final` turning it into x = 42.
+            # We do not support inferred types in dataclasses, so we can try inferring
+            # type for simple literals, and otherwise require an explicit type
+            # argument for Final[...].
+            typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True)
+            if typ:
+                node.type = typ
+            else:
+                self._api.fail(
+                    'Need type argument for Final[...] with non-literal default in BaseModel',
+                    stmt,
+                )
+                node.type = AnyType(TypeOfAny.from_error)
+
+        alias, has_dynamic_alias = self.get_alias_info(stmt)
+        if has_dynamic_alias and not model_config.populate_by_name and self.plugin_config.warn_required_dynamic_aliases:
+            error_required_dynamic_aliases(self._api, stmt)
+
+        init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt)
+        return PydanticModelField(
+            name=lhs.name,
+            has_dynamic_alias=has_dynamic_alias,
+            has_default=has_default,
+            alias=alias,
+            line=stmt.line,
+            column=stmt.column,
+            type=init_type,
+            info=cls.info,
+        )
+
+    def _infer_dataclass_attr_init_type(self, sym: SymbolTableNode, name: str, context: Context) -> Type | None:
         """Infer __init__ argument type for an attribute.

         In particular, possibly use the signature of __set__.
         """
-        pass
-
-    def add_initializer(self, fields: list[PydanticModelField], config:
-        ModelConfigData, is_settings: bool, is_root_model: bool) ->None:
+        default = sym.type
+        if sym.implicit:
+            return default
+        t = get_proper_type(sym.type)
+
+        # Perform a simple-minded inference from the signature of __set__, if present.
+        # We can't use mypy.checkmember here, since this plugin runs before type checking.
+        # We only support some basic scenarios here, which is hopefully sufficient for
+        # the vast majority of use cases.
+        if not isinstance(t, Instance):
+            return default
+        setter = t.type.get('__set__')
+        if setter:
+            if isinstance(setter.node, FuncDef):
+                super_info = t.type.get_containing_type_info('__set__')
+                assert super_info
+                if setter.type:
+                    setter_type = get_proper_type(map_type_from_supertype(setter.type, t.type, super_info))
+                else:
+                    return AnyType(TypeOfAny.unannotated)
+                if isinstance(setter_type, CallableType) and setter_type.arg_kinds == [
+                    ARG_POS,
+                    ARG_POS,
+                    ARG_POS,
+                ]:
+                    return expand_type_by_instance(setter_type.arg_types[2], t)
+                else:
+                    self._api.fail(f'Unsupported signature for "__set__" in "{t.type.name}"', context)
+            else:
+                self._api.fail(f'Unsupported "__set__" in "{t.type.name}"', context)
+
+        return default
+
+    def add_initializer(
+        self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool, is_root_model: bool
+    ) -> None:
         """Adds a fields-aware `__init__` method to the class.

         The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings.
         """
-        pass
-
-    def add_model_construct_method(self, fields: list[PydanticModelField],
-        config: ModelConfigData, is_settings: bool) ->None:
+        if '__init__' in self._cls.info.names and not self._cls.info.names['__init__'].plugin_generated:
+            return  # Don't generate an __init__ if one already exists
+
+        typed = self.plugin_config.init_typed
+        use_alias = config.populate_by_name is not True
+        requires_dynamic_aliases = bool(config.has_alias_generator and not config.populate_by_name)
+        args = self.get_field_arguments(
+            fields,
+            typed=typed,
+            requires_dynamic_aliases=requires_dynamic_aliases,
+            use_alias=use_alias,
+            is_settings=is_settings,
+            force_typevars_invariant=True,
+        )
+
+        if is_root_model and MYPY_VERSION_TUPLE <= (1, 0, 1):
+            # convert root argument to positional argument
+            # This is needed because mypy support for `dataclass_transform` isn't complete on 1.0.1
+            args[0].kind = ARG_POS if args[0].kind == ARG_NAMED else ARG_OPT
+
+        if is_settings:
+            base_settings_node = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node
+            if '__init__' in base_settings_node.names:
+                base_settings_init_node = base_settings_node.names['__init__'].node
+                if base_settings_init_node is not None and base_settings_init_node.type is not None:
+                    func_type = base_settings_init_node.type
+                    for arg_idx, arg_name in enumerate(func_type.arg_names):
+                        if arg_name.startswith('__') or not arg_name.startswith('_'):
+                            continue
+                        analyzed_variable_type = self._api.anal_type(func_type.arg_types[arg_idx])
+                        variable = Var(arg_name, analyzed_variable_type)
+                        args.append(Argument(variable, analyzed_variable_type, None, ARG_OPT))
+
+        if not self.should_init_forbid_extra(fields, config):
+            var = Var('kwargs')
+            args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))
+
+        add_method(self._api, self._cls, '__init__', args=args, return_type=NoneType())
+
+    def add_model_construct_method(
+        self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool
+    ) -> None:
         """Adds a fully typed `model_construct` classmethod to the class.

         Similar to the fields-aware __init__ method, but always uses the field names (not aliases),
         and does not treat settings fields as optional.
         """
-        pass
-
-    def set_frozen(self, fields: list[PydanticModelField], api:
-        SemanticAnalyzerPluginInterface, frozen: bool) ->None:
+        set_str = self._api.named_type(f'{BUILTINS_NAME}.set', [self._api.named_type(f'{BUILTINS_NAME}.str')])
+        optional_set_str = UnionType([set_str, NoneType()])
+        fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT)
+        with state.strict_optional_set(self._api.options.strict_optional):
+            args = self.get_field_arguments(
+                fields, typed=True, requires_dynamic_aliases=False, use_alias=False, is_settings=is_settings
+            )
+        if not self.should_init_forbid_extra(fields, config):
+            var = Var('kwargs')
+            args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))
+
+        args = [fields_set_argument] + args
+
+        add_method(
+            self._api,
+            self._cls,
+            'model_construct',
+            args=args,
+            return_type=fill_typevars(self._cls.info),
+            is_classmethod=True,
+        )
+
+    def set_frozen(self, fields: list[PydanticModelField], api: SemanticAnalyzerPluginInterface, frozen: bool) -> None:
         """Marks all fields as properties so that attempts to set them trigger mypy errors.

         This is the same approach used by the attrs and dataclasses plugins.
         """
-        pass
-
-    def get_config_update(self, name: str, arg: Expression, lax_extra: bool
-        =False) ->(ModelConfigData | None):
+        info = self._cls.info
+        for field in fields:
+            sym_node = info.names.get(field.name)
+            if sym_node is not None:
+                var = sym_node.node
+                if isinstance(var, Var):
+                    var.is_property = frozen
+                elif isinstance(var, PlaceholderNode) and not self._api.final_iteration:
+                    # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage
+                    self._api.defer()
+                else:  # pragma: no cover
+                    # I don't know whether it's possible to hit this branch, but I've added it for safety
+                    try:
+                        var_str = str(var)
+                    except TypeError:
+                        # This happens for PlaceholderNode; perhaps it will happen for other types in the future..
+                        var_str = repr(var)
+                    detail = f'sym_node.node: {var_str} (of type {var.__class__})'
+                    error_unexpected_behavior(detail, self._api, self._cls)
+            else:
+                var = field.to_var(info, api, use_alias=False)
+                var.info = info
+                var.is_property = frozen
+                var._fullname = info.fullname + '.' + var.name
+                info.names[var.name] = SymbolTableNode(MDEF, var)
+
+    def get_config_update(self, name: str, arg: Expression, lax_extra: bool = False) -> ModelConfigData | None:
         """Determines the config update due to a single kwarg in the ConfigDict definition.

         Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int)
         """
-        pass
+        if name not in self.tracked_config_fields:
+            return None
+        if name == 'extra':
+            if isinstance(arg, StrExpr):
+                forbid_extra = arg.value == 'forbid'
+            elif isinstance(arg, MemberExpr):
+                forbid_extra = arg.name == 'forbid'
+            else:
+                if not lax_extra:
+                    # Only emit an error for other types of `arg` (e.g., `NameExpr`, `ConditionalExpr`, etc.) when
+                    # reading from a config class, etc. If a ConfigDict is used, then we don't want to emit an error
+                    # because you'll get type checking from the ConfigDict itself.
+                    #
+                    # It would be nice if we could introspect the types better otherwise, but I don't know what the API
+                    # is to evaluate an expr into its type and then check if that type is compatible with the expected
+                    # type. Note that you can still get proper type checking via: `model_config = ConfigDict(...)`, just
+                    # if you don't use an explicit string, the plugin won't be able to infer whether extra is forbidden.
+                    error_invalid_config_value(name, self._api, arg)
+                return None
+            return ModelConfigData(forbid_extra=forbid_extra)
+        if name == 'alias_generator':
+            has_alias_generator = True
+            if isinstance(arg, NameExpr) and arg.fullname == 'builtins.None':
+                has_alias_generator = False
+            return ModelConfigData(has_alias_generator=has_alias_generator)
+        if isinstance(arg, NameExpr) and arg.fullname in ('builtins.True', 'builtins.False'):
+            return ModelConfigData(**{name: arg.fullname == 'builtins.True'})
+        error_invalid_config_value(name, self._api, arg)
+        return None

     @staticmethod
-    def get_has_default(stmt: AssignmentStmt) ->bool:
+    def get_has_default(stmt: AssignmentStmt) -> bool:
         """Returns a boolean indicating whether the field defined in `stmt` is a required field."""
-        pass
+        expr = stmt.rvalue
+        if isinstance(expr, TempNode):
+            # TempNode means annotation-only, so has no default
+            return False
+        if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME:
+            # The "default value" is a call to `Field`; at this point, the field has a default if and only if:
+            # * there is a positional argument that is not `...`
+            # * there is a keyword argument named "default" that is not `...`
+            # * there is a "default_factory" that is not `None`
+            for arg, name in zip(expr.args, expr.arg_names):
+                # If name is None, then this arg is the default because it is the only positional argument.
+                if name is None or name == 'default':
+                    return arg.__class__ is not EllipsisExpr
+                if name == 'default_factory':
+                    return not (isinstance(arg, NameExpr) and arg.fullname == 'builtins.None')
+            return False
+        # Has no default if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`)
+        return not isinstance(expr, EllipsisExpr)

     @staticmethod
-    def get_alias_info(stmt: AssignmentStmt) ->tuple[str | None, bool]:
+    def get_alias_info(stmt: AssignmentStmt) -> tuple[str | None, bool]:
         """Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`.

         `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal.
         If `has_dynamic_alias` is True, `alias` will be None.
         """
-        pass
-
-    def get_field_arguments(self, fields: list[PydanticModelField], typed:
-        bool, use_alias: bool, requires_dynamic_aliases: bool, is_settings:
-        bool, force_typevars_invariant: bool=False) ->list[Argument]:
+        expr = stmt.rvalue
+        if isinstance(expr, TempNode):
+            # TempNode means annotation-only
+            return None, False
+
+        if not (
+            isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME
+        ):
+            # Assigned value is not a call to pydantic.fields.Field
+            return None, False
+
+        for i, arg_name in enumerate(expr.arg_names):
+            if arg_name != 'alias':
+                continue
+            arg = expr.args[i]
+            if isinstance(arg, StrExpr):
+                return arg.value, False
+            else:
+                return None, True
+        return None, False
+
+    def get_field_arguments(
+        self,
+        fields: list[PydanticModelField],
+        typed: bool,
+        use_alias: bool,
+        requires_dynamic_aliases: bool,
+        is_settings: bool,
+        force_typevars_invariant: bool = False,
+    ) -> list[Argument]:
         """Helper function used during the construction of the `__init__` and `model_construct` method signatures.

         Returns a list of mypy Argument instances for use in the generated signatures.
         """
-        pass
-
-    def should_init_forbid_extra(self, fields: list[PydanticModelField],
-        config: ModelConfigData) ->bool:
+        info = self._cls.info
+        arguments = [
+            field.to_argument(
+                info,
+                typed=typed,
+                force_optional=requires_dynamic_aliases or is_settings,
+                use_alias=use_alias,
+                api=self._api,
+                force_typevars_invariant=force_typevars_invariant,
+            )
+            for field in fields
+            if not (use_alias and field.has_dynamic_alias)
+        ]
+        return arguments
+
+    def should_init_forbid_extra(self, fields: list[PydanticModelField], config: ModelConfigData) -> bool:
         """Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature.

         We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to,
         *unless* a required dynamic alias is present (since then we can't determine a valid signature).
         """
-        pass
+        if not config.populate_by_name:
+            if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)):
+                return False
+        if config.forbid_extra:
+            return True
+        return self.plugin_config.init_forbid_extra

     @staticmethod
-    def is_dynamic_alias_present(fields: list[PydanticModelField],
-        has_alias_generator: bool) ->bool:
+    def is_dynamic_alias_present(fields: list[PydanticModelField], has_alias_generator: bool) -> bool:
         """Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be
         determined during static analysis.
         """
-        pass
+        for field in fields:
+            if field.has_dynamic_alias:
+                return True
+        if has_alias_generator:
+            for field in fields:
+                if field.alias is None:
+                    return True
+        return False


 class ModelConfigData:
     """Pydantic mypy plugin model config class."""

-    def __init__(self, forbid_extra: (bool | None)=None, frozen: (bool |
-        None)=None, from_attributes: (bool | None)=None, populate_by_name:
-        (bool | None)=None, has_alias_generator: (bool | None)=None):
+    def __init__(
+        self,
+        forbid_extra: bool | None = None,
+        frozen: bool | None = None,
+        from_attributes: bool | None = None,
+        populate_by_name: bool | None = None,
+        has_alias_generator: bool | None = None,
+    ):
         self.forbid_extra = forbid_extra
         self.frozen = frozen
         self.from_attributes = from_attributes
         self.populate_by_name = populate_by_name
         self.has_alias_generator = has_alias_generator

-    def get_values_dict(self) ->dict[str, Any]:
+    def get_values_dict(self) -> dict[str, Any]:
         """Returns a dict of Pydantic model config names to their values.

         It includes the config if config value is not `None`.
         """
-        pass
+        return {k: v for k, v in self.__dict__.items() if v is not None}

-    def update(self, config: (ModelConfigData | None)) ->None:
+    def update(self, config: ModelConfigData | None) -> None:
         """Update Pydantic model config values."""
-        pass
+        if config is None:
+            return
+        for k, v in config.get_values_dict().items():
+            setattr(self, k, v)

-    def setdefault(self, key: str, value: Any) ->None:
+    def setdefault(self, key: str, value: Any) -> None:
         """Set default value for Pydantic model config if config value is `None`."""
-        pass
+        if getattr(self, key) is None:
+            setattr(self, key, value)


-ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_attributes call',
-    'Pydantic')
+ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_attributes call', 'Pydantic')
 ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic')
-ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed',
-    'Pydantic')
-ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior',
-    'Pydantic')
-ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed',
-    'Pydantic')
-ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults',
-    'Pydantic')
-ERROR_EXTRA_FIELD_ROOT_MODEL = ErrorCode('pydantic-field',
-    'Extra field on RootModel subclass', 'Pydantic')
-
-
-def error_from_attributes(model_name: str, api: CheckerPluginInterface,
-    context: Context) ->None:
+ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic')
+ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic')
+ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic')
+ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic')
+ERROR_EXTRA_FIELD_ROOT_MODEL = ErrorCode('pydantic-field', 'Extra field on RootModel subclass', 'Pydantic')
+
+
+def error_from_attributes(model_name: str, api: CheckerPluginInterface, context: Context) -> None:
     """Emits an error when the model does not have `from_attributes=True`."""
-    pass
+    api.fail(f'"{model_name}" does not have from_attributes=True', context, code=ERROR_ORM)


-def error_invalid_config_value(name: str, api:
-    SemanticAnalyzerPluginInterface, context: Context) ->None:
+def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None:
     """Emits an error when the config value is invalid."""
-    pass
+    api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG)


-def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface,
-    context: Context) ->None:
+def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
     """Emits required dynamic aliases error.

     This will be called when `warn_required_dynamic_aliases=True`.
     """
-    pass
+    api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS)


-def error_unexpected_behavior(detail: str, api: (CheckerPluginInterface |
-    SemanticAnalyzerPluginInterface), context: Context) ->None:
+def error_unexpected_behavior(
+    detail: str, api: CheckerPluginInterface | SemanticAnalyzerPluginInterface, context: Context
+) -> None:  # pragma: no cover
     """Emits unexpected behavior error."""
-    pass
+    # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path
+    link = 'https://github.com/pydantic/pydantic/issues/new/choose'
+    full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n'
+    full_message += f'Please consider reporting this bug at {link} so we can try to fix it!'
+    api.fail(full_message, context, code=ERROR_UNEXPECTED)


-def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context
-    ) ->None:
+def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
     """Emits an error when there is an untyped field in the model."""
-    pass
+    api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED)


-def error_extra_fields_on_root_model(api: CheckerPluginInterface, context:
-    Context) ->None:
+def error_extra_fields_on_root_model(api: CheckerPluginInterface, context: Context) -> None:
     """Emits an error when there is more than just a root field defined for a subclass of RootModel."""
-    pass
+    api.fail('Only `root` is allowed as a field of a `RootModel`', context, code=ERROR_EXTRA_FIELD_ROOT_MODEL)


-def error_default_and_default_factory_specified(api: CheckerPluginInterface,
-    context: Context) ->None:
+def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None:
     """Emits an error when `Field` has both `default` and `default_factory` together."""
-    pass
-
-
-def add_method(api: (SemanticAnalyzerPluginInterface |
-    CheckerPluginInterface), cls: ClassDef, name: str, args: list[Argument],
-    return_type: Type, self_type: (Type | None)=None, tvar_def: (TypeVarDef |
-    None)=None, is_classmethod: bool=False) ->None:
+    api.fail('Field default and default_factory cannot be specified together', context, code=ERROR_FIELD_DEFAULTS)
+
+
+def add_method(
+    api: SemanticAnalyzerPluginInterface | CheckerPluginInterface,
+    cls: ClassDef,
+    name: str,
+    args: list[Argument],
+    return_type: Type,
+    self_type: Type | None = None,
+    tvar_def: TypeVarDef | None = None,
+    is_classmethod: bool = False,
+) -> None:
     """Very closely related to `mypy.plugins.common.add_method_to_class`, with a few pydantic-specific changes."""
-    pass
-
-
-def parse_toml(config_file: str) ->(dict[str, Any] | None):
+    info = cls.info
+
+    # First remove any previously generated methods with the same name
+    # to avoid clashes and problems in the semantic analyzer.
+    if name in info.names:
+        sym = info.names[name]
+        if sym.plugin_generated and isinstance(sym.node, FuncDef):
+            cls.defs.body.remove(sym.node)  # pragma: no cover
+
+    if isinstance(api, SemanticAnalyzerPluginInterface):
+        function_type = api.named_type('builtins.function')
+    else:
+        function_type = api.named_generic_type('builtins.function', [])
+
+    if is_classmethod:
+        self_type = self_type or TypeType(fill_typevars(info))
+        first = [Argument(Var('_cls'), self_type, None, ARG_POS, True)]
+    else:
+        self_type = self_type or fill_typevars(info)
+        # `self` is positional *ONLY* here, but this can't be expressed
+        # fully in the mypy internal API. ARG_POS is the closest we can get.
+        # Using ARG_POS will, however, give mypy errors if a `self` field
+        # is present on a model:
+        #
+        #     Name "self" already defined (possibly by an import)  [no-redef]
+        #
+        # As a workaround, we give this argument a name that will
+        # never conflict. By its positional nature, this name will not
+        # be used or exposed to users.
+        first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)]
+    args = first + args
+
+    arg_types, arg_names, arg_kinds = [], [], []
+    for arg in args:
+        assert arg.type_annotation, 'All arguments must be fully typed.'
+        arg_types.append(arg.type_annotation)
+        arg_names.append(arg.variable.name)
+        arg_kinds.append(arg.kind)
+
+    signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
+    if tvar_def:
+        signature.variables = [tvar_def]
+
+    func = FuncDef(name, args, Block([PassStmt()]))
+    func.info = info
+    func.type = set_callable_name(signature, func)
+    func.is_class = is_classmethod
+    func._fullname = info.fullname + '.' + name
+    func.line = info.line
+
+    # NOTE: we would like the plugin generated node to dominate, but we still
+    # need to keep any existing definitions so they get semantically analyzed.
+    if name in info.names:
+        # Get a nice unique name instead.
+        r_name = get_unique_redefinition_name(name, info.names)
+        info.names[r_name] = info.names[name]
+
+    # Add decorator for is_classmethod
+    # The dataclasses plugin claims this is unnecessary for classmethods, but not including it results in a
+    # signature incompatible with the superclass, which causes mypy errors to occur for every subclass of BaseModel.
+    if is_classmethod:
+        func.is_decorated = True
+        v = Var(name, func.type)
+        v.info = info
+        v._fullname = func._fullname
+        v.is_classmethod = True
+        dec = Decorator(func, [NameExpr('classmethod')], v)
+        dec.line = info.line
+        sym = SymbolTableNode(MDEF, dec)
+    else:
+        sym = SymbolTableNode(MDEF, func)
+    sym.plugin_generated = True
+    info.names[name] = sym
+
+    info.defn.defs.body.append(func)
+
+
+def parse_toml(config_file: str) -> dict[str, Any] | None:
     """Returns a dict of config keys to values.

     It reads configs from toml file and returns `None` if the file is not a toml file.
     """
-    pass
+    if not config_file.endswith('.toml'):
+        return None
+
+    if sys.version_info >= (3, 11):
+        import tomllib as toml_
+    else:
+        try:
+            import tomli as toml_
+        except ImportError:  # pragma: no cover
+            import warnings
+
+            warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.')
+            return None
+
+    with open(config_file, 'rb') as rf:
+        return toml_.load(rf)
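
The `parse_toml` helper above only reads `.toml` files and returns `None` otherwise. As a rough illustration (the `tool`/`pydantic-mypy` lookup keys below are assumptions for illustration, not part of the patch), a caller could pull the plugin's settings out of `pyproject.toml` like this:

```python
# Hedged sketch of using parse_toml to fetch mypy-plugin settings.
# The nested keys are assumed; parse_toml itself only returns the parsed dict or None.
settings = parse_toml('pyproject.toml')
if settings is not None:
    plugin_settings = settings.get('tool', {}).get('pydantic-mypy', {})
    init_forbid_extra = plugin_settings.get('init_forbid_extra', False)
```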
diff --git a/pydantic/networks.py b/pydantic/networks.py
index ae44dc9a8..830ab3992 100644
--- a/pydantic/networks.py
+++ b/pydantic/networks.py
@@ -1,28 +1,56 @@
 """The networks module contains types for common network-related fields."""
+
 from __future__ import annotations as _annotations
+
 import dataclasses as _dataclasses
 import re
 from importlib.metadata import version
 from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
 from typing import TYPE_CHECKING, Any
+
 from pydantic_core import MultiHostUrl, PydanticCustomError, Url, core_schema
 from typing_extensions import Annotated, Self, TypeAlias
+
 from ._internal import _fields, _repr, _schema_generation_shared
 from ._migration import getattr_migration
 from .annotated_handlers import GetCoreSchemaHandler
 from .json_schema import JsonSchemaValue
+
 if TYPE_CHECKING:
     import email_validator
-    NetworkType: TypeAlias = (
-        'str | bytes | int | tuple[str | bytes | int, str | int]')
+
+    NetworkType: TypeAlias = 'str | bytes | int | tuple[str | bytes | int, str | int]'
+
 else:
     email_validator = None
-__all__ = ['AnyUrl', 'AnyHttpUrl', 'FileUrl', 'FtpUrl', 'HttpUrl',
-    'WebsocketUrl', 'AnyWebsocketUrl', 'UrlConstraints', 'EmailStr',
-    'NameEmail', 'IPvAnyAddress', 'IPvAnyInterface', 'IPvAnyNetwork',
-    'PostgresDsn', 'CockroachDsn', 'AmqpDsn', 'RedisDsn', 'MongoDsn',
-    'KafkaDsn', 'NatsDsn', 'validate_email', 'MySQLDsn', 'MariaDBDsn',
-    'ClickHouseDsn']
+
+
+__all__ = [
+    'AnyUrl',
+    'AnyHttpUrl',
+    'FileUrl',
+    'FtpUrl',
+    'HttpUrl',
+    'WebsocketUrl',
+    'AnyWebsocketUrl',
+    'UrlConstraints',
+    'EmailStr',
+    'NameEmail',
+    'IPvAnyAddress',
+    'IPvAnyInterface',
+    'IPvAnyNetwork',
+    'PostgresDsn',
+    'CockroachDsn',
+    'AmqpDsn',
+    'RedisDsn',
+    'MongoDsn',
+    'KafkaDsn',
+    'NatsDsn',
+    'validate_email',
+    'MySQLDsn',
+    'MariaDBDsn',
+    'ClickHouseDsn',
+]


 @_dataclasses.dataclass
@@ -37,6 +65,7 @@ class UrlConstraints(_fields.PydanticMetadata):
         default_port: The default port. Defaults to `None`.
         default_path: The default path. Defaults to `None`.
     """
+
     max_length: int | None = None
     allowed_schemes: list[str] | None = None
     host_required: bool | None = None
@@ -44,10 +73,17 @@ class UrlConstraints(_fields.PydanticMetadata):
     default_port: int | None = None
     default_path: str | None = None

-    def __hash__(self) ->int:
-        return hash((self.max_length, tuple(self.allowed_schemes) if self.
-            allowed_schemes is not None else None, self.host_required, self
-            .default_host, self.default_port, self.default_path))
+    def __hash__(self) -> int:
+        return hash(
+            (
+                self.max_length,
+                tuple(self.allowed_schemes) if self.allowed_schemes is not None else None,
+                self.host_required,
+                self.default_host,
+                self.default_port,
+                self.default_path,
+            )
+        )


 AnyUrl = Url
@@ -75,8 +111,7 @@ AnyHttpUrl = Annotated[Url, UrlConstraints(allowed_schemes=['http', 'https'])]
 * TLD not required
 * Host required
 """
-HttpUrl = Annotated[Url, UrlConstraints(max_length=2083, allowed_schemes=[
-    'http', 'https'])]
+HttpUrl = Annotated[Url, UrlConstraints(max_length=2083, allowed_schemes=['http', 'https'])]
 """A type that will accept any http or https URL.

 * TLD not required
@@ -159,8 +194,7 @@ AnyWebsocketUrl = Annotated[Url, UrlConstraints(allowed_schemes=['ws', 'wss'])]
 * TLD not required
 * Host required
 """
-WebsocketUrl = Annotated[Url, UrlConstraints(max_length=2083,
-    allowed_schemes=['ws', 'wss'])]
+WebsocketUrl = Annotated[Url, UrlConstraints(max_length=2083, allowed_schemes=['ws', 'wss'])]
 """A type that will accept any ws or wss URL.

 * TLD not required
@@ -178,11 +212,23 @@ FtpUrl = Annotated[Url, UrlConstraints(allowed_schemes=['ftp'])]
 * TLD not required
 * Host required
 """
-PostgresDsn = Annotated[MultiHostUrl, UrlConstraints(host_required=True,
-    allowed_schemes=['postgres', 'postgresql', 'postgresql+asyncpg',
-    'postgresql+pg8000', 'postgresql+psycopg', 'postgresql+psycopg2',
-    'postgresql+psycopg2cffi', 'postgresql+py-postgresql',
-    'postgresql+pygresql'])]
+PostgresDsn = Annotated[
+    MultiHostUrl,
+    UrlConstraints(
+        host_required=True,
+        allowed_schemes=[
+            'postgres',
+            'postgresql',
+            'postgresql+asyncpg',
+            'postgresql+pg8000',
+            'postgresql+psycopg',
+            'postgresql+psycopg2',
+            'postgresql+psycopg2cffi',
+            'postgresql+py-postgresql',
+            'postgresql+pygresql',
+        ],
+    ),
+]
 """A type that will accept any Postgres DSN.

 * User info required
@@ -241,9 +287,18 @@ except ValidationError as e:
     '''
 ```
 """
-CockroachDsn = Annotated[Url, UrlConstraints(host_required=True,
-    allowed_schemes=['cockroachdb', 'cockroachdb+psycopg2',
-    'cockroachdb+asyncpg'])]
+
+CockroachDsn = Annotated[
+    Url,
+    UrlConstraints(
+        host_required=True,
+        allowed_schemes=[
+            'cockroachdb',
+            'cockroachdb+psycopg2',
+            'cockroachdb+asyncpg',
+        ],
+    ),
+]
 """A type that will accept any Cockroach DSN.

 * User info required
@@ -257,16 +312,17 @@ AmqpDsn = Annotated[Url, UrlConstraints(allowed_schemes=['amqp', 'amqps'])]
 * TLD not required
 * Host required
 """
-RedisDsn = Annotated[Url, UrlConstraints(allowed_schemes=['redis', 'rediss'
-    ], default_host='localhost', default_port=6379, default_path='/0')]
+RedisDsn = Annotated[
+    Url,
+    UrlConstraints(allowed_schemes=['redis', 'rediss'], default_host='localhost', default_port=6379, default_path='/0'),
+]
 """A type that will accept any Redis DSN.

 * User info required
 * TLD not required
 * Host required (e.g., `rediss://:pass@localhost`)
 """
-MongoDsn = Annotated[MultiHostUrl, UrlConstraints(allowed_schemes=[
-    'mongodb', 'mongodb+srv'], default_port=27017)]
+MongoDsn = Annotated[MultiHostUrl, UrlConstraints(allowed_schemes=['mongodb', 'mongodb+srv'], default_port=27017)]
 """A type that will accept any MongoDB DSN.

 * User info not required
@@ -274,16 +330,16 @@ MongoDsn = Annotated[MultiHostUrl, UrlConstraints(allowed_schemes=[
 * Port not required
 * User info may be passed without user part (e.g., `mongodb://mongodb0.example.com:27017`).
 """
-KafkaDsn = Annotated[Url, UrlConstraints(allowed_schemes=['kafka'],
-    default_host='localhost', default_port=9092)]
+KafkaDsn = Annotated[Url, UrlConstraints(allowed_schemes=['kafka'], default_host='localhost', default_port=9092)]
 """A type that will accept any Kafka DSN.

 * User info required
 * TLD not required
 * Host required
 """
-NatsDsn = Annotated[MultiHostUrl, UrlConstraints(allowed_schemes=['nats',
-    'tls', 'ws'], default_host='localhost', default_port=4222)]
+NatsDsn = Annotated[
+    MultiHostUrl, UrlConstraints(allowed_schemes=['nats', 'tls', 'ws'], default_host='localhost', default_port=4222)
+]
 """A type that will accept any NATS DSN.

 NATS is a connective technology built for the ever increasingly hyper-connected world.
@@ -291,38 +347,71 @@ It is a single technology that enables applications to securely communicate acro
 any combination of cloud vendors, on-premise, edge, web and mobile, and devices.
 More: https://nats.io
 """
-MySQLDsn = Annotated[Url, UrlConstraints(allowed_schemes=['mysql',
-    'mysql+mysqlconnector', 'mysql+aiomysql', 'mysql+asyncmy',
-    'mysql+mysqldb', 'mysql+pymysql', 'mysql+cymysql', 'mysql+pyodbc'],
-    default_port=3306)]
+MySQLDsn = Annotated[
+    Url,
+    UrlConstraints(
+        allowed_schemes=[
+            'mysql',
+            'mysql+mysqlconnector',
+            'mysql+aiomysql',
+            'mysql+asyncmy',
+            'mysql+mysqldb',
+            'mysql+pymysql',
+            'mysql+cymysql',
+            'mysql+pyodbc',
+        ],
+        default_port=3306,
+    ),
+]
 """A type that will accept any MySQL DSN.

 * User info required
 * TLD not required
 * Host required
 """
-MariaDBDsn = Annotated[Url, UrlConstraints(allowed_schemes=['mariadb',
-    'mariadb+mariadbconnector', 'mariadb+pymysql'], default_port=3306)]
+MariaDBDsn = Annotated[
+    Url,
+    UrlConstraints(
+        allowed_schemes=['mariadb', 'mariadb+mariadbconnector', 'mariadb+pymysql'],
+        default_port=3306,
+    ),
+]
 """A type that will accept any MariaDB DSN.

 * User info required
 * TLD not required
 * Host required
 """
-ClickHouseDsn = Annotated[Url, UrlConstraints(allowed_schemes=[
-    'clickhouse+native', 'clickhouse+asynch'], default_host='localhost',
-    default_port=9000)]
+ClickHouseDsn = Annotated[
+    Url,
+    UrlConstraints(
+        allowed_schemes=['clickhouse+native', 'clickhouse+asynch'],
+        default_host='localhost',
+        default_port=9000,
+    ),
+]
 """A type that will accept any ClickHouse DSN.

 * User info required
 * TLD not required
 * Host required
 """
+
+
+def import_email_validator() -> None:
+    global email_validator
+    try:
+        import email_validator
+    except ImportError as e:
+        raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') from e
+    if not version('email-validator').partition('.')[0] == '2':
+        raise ImportError('email-validator version >= 2.0 required, run pip install -U email-validator')
+
+
 if TYPE_CHECKING:
     EmailStr = Annotated[str, ...]
 else:

-
     class EmailStr:
         """
         Info:
@@ -344,23 +433,29 @@ else:
         print(Model(email='contact@mail.com'))
         #> email='contact@mail.com'
         ```
-        """
+        """  # noqa: D212

         @classmethod
-        def __get_pydantic_core_schema__(cls, _source: type[Any], _handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
+        def __get_pydantic_core_schema__(
+            cls,
+            _source: type[Any],
+            _handler: GetCoreSchemaHandler,
+        ) -> core_schema.CoreSchema:
             import_email_validator()
-            return core_schema.no_info_after_validator_function(cls.
-                _validate, core_schema.str_schema())
+            return core_schema.no_info_after_validator_function(cls._validate, core_schema.str_schema())

         @classmethod
-        def __get_pydantic_json_schema__(cls, core_schema: core_schema.
-            CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler
-            ) ->JsonSchemaValue:
+        def __get_pydantic_json_schema__(
+            cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler
+        ) -> JsonSchemaValue:
             field_schema = handler(core_schema)
             field_schema.update(type='string', format='email')
             return field_schema

+        @classmethod
+        def _validate(cls, input_value: str, /) -> str:
+            return validate_email(input_value)[1]
+

 class NameEmail(_repr.Representation):
     """
@@ -396,40 +491,58 @@ class NameEmail(_repr.Representation):
     print(user.email.name)
     #> fred.bloggs
     ```
-    """
+    """  # noqa: D212
+
     __slots__ = 'name', 'email'

     def __init__(self, name: str, email: str):
         self.name = name
         self.email = email

-    def __eq__(self, other: Any) ->bool:
-        return isinstance(other, NameEmail) and (self.name, self.email) == (
-            other.name, other.email)
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, NameEmail) and (self.name, self.email) == (other.name, other.email)

     @classmethod
-    def __get_pydantic_json_schema__(cls, core_schema: core_schema.
-        CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler
-        ) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(
+        cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler
+    ) -> JsonSchemaValue:
         field_schema = handler(core_schema)
         field_schema.update(type='string', format='name-email')
         return field_schema

     @classmethod
-    def __get_pydantic_core_schema__(cls, _source: type[Any], _handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
+    def __get_pydantic_core_schema__(
+        cls,
+        _source: type[Any],
+        _handler: GetCoreSchemaHandler,
+    ) -> core_schema.CoreSchema:
         import_email_validator()
-        return core_schema.no_info_after_validator_function(cls._validate,
-            core_schema.json_or_python_schema(json_schema=core_schema.
-            str_schema(), python_schema=core_schema.union_schema([
-            core_schema.is_instance_schema(cls), core_schema.str_schema()],
-            custom_error_type='name_email_type', custom_error_message=
-            'Input is not a valid NameEmail'), serialization=core_schema.
-            to_string_ser_schema()))
-
-    def __str__(self) ->str:
+
+        return core_schema.no_info_after_validator_function(
+            cls._validate,
+            core_schema.json_or_python_schema(
+                json_schema=core_schema.str_schema(),
+                python_schema=core_schema.union_schema(
+                    [core_schema.is_instance_schema(cls), core_schema.str_schema()],
+                    custom_error_type='name_email_type',
+                    custom_error_message='Input is not a valid NameEmail',
+                ),
+                serialization=core_schema.to_string_ser_schema(),
+            ),
+        )
+
+    @classmethod
+    def _validate(cls, input_value: Self | str, /) -> Self:
+        if isinstance(input_value, str):
+            name, email = validate_email(input_value)
+            return cls(name, email)
+        else:
+            return input_value
+
+    def __str__(self) -> str:
         if '@' in self.name:
             return f'"{self.name}" <{self.email}>'
+
         return f'{self.name} <{self.email}>'


@@ -462,109 +575,149 @@ class IPvAnyAddress:
         '''
     ```
     """
+
     __slots__ = ()

-    def __new__(cls, value: Any) ->(IPv4Address | IPv6Address):
+    def __new__(cls, value: Any) -> IPv4Address | IPv6Address:
         """Validate an IPv4 or IPv6 address."""
         try:
             return IPv4Address(value)
         except ValueError:
             pass
+
         try:
             return IPv6Address(value)
         except ValueError:
-            raise PydanticCustomError('ip_any_address',
-                'value is not a valid IPv4 or IPv6 address')
+            raise PydanticCustomError('ip_any_address', 'value is not a valid IPv4 or IPv6 address')

     @classmethod
-    def __get_pydantic_json_schema__(cls, core_schema: core_schema.
-        CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler
-        ) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(
+        cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler
+    ) -> JsonSchemaValue:
         field_schema = {}
         field_schema.update(type='string', format='ipvanyaddress')
         return field_schema

     @classmethod
-    def __get_pydantic_core_schema__(cls, _source: type[Any], _handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
-        return core_schema.no_info_plain_validator_function(cls._validate,
-            serialization=core_schema.to_string_ser_schema())
+    def __get_pydantic_core_schema__(
+        cls,
+        _source: type[Any],
+        _handler: GetCoreSchemaHandler,
+    ) -> core_schema.CoreSchema:
+        return core_schema.no_info_plain_validator_function(
+            cls._validate, serialization=core_schema.to_string_ser_schema()
+        )
+
+    @classmethod
+    def _validate(cls, input_value: Any, /) -> IPv4Address | IPv6Address:
+        return cls(input_value)  # type: ignore[return-value]


 class IPvAnyInterface:
     """Validate an IPv4 or IPv6 interface."""
+
     __slots__ = ()

-    def __new__(cls, value: NetworkType) ->(IPv4Interface | IPv6Interface):
+    def __new__(cls, value: NetworkType) -> IPv4Interface | IPv6Interface:
         """Validate an IPv4 or IPv6 interface."""
         try:
             return IPv4Interface(value)
         except ValueError:
             pass
+
         try:
             return IPv6Interface(value)
         except ValueError:
-            raise PydanticCustomError('ip_any_interface',
-                'value is not a valid IPv4 or IPv6 interface')
+            raise PydanticCustomError('ip_any_interface', 'value is not a valid IPv4 or IPv6 interface')

     @classmethod
-    def __get_pydantic_json_schema__(cls, core_schema: core_schema.
-        CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler
-        ) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(
+        cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler
+    ) -> JsonSchemaValue:
         field_schema = {}
         field_schema.update(type='string', format='ipvanyinterface')
         return field_schema

     @classmethod
-    def __get_pydantic_core_schema__(cls, _source: type[Any], _handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
-        return core_schema.no_info_plain_validator_function(cls._validate,
-            serialization=core_schema.to_string_ser_schema())
+    def __get_pydantic_core_schema__(
+        cls,
+        _source: type[Any],
+        _handler: GetCoreSchemaHandler,
+    ) -> core_schema.CoreSchema:
+        return core_schema.no_info_plain_validator_function(
+            cls._validate, serialization=core_schema.to_string_ser_schema()
+        )
+
+    @classmethod
+    def _validate(cls, input_value: NetworkType, /) -> IPv4Interface | IPv6Interface:
+        return cls(input_value)  # type: ignore[return-value]


 IPvAnyNetworkType: TypeAlias = 'IPv4Network | IPv6Network'
+
 if TYPE_CHECKING:
     IPvAnyNetwork = IPvAnyNetworkType
 else:

-
     class IPvAnyNetwork:
         """Validate an IPv4 or IPv6 network."""
+
         __slots__ = ()

-        def __new__(cls, value: NetworkType) ->IPvAnyNetworkType:
+        def __new__(cls, value: NetworkType) -> IPvAnyNetworkType:
             """Validate an IPv4 or IPv6 network."""
+            # Assume IP Network is defined with a default value for `strict` argument.
+            # Define your own class if you want to specify network address check strictness.
             try:
                 return IPv4Network(value)
             except ValueError:
                 pass
+
             try:
                 return IPv6Network(value)
             except ValueError:
-                raise PydanticCustomError('ip_any_network',
-                    'value is not a valid IPv4 or IPv6 network')
+                raise PydanticCustomError('ip_any_network', 'value is not a valid IPv4 or IPv6 network')

         @classmethod
-        def __get_pydantic_json_schema__(cls, core_schema: core_schema.
-            CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler
-            ) ->JsonSchemaValue:
+        def __get_pydantic_json_schema__(
+            cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler
+        ) -> JsonSchemaValue:
             field_schema = {}
             field_schema.update(type='string', format='ipvanynetwork')
             return field_schema

         @classmethod
-        def __get_pydantic_core_schema__(cls, _source: type[Any], _handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
-            return core_schema.no_info_plain_validator_function(cls.
-                _validate, serialization=core_schema.to_string_ser_schema())
+        def __get_pydantic_core_schema__(
+            cls,
+            _source: type[Any],
+            _handler: GetCoreSchemaHandler,
+        ) -> core_schema.CoreSchema:
+            return core_schema.no_info_plain_validator_function(
+                cls._validate, serialization=core_schema.to_string_ser_schema()
+            )
+
+        @classmethod
+        def _validate(cls, input_value: NetworkType, /) -> IPvAnyNetworkType:
+            return cls(input_value)  # type: ignore[return-value]
+
+
+def _build_pretty_email_regex() -> re.Pattern[str]:
+    name_chars = r'[\w!#$%&\'*+\-/=?^_`{|}~]'
+    unquoted_name_group = rf'((?:{name_chars}+\s+)*{name_chars}+)'
+    quoted_name_group = r'"((?:[^"]|\")+)"'
+    email_group = r'<\s*(.+)\s*>'
+    return re.compile(rf'\s*(?:{unquoted_name_group}|{quoted_name_group})?\s*{email_group}\s*')
+
+
 pretty_email_regex = _build_pretty_email_regex()
+
 MAX_EMAIL_LENGTH = 2048
 """Maximum length for an email.
 A somewhat arbitrary but very generous number compared to what is allowed by most implementations.
 """


-def validate_email(value: str) ->tuple[str, str]:
+def validate_email(value: str) -> tuple[str, str]:
     """Email address validation using [email-validator](https://pypi.org/project/email-validator/).

     Note:
@@ -574,7 +727,35 @@ def validate_email(value: str) ->tuple[str, str]:
         * `"John Doe <local_part@domain.com>"` style "pretty" email addresses are processed.
         * Spaces are stripped from the beginning and end of addresses, but no error is raised.
     """
-    pass
+    if email_validator is None:
+        import_email_validator()
+
+    if len(value) > MAX_EMAIL_LENGTH:
+        raise PydanticCustomError(
+            'value_error',
+            'value is not a valid email address: {reason}',
+            {'reason': f'Length must not exceed {MAX_EMAIL_LENGTH} characters'},
+        )
+
+    m = pretty_email_regex.fullmatch(value)
+    name: str | None = None
+    if m:
+        unquoted_name, quoted_name, value = m.groups()
+        name = unquoted_name or quoted_name
+
+    email = value.strip()
+
+    try:
+        parts = email_validator.validate_email(email, check_deliverability=False)
+    except email_validator.EmailNotValidError as e:
+        raise PydanticCustomError(
+            'value_error', 'value is not a valid email address: {reason}', {'reason': str(e.args[0])}
+        ) from e
+
+    email = parts.normalized
+    assert email is not None
+    name = name or parts.local_part
+    return name, email


 __getattr__ = getattr_migration(__name__)
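
A short usage sketch for the `validate_email` implementation above (assuming `email-validator >= 2.0` is installed): the display name is pulled from "pretty" addresses when present, otherwise the local part of the address is used.

```python
from pydantic.networks import validate_email

# "pretty" address: the display name is extracted and returned separately
name, email = validate_email('John Doe <john.doe@example.com>')
print(name, email)   # John Doe john.doe@example.com

# plain address: the local part is used as the name
name, email = validate_email('john.doe@example.com')
print(name, email)   # john.doe john.doe@example.com
```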
diff --git a/pydantic/parse.py b/pydantic/parse.py
index 43b80835b..68b7f0464 100644
--- a/pydantic/parse.py
+++ b/pydantic/parse.py
@@ -1,3 +1,5 @@
 """The `parse` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/plugin/_loader.py b/pydantic/plugin/_loader.py
index 9e984adf4..2f90dc541 100644
--- a/pydantic/plugin/_loader.py
+++ b/pydantic/plugin/_loader.py
@@ -1,18 +1,56 @@
 from __future__ import annotations
+
 import importlib.metadata as importlib_metadata
 import os
 import warnings
 from typing import TYPE_CHECKING, Final, Iterable
+
 if TYPE_CHECKING:
     from . import PydanticPluginProtocol
+
+
 PYDANTIC_ENTRY_POINT_GROUP: Final[str] = 'pydantic'
+
+# cache of plugins
 _plugins: dict[str, PydanticPluginProtocol] | None = None
+# Return no plugins while loading plugins, to avoid recursion and errors during plugin import;
+# this means that if a plugin itself uses pydantic during import, that usage sees no plugins.
 _loading_plugins: bool = False


-def get_plugins() ->Iterable[PydanticPluginProtocol]:
+def get_plugins() -> Iterable[PydanticPluginProtocol]:
     """Load plugins for Pydantic.

     Inspired by: https://github.com/pytest-dev/pluggy/blob/1.3.0/src/pluggy/_manager.py#L376-L402
     """
-    pass
+    disabled_plugins = os.getenv('PYDANTIC_DISABLE_PLUGINS')
+    global _plugins, _loading_plugins
+    if _loading_plugins:
+        # this happens when plugins themselves use pydantic, we return no plugins
+        return ()
+    elif disabled_plugins in ('__all__', '1', 'true'):
+        return ()
+    elif _plugins is None:
+        _plugins = {}
+        # set _loading_plugins so any plugins that use pydantic don't themselves use plugins
+        _loading_plugins = True
+        try:
+            for dist in importlib_metadata.distributions():
+                for entry_point in dist.entry_points:
+                    if entry_point.group != PYDANTIC_ENTRY_POINT_GROUP:
+                        continue
+                    if entry_point.value in _plugins:
+                        continue
+                    if disabled_plugins is not None and entry_point.name in disabled_plugins.split(','):
+                        continue
+                    try:
+                        _plugins[entry_point.value] = entry_point.load()
+                    except (ImportError, AttributeError) as e:
+                        warnings.warn(
+                            f'{e.__class__.__name__} while loading the `{entry_point.name}` Pydantic plugin, '
+                            f'this plugin will not be installed.\n\n{e!r}'
+                        )
+        finally:
+            _loading_plugins = False
+
+    return _plugins.values()
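
For context, the loader above honours the `PYDANTIC_DISABLE_PLUGINS` environment variable. A minimal sketch of the behaviour it implements (any other value is treated as a comma-separated list of entry-point names to skip); note that `get_plugins()` caches its result, so the variable must be set before the first call that triggers plugin loading:

```python
import os

# Disable all plugins ('__all__', '1' and 'true' are equivalent here).
os.environ['PYDANTIC_DISABLE_PLUGINS'] = '__all__'

# Or skip only specific entry points by name:
os.environ['PYDANTIC_DISABLE_PLUGINS'] = 'my_plugin,other_plugin'
```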
diff --git a/pydantic/plugin/_schema_validator.py b/pydantic/plugin/_schema_validator.py
index cc46fbe15..21287f445 100644
--- a/pydantic/plugin/_schema_validator.py
+++ b/pydantic/plugin/_schema_validator.py
@@ -1,71 +1,139 @@
 """Pluggable schema validator for pydantic."""
+
 from __future__ import annotations
+
 import functools
 from typing import TYPE_CHECKING, Any, Callable, Iterable, TypeVar
+
 from pydantic_core import CoreConfig, CoreSchema, SchemaValidator, ValidationError
 from typing_extensions import Literal, ParamSpec
+
 if TYPE_CHECKING:
     from . import BaseValidateHandlerProtocol, PydanticPluginProtocol, SchemaKind, SchemaTypePath
+
+
 P = ParamSpec('P')
 R = TypeVar('R')
-Event = Literal['on_validate_python', 'on_validate_json', 'on_validate_strings'
-    ]
-events: list[Event] = list(Event.__args__)
+Event = Literal['on_validate_python', 'on_validate_json', 'on_validate_strings']
+events: list[Event] = list(Event.__args__)  # type: ignore


-def create_schema_validator(schema: CoreSchema, schema_type: Any,
-    schema_type_module: str, schema_type_name: str, schema_kind: SchemaKind,
-    config: (CoreConfig | None)=None, plugin_settings: (dict[str, Any] |
-    None)=None) ->(SchemaValidator | PluggableSchemaValidator):
+def create_schema_validator(
+    schema: CoreSchema,
+    schema_type: Any,
+    schema_type_module: str,
+    schema_type_name: str,
+    schema_kind: SchemaKind,
+    config: CoreConfig | None = None,
+    plugin_settings: dict[str, Any] | None = None,
+) -> SchemaValidator | PluggableSchemaValidator:
     """Create a `SchemaValidator` or `PluggableSchemaValidator` if plugins are installed.

     Returns:
         If plugins are installed then return `PluggableSchemaValidator`, otherwise return `SchemaValidator`.
     """
-    pass
+    from . import SchemaTypePath
+    from ._loader import get_plugins
+
+    plugins = get_plugins()
+    if plugins:
+        return PluggableSchemaValidator(
+            schema,
+            schema_type,
+            SchemaTypePath(schema_type_module, schema_type_name),
+            schema_kind,
+            config,
+            plugins,
+            plugin_settings or {},
+        )
+    else:
+        return SchemaValidator(schema, config)


 class PluggableSchemaValidator:
     """Pluggable schema validator."""
-    __slots__ = ('_schema_validator', 'validate_json', 'validate_python',
-        'validate_strings')

-    def __init__(self, schema: CoreSchema, schema_type: Any,
-        schema_type_path: SchemaTypePath, schema_kind: SchemaKind, config:
-        (CoreConfig | None), plugins: Iterable[PydanticPluginProtocol],
-        plugin_settings: dict[str, Any]) ->None:
+    __slots__ = '_schema_validator', 'validate_json', 'validate_python', 'validate_strings'
+
+    def __init__(
+        self,
+        schema: CoreSchema,
+        schema_type: Any,
+        schema_type_path: SchemaTypePath,
+        schema_kind: SchemaKind,
+        config: CoreConfig | None,
+        plugins: Iterable[PydanticPluginProtocol],
+        plugin_settings: dict[str, Any],
+    ) -> None:
         self._schema_validator = SchemaValidator(schema, config)
+
         python_event_handlers: list[BaseValidateHandlerProtocol] = []
         json_event_handlers: list[BaseValidateHandlerProtocol] = []
         strings_event_handlers: list[BaseValidateHandlerProtocol] = []
         for plugin in plugins:
             try:
-                p, j, s = plugin.new_schema_validator(schema, schema_type,
-                    schema_type_path, schema_kind, config, plugin_settings)
-            except TypeError as e:
-                raise TypeError(
-                    f'Error using plugin `{plugin.__module__}:{plugin.__class__.__name__}`: {e}'
-                    ) from e
+                p, j, s = plugin.new_schema_validator(
+                    schema, schema_type, schema_type_path, schema_kind, config, plugin_settings
+                )
+            except TypeError as e:  # pragma: no cover
+                raise TypeError(f'Error using plugin `{plugin.__module__}:{plugin.__class__.__name__}`: {e}') from e
             if p is not None:
                 python_event_handlers.append(p)
             if j is not None:
                 json_event_handlers.append(j)
             if s is not None:
                 strings_event_handlers.append(s)
-        self.validate_python = build_wrapper(self._schema_validator.
-            validate_python, python_event_handlers)
-        self.validate_json = build_wrapper(self._schema_validator.
-            validate_json, json_event_handlers)
-        self.validate_strings = build_wrapper(self._schema_validator.
-            validate_strings, strings_event_handlers)
-
-    def __getattr__(self, name: str) ->Any:
+
+        self.validate_python = build_wrapper(self._schema_validator.validate_python, python_event_handlers)
+        self.validate_json = build_wrapper(self._schema_validator.validate_json, json_event_handlers)
+        self.validate_strings = build_wrapper(self._schema_validator.validate_strings, strings_event_handlers)
+
+    def __getattr__(self, name: str) -> Any:
         return getattr(self._schema_validator, name)


-def filter_handlers(handler_cls: BaseValidateHandlerProtocol, method_name: str
-    ) ->bool:
+def build_wrapper(func: Callable[P, R], event_handlers: list[BaseValidateHandlerProtocol]) -> Callable[P, R]:
+    if not event_handlers:
+        return func
+    else:
+        on_enters = tuple(h.on_enter for h in event_handlers if filter_handlers(h, 'on_enter'))
+        on_successes = tuple(h.on_success for h in event_handlers if filter_handlers(h, 'on_success'))
+        on_errors = tuple(h.on_error for h in event_handlers if filter_handlers(h, 'on_error'))
+        on_exceptions = tuple(h.on_exception for h in event_handlers if filter_handlers(h, 'on_exception'))
+
+        @functools.wraps(func)
+        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+            for on_enter_handler in on_enters:
+                on_enter_handler(*args, **kwargs)
+
+            try:
+                result = func(*args, **kwargs)
+            except ValidationError as error:
+                for on_error_handler in on_errors:
+                    on_error_handler(error)
+                raise
+            except Exception as exception:
+                for on_exception_handler in on_exceptions:
+                    on_exception_handler(exception)
+                raise
+            else:
+                for on_success_handler in on_successes:
+                    on_success_handler(result)
+                return result
+
+        return wrapper
+
+
+def filter_handlers(handler_cls: BaseValidateHandlerProtocol, method_name: str) -> bool:
     """Filter out handler methods which are not implemented by the plugin directly - e.g. are missing
     or are inherited from the protocol.
     """
-    pass
+    handler = getattr(handler_cls, method_name, None)
+    if handler is None:
+        return False
+    elif handler.__module__ == 'pydantic.plugin':
+        # this is the original handler, from the protocol due to runtime inheritance
+        # we don't want to call it
+        return False
+    else:
+        return True
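
A minimal sketch (class and method names assumed, not part of the patch) of an event handler that `build_wrapper` would wire in: only methods the plugin defines itself pass `filter_handlers`, while defaults inherited from the protocol in `pydantic.plugin` are skipped.

```python
# Hypothetical handler for illustration only.
class LoggingValidateHandler:
    def on_enter(self, *args, **kwargs) -> None:
        # called before the wrapped validate_* function runs
        print('validation started with', args, kwargs)

    def on_success(self, result) -> None:
        # called with the validated result when no exception is raised
        print('validation succeeded:', result)

    def on_error(self, error) -> None:
        # called with the ValidationError before it is re-raised
        print('validation failed:', error)
```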
diff --git a/pydantic/root_model.py b/pydantic/root_model.py
index 199c196de..b07363103 100644
--- a/pydantic/root_model.py
+++ b/pydantic/root_model.py
@@ -1,30 +1,38 @@
 """RootModel class and type definitions."""
+
 from __future__ import annotations as _annotations
+
 import typing
 from copy import copy, deepcopy
+
 from pydantic_core import PydanticUndefined
+
 from . import PydanticUserError
 from ._internal import _model_construction, _repr
 from .main import BaseModel, _object_setattr
+
 if typing.TYPE_CHECKING:
     from typing import Any
+
     from typing_extensions import Literal, Self, dataclass_transform
+
     from .fields import Field as PydanticModelField
     from .fields import PrivateAttr as PydanticModelPrivateAttr

-
-    @dataclass_transform(kw_only_default=False, field_specifiers=(
-        PydanticModelField, PydanticModelPrivateAttr))
-    class _RootModelMetaclass(_model_construction.ModelMetaclass):
-        ...
+    # dataclass_transform could be applied to RootModel directly, but `ModelMetaclass`'s dataclass_transform
+    # takes priority (at least with pyright). We trick type checkers into thinking we apply dataclass_transform
+    # on a new metaclass.
+    @dataclass_transform(kw_only_default=False, field_specifiers=(PydanticModelField, PydanticModelPrivateAttr))
+    class _RootModelMetaclass(_model_construction.ModelMetaclass): ...
 else:
     _RootModelMetaclass = _model_construction.ModelMetaclass
-__all__ = 'RootModel',
+
+__all__ = ('RootModel',)
+
 RootModelRootType = typing.TypeVar('RootModelRootType')


-class RootModel(BaseModel, typing.Generic[RootModelRootType], metaclass=
-    _RootModelMetaclass):
+class RootModel(BaseModel, typing.Generic[RootModelRootType], metaclass=_RootModelMetaclass):
     """Usage docs: https://docs.pydantic.dev/2.8/concepts/models/#rootmodel-and-custom-root-types

     A Pydantic `BaseModel` for the root object of the model.
@@ -36,34 +44,35 @@ class RootModel(BaseModel, typing.Generic[RootModelRootType], metaclass=
         __pydantic_extra__: Extra fields in the model.

     """
+
     __pydantic_root_model__ = True
     __pydantic_private__ = None
     __pydantic_extra__ = None
+
     root: RootModelRootType

     def __init_subclass__(cls, **kwargs):
         extra = cls.model_config.get('extra')
         if extra is not None:
             raise PydanticUserError(
-                "`RootModel` does not support setting `model_config['extra']`",
-                code='root-model-extra')
+                "`RootModel` does not support setting `model_config['extra']`", code='root-model-extra'
+            )
         super().__init_subclass__(**kwargs)

-    def __init__(self, /, root: RootModelRootType=PydanticUndefined, **data
-        ) ->None:
+    def __init__(self, /, root: RootModelRootType = PydanticUndefined, **data) -> None:  # type: ignore
         __tracebackhide__ = True
         if data:
             if root is not PydanticUndefined:
                 raise ValueError(
                     '"RootModel.__init__" accepts either a single positional argument or arbitrary keyword arguments'
-                    )
-            root = data
+                )
+            root = data  # type: ignore
         self.__pydantic_validator__.validate_python(root, self_instance=self)
-    __init__.__pydantic_base_init__ = True
+
+    __init__.__pydantic_base_init__ = True  # pyright: ignore[reportFunctionMemberAccess]

     @classmethod
-    def model_construct(cls, root: RootModelRootType, _fields_set: (set[str
-        ] | None)=None) ->Self:
+    def model_construct(cls, root: RootModelRootType, _fields_set: set[str] | None = None) -> Self:  # type: ignore
         """Create a new model using the provided root object and update fields set.

         Args:
@@ -76,42 +85,53 @@ class RootModel(BaseModel, typing.Generic[RootModelRootType], metaclass=
         Raises:
             NotImplemented: If the model is not a subclass of `RootModel`.
         """
-        pass
+        return super().model_construct(root=root, _fields_set=_fields_set)

-    def __getstate__(self) ->dict[Any, Any]:
-        return {'__dict__': self.__dict__, '__pydantic_fields_set__': self.
-            __pydantic_fields_set__}
+    def __getstate__(self) -> dict[Any, Any]:
+        return {
+            '__dict__': self.__dict__,
+            '__pydantic_fields_set__': self.__pydantic_fields_set__,
+        }

-    def __setstate__(self, state: dict[Any, Any]) ->None:
-        _object_setattr(self, '__pydantic_fields_set__', state[
-            '__pydantic_fields_set__'])
+    def __setstate__(self, state: dict[Any, Any]) -> None:
+        _object_setattr(self, '__pydantic_fields_set__', state['__pydantic_fields_set__'])
         _object_setattr(self, '__dict__', state['__dict__'])

-    def __copy__(self) ->Self:
+    def __copy__(self) -> Self:
         """Returns a shallow copy of the model."""
         cls = type(self)
         m = cls.__new__(cls)
         _object_setattr(m, '__dict__', copy(self.__dict__))
-        _object_setattr(m, '__pydantic_fields_set__', copy(self.
-            __pydantic_fields_set__))
+        _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__))
         return m

-    def __deepcopy__(self, memo: (dict[int, Any] | None)=None) ->Self:
+    def __deepcopy__(self, memo: dict[int, Any] | None = None) -> Self:
         """Returns a deep copy of the model."""
         cls = type(self)
         m = cls.__new__(cls)
         _object_setattr(m, '__dict__', deepcopy(self.__dict__, memo=memo))
-        _object_setattr(m, '__pydantic_fields_set__', copy(self.
-            __pydantic_fields_set__))
+        # This next line doesn't need a deepcopy because __pydantic_fields_set__ is a set[str],
+        # and attempting a deepcopy would be marginally slower.
+        _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__))
         return m
+
     if typing.TYPE_CHECKING:

-        def model_dump(self, *, mode: (Literal['json', 'python'] | str)=
-            'python', include: Any=None, exclude: Any=None, context: (dict[
-            str, Any] | None)=None, by_alias: bool=False, exclude_unset:
-            bool=False, exclude_defaults: bool=False, exclude_none: bool=
-            False, round_trip: bool=False, warnings: (bool | Literal['none',
-            'warn', 'error'])=True, serialize_as_any: bool=False) ->Any:
+        def model_dump(  # type: ignore
+            self,
+            *,
+            mode: Literal['json', 'python'] | str = 'python',
+            include: Any = None,
+            exclude: Any = None,
+            context: dict[str, Any] | None = None,
+            by_alias: bool = False,
+            exclude_unset: bool = False,
+            exclude_defaults: bool = False,
+            exclude_none: bool = False,
+            round_trip: bool = False,
+            warnings: bool | Literal['none', 'warn', 'error'] = True,
+            serialize_as_any: bool = False,
+        ) -> Any:
             """This method is included just to get a more accurate return type for type checkers.
             It is included in this `if TYPE_CHECKING:` block since no override is actually necessary.

@@ -123,13 +143,12 @@ class RootModel(BaseModel, typing.Generic[RootModelRootType], metaclass=
             even be something different, in the case of a custom serializer.
             Thus, `Any` is used here to catch all of these cases.
             """
-            pass
+            ...

-    def __eq__(self, other: Any) ->bool:
+    def __eq__(self, other: Any) -> bool:
         if not isinstance(other, RootModel):
             return NotImplemented
-        return self.model_fields['root'].annotation == other.model_fields[
-            'root'].annotation and super().__eq__(other)
+        return self.model_fields['root'].annotation == other.model_fields['root'].annotation and super().__eq__(other)

-    def __repr_args__(self) ->_repr.ReprArgs:
+    def __repr_args__(self) -> _repr.ReprArgs:
         yield 'root', self.root
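
A brief usage sketch of the `RootModel` behaviour shown above: the single positional argument (or keyword data) is validated into the `root` field, and `model_dump` returns the root value itself rather than a dict.

```python
from pydantic import RootModel

Pets = RootModel[list[str]]

pets = Pets(['dog', 'cat'])
print(pets.root)          # ['dog', 'cat']
print(pets.model_dump())  # ['dog', 'cat']
```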
diff --git a/pydantic/schema.py b/pydantic/schema.py
index 563310b07..a3245a61a 100644
--- a/pydantic/schema.py
+++ b/pydantic/schema.py
@@ -1,3 +1,5 @@
 """The `schema` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/tools.py b/pydantic/tools.py
index adfc56c85..fdc68c4f4 100644
--- a/pydantic/tools.py
+++ b/pydantic/tools.py
@@ -1,3 +1,5 @@
 """The `tools` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/type_adapter.py b/pydantic/type_adapter.py
index fc7ff2ab0..d6001df0f 100644
--- a/pydantic/type_adapter.py
+++ b/pydantic/type_adapter.py
@@ -1,28 +1,57 @@
 """Type adapter specification."""
+
 from __future__ import annotations as _annotations
+
 import sys
 from contextlib import contextmanager
 from dataclasses import is_dataclass
 from functools import cached_property, wraps
-from typing import TYPE_CHECKING, Any, Callable, Dict, Generic, Iterable, Iterator, Literal, Set, TypeVar, Union, cast, final, overload
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Generic,
+    Iterable,
+    Iterator,
+    Literal,
+    Set,
+    TypeVar,
+    Union,
+    cast,
+    final,
+    overload,
+)
+
 from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator, Some
 from typing_extensions import Concatenate, ParamSpec, is_typeddict
+
 from pydantic.errors import PydanticUserError
 from pydantic.main import BaseModel
+
 from ._internal import _config, _generate_schema, _mock_val_ser, _typing_extra, _utils
 from .config import ConfigDict
-from .json_schema import DEFAULT_REF_TEMPLATE, GenerateJsonSchema, JsonSchemaKeyT, JsonSchemaMode, JsonSchemaValue
+from .json_schema import (
+    DEFAULT_REF_TEMPLATE,
+    GenerateJsonSchema,
+    JsonSchemaKeyT,
+    JsonSchemaMode,
+    JsonSchemaValue,
+)
 from .plugin._schema_validator import PluggableSchemaValidator, create_schema_validator
+
 T = TypeVar('T')
 R = TypeVar('R')
 P = ParamSpec('P')
 TypeAdapterT = TypeVar('TypeAdapterT', bound='TypeAdapter')
+
+
 if TYPE_CHECKING:
+    # should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope
     IncEx = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any]]


-def _get_schema(type_: Any, config_wrapper: _config.ConfigWrapper,
-    parent_depth: int) ->CoreSchema:
+def _get_schema(type_: Any, config_wrapper: _config.ConfigWrapper, parent_depth: int) -> CoreSchema:
     """`BaseModel` uses its own `__module__` to find out where it was defined
     and then looks for symbols to resolve forward references in those globals.
     On the other hand this function can be called with arbitrary objects,
@@ -66,17 +95,55 @@ def _get_schema(type_: Any, config_wrapper: _config.ConfigWrapper,

     But at the very least this behavior is _subtly_ different from `BaseModel`'s.
     """
-    pass
+    local_ns = _typing_extra.parent_frame_namespace(parent_depth=parent_depth)
+    global_ns = sys._getframe(max(parent_depth - 1, 1)).f_globals.copy()
+    global_ns.update(local_ns or {})
+    gen = _generate_schema.GenerateSchema(config_wrapper, types_namespace=global_ns, typevars_map={})
+    schema = gen.generate_schema(type_)
+    schema = gen.clean_schema(schema)
+    return schema


-def _getattr_no_parents(obj: Any, attribute: str) ->Any:
+def _getattr_no_parents(obj: Any, attribute: str) -> Any:
     """Returns the attribute value without attempting to look up attributes from parent types."""
-    pass
+    if hasattr(obj, '__dict__'):
+        try:
+            return obj.__dict__[attribute]
+        except KeyError:
+            pass

+    slots = getattr(obj, '__slots__', None)
+    if slots is not None and attribute in slots:
+        return getattr(obj, attribute)
+    else:
+        raise AttributeError(attribute)

-def _type_has_config(type_: Any) ->bool:
+
+def _type_has_config(type_: Any) -> bool:
     """Returns whether the type has config."""
-    pass
+    type_ = _typing_extra.annotated_type(type_) or type_
+    try:
+        return issubclass(type_, BaseModel) or is_dataclass(type_) or is_typeddict(type_)
+    except TypeError:
+        # type is not a class
+        return False
+
+
+# This is keeping track of the frame depth for the TypeAdapter functions. This is required for _parent_depth used for
+# ForwardRef resolution. We may enter the TypeAdapter schema building via different TypeAdapter functions. Hence, we
+# need to keep track of the frame depth relative to the originally provided _parent_depth.
+def _frame_depth(
+    depth: int,
+) -> Callable[[Callable[Concatenate[TypeAdapterT, P], R]], Callable[Concatenate[TypeAdapterT, P], R]]:
+    def wrapper(func: Callable[Concatenate[TypeAdapterT, P], R]) -> Callable[Concatenate[TypeAdapterT, P], R]:
+        @wraps(func)
+        def wrapped(self: TypeAdapterT, *args: P.args, **kwargs: P.kwargs) -> R:
+            with self._with_frame_depth(depth + 1):  # depth + 1 for the wrapper function
+                return func(self, *args, **kwargs)
+
+        return wrapped
+
+    return wrapper


 @final
@@ -104,17 +171,36 @@ class TypeAdapter(Generic[T]):
     """

     @overload
-    def __init__(self, type: type[T], *, config: (ConfigDict | None)=...,
-        _parent_depth: int=..., module: (str | None)=...) ->None:
-        ...
-
+    def __init__(
+        self,
+        type: type[T],
+        *,
+        config: ConfigDict | None = ...,
+        _parent_depth: int = ...,
+        module: str | None = ...,
+    ) -> None: ...
+
+    # This second overload is for unsupported special forms (such as Annotated, Union, etc.)
+    # Currently there is no way to type this correctly
+    # See https://github.com/python/typing/pull/1618
     @overload
-    def __init__(self, type: Any, *, config: (ConfigDict | None)=...,
-        _parent_depth: int=..., module: (str | None)=...) ->None:
-        ...
-
-    def __init__(self, type: Any, *, config: (ConfigDict | None)=None,
-        _parent_depth: int=2, module: (str | None)=None) ->None:
+    def __init__(
+        self,
+        type: Any,
+        *,
+        config: ConfigDict | None = ...,
+        _parent_depth: int = ...,
+        module: str | None = ...,
+    ) -> None: ...
+
+    def __init__(
+        self,
+        type: Any,
+        *,
+        config: ConfigDict | None = None,
+        _parent_depth: int = 2,
+        module: str | None = None,
+    ) -> None:
         """Initializes the TypeAdapter object.

         Args:
@@ -150,8 +236,13 @@ class TypeAdapter(Generic[T]):
         """
         if _type_has_config(type) and config is not None:
             raise PydanticUserError(
-                'Cannot use `config` when the type is a BaseModel, dataclass or TypedDict. These types can have their own config and setting the config via the `config` parameter to TypeAdapter will not override it, thus the `config` you passed to TypeAdapter becomes meaningless, which is probably not what you want.'
-                , code='type-adapter-config-unused')
+                'Cannot use `config` when the type is a BaseModel, dataclass or TypedDict.'
+                ' These types can have their own config and setting the config via the `config`'
+                ' parameter to TypeAdapter will not override it, thus the `config` you passed to'
+                ' TypeAdapter becomes meaningless, which is probably not what you want.',
+                code='type-adapter-config-unused',
+            )
+
         self._type = type
         self._config = config
         self._parent_depth = _parent_depth
@@ -160,36 +251,110 @@ class TypeAdapter(Generic[T]):
             self._module_name = cast(str, f.f_globals.get('__name__', ''))
         else:
             self._module_name = module
+
         self._core_schema: CoreSchema | None = None
-        (self._validator: SchemaValidator | PluggableSchemaValidator | None
-            ) = None
+        self._validator: SchemaValidator | PluggableSchemaValidator | None = None
         self._serializer: SchemaSerializer | None = None
+
         if not self._defer_build():
-            with self._with_frame_depth(1):
+            # Immediately initialize the core schema, validator and serializer
+            with self._with_frame_depth(1):  # +1 frame depth for this __init__
+                # Model itself may be using deferred building. For backward compatibility we don't rebuild model mocks
+                # here as part of __init__ even though TypeAdapter itself is not using deferred building.
                 self._init_core_attrs(rebuild_mocks=False)

+    @contextmanager
+    def _with_frame_depth(self, depth: int) -> Iterator[None]:
+        self._parent_depth += depth
+        try:
+            yield
+        finally:
+            self._parent_depth -= depth
+
+    @_frame_depth(1)
+    def _init_core_attrs(self, rebuild_mocks: bool) -> None:
+        try:
+            self._core_schema = _getattr_no_parents(self._type, '__pydantic_core_schema__')
+            self._validator = _getattr_no_parents(self._type, '__pydantic_validator__')
+            self._serializer = _getattr_no_parents(self._type, '__pydantic_serializer__')
+        except AttributeError:
+            config_wrapper = _config.ConfigWrapper(self._config)
+            core_config = config_wrapper.core_config(None)
+
+            self._core_schema = _get_schema(self._type, config_wrapper, parent_depth=self._parent_depth)
+            self._validator = create_schema_validator(
+                schema=self._core_schema,
+                schema_type=self._type,
+                schema_type_module=self._module_name,
+                schema_type_name=str(self._type),
+                schema_kind='TypeAdapter',
+                config=core_config,
+                plugin_settings=config_wrapper.plugin_settings,
+            )
+            self._serializer = SchemaSerializer(self._core_schema, core_config)
+
+        if rebuild_mocks and isinstance(self._core_schema, _mock_val_ser.MockCoreSchema):
+            self._core_schema.rebuild()
+            self._init_core_attrs(rebuild_mocks=False)
+            assert not isinstance(self._core_schema, _mock_val_ser.MockCoreSchema)
+            assert not isinstance(self._validator, _mock_val_ser.MockValSer)
+            assert not isinstance(self._serializer, _mock_val_ser.MockValSer)
+
     @cached_property
-    @_frame_depth(2)
-    def core_schema(self) ->CoreSchema:
+    @_frame_depth(2)  # +2 for @cached_property and core_schema(self)
+    def core_schema(self) -> CoreSchema:
         """The pydantic-core schema used to build the SchemaValidator and SchemaSerializer."""
-        pass
+        if self._core_schema is None or isinstance(self._core_schema, _mock_val_ser.MockCoreSchema):
+            self._init_core_attrs(rebuild_mocks=True)  # Do not expose MockCoreSchema from public function
+        assert self._core_schema is not None and not isinstance(self._core_schema, _mock_val_ser.MockCoreSchema)
+        return self._core_schema

     @cached_property
-    @_frame_depth(2)
-    def validator(self) ->(SchemaValidator | PluggableSchemaValidator):
+    @_frame_depth(2)  # +2 for @cached_property + validator(self)
+    def validator(self) -> SchemaValidator | PluggableSchemaValidator:
         """The pydantic-core SchemaValidator used to validate instances of the model."""
-        pass
+        if not isinstance(self._validator, (SchemaValidator, PluggableSchemaValidator)):
+            self._init_core_attrs(rebuild_mocks=True)  # Do not expose MockValSer from public function
+        assert isinstance(self._validator, (SchemaValidator, PluggableSchemaValidator))
+        return self._validator

     @cached_property
-    @_frame_depth(2)
-    def serializer(self) ->SchemaSerializer:
+    @_frame_depth(2)  # +2 for @cached_property + serializer(self)
+    def serializer(self) -> SchemaSerializer:
         """The pydantic-core SchemaSerializer used to dump instances of the model."""
-        pass
+        if not isinstance(self._serializer, SchemaSerializer):
+            self._init_core_attrs(rebuild_mocks=True)  # Do not expose MockValSer from public function
+        assert isinstance(self._serializer, SchemaSerializer)
+        return self._serializer
+
+    def _defer_build(self) -> bool:
+        config = self._config if self._config is not None else self._model_config()
+        return self._is_defer_build_config(config) if config is not None else False
+
+    def _model_config(self) -> ConfigDict | None:
+        type_: Any = _typing_extra.annotated_type(self._type) or self._type  # Eg FastAPI heavily uses Annotated
+        if _utils.lenient_issubclass(type_, BaseModel):
+            return type_.model_config
+        return getattr(type_, '__pydantic_config__', None)
+
+    @staticmethod
+    def _is_defer_build_config(config: ConfigDict) -> bool:
+        # TODO reevaluate this logic when we have a better understanding of how defer_build should work with TypeAdapter
+        # Should we drop the special experimental_defer_build_mode check?
+        return config.get('defer_build', False) is True and 'type_adapter' in config.get(
+            'experimental_defer_build_mode', tuple()
+        )
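
For orientation, a minimal sketch (assuming pydantic 2.8, where `experimental_defer_build_mode` is still available) of how a model opts into the deferred building that `_defer_build` / `_is_defer_build_config` detect; the model name is illustrative:

```python
from pydantic import BaseModel, ConfigDict, TypeAdapter

class Item(BaseModel):
    # defer_build alone is not enough for TypeAdapter: 'type_adapter' must also be
    # listed in experimental_defer_build_mode, as checked by _is_defer_build_config.
    model_config = ConfigDict(defer_build=True, experimental_defer_build_mode=('type_adapter',))
    name: str

ta = TypeAdapter(Item)                   # no core schema / validator is built yet
ta.validate_python({'name': 'widget'})   # first use builds them lazily via _init_core_attrs
```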

     @_frame_depth(1)
-    def validate_python(self, object: Any, /, *, strict: (bool | None)=None,
-        from_attributes: (bool | None)=None, context: (dict[str, Any] |
-        None)=None) ->T:
+    def validate_python(
+        self,
+        object: Any,
+        /,
+        *,
+        strict: bool | None = None,
+        from_attributes: bool | None = None,
+        context: dict[str, Any] | None = None,
+    ) -> T:
         """Validate a Python object against the model.

         Args:
@@ -205,11 +370,12 @@ class TypeAdapter(Generic[T]):
         Returns:
             The validated object.
         """
-        pass
+        return self.validator.validate_python(object, strict=strict, from_attributes=from_attributes, context=context)

     @_frame_depth(1)
-    def validate_json(self, data: (str | bytes), /, *, strict: (bool | None
-        )=None, context: (dict[str, Any] | None)=None) ->T:
+    def validate_json(
+        self, data: str | bytes, /, *, strict: bool | None = None, context: dict[str, Any] | None = None
+    ) -> T:
         """Usage docs: https://docs.pydantic.dev/2.8/concepts/json/#json-parsing

         Validate a JSON string or bytes against the model.
@@ -222,11 +388,10 @@ class TypeAdapter(Generic[T]):
         Returns:
             The validated object.
         """
-        pass
+        return self.validator.validate_json(data, strict=strict, context=context)

     @_frame_depth(1)
-    def validate_strings(self, obj: Any, /, *, strict: (bool | None)=None,
-        context: (dict[str, Any] | None)=None) ->T:
+    def validate_strings(self, obj: Any, /, *, strict: bool | None = None, context: dict[str, Any] | None = None) -> T:
         """Validate object contains string data against the model.

         Args:
@@ -237,11 +402,10 @@ class TypeAdapter(Generic[T]):
         Returns:
             The validated object.
         """
-        pass
+        return self.validator.validate_strings(obj, strict=strict, context=context)

     @_frame_depth(1)
-    def get_default_value(self, *, strict: (bool | None)=None, context: (
-        dict[str, Any] | None)=None) ->(Some[T] | None):
+    def get_default_value(self, *, strict: bool | None = None, context: dict[str, Any] | None = None) -> Some[T] | None:
         """Get the default value for the wrapped type.

         Args:
@@ -251,16 +415,26 @@ class TypeAdapter(Generic[T]):
         Returns:
             The default value wrapped in a `Some` if there is one or None if not.
         """
-        pass
+        return self.validator.get_default_value(strict=strict, context=context)

     @_frame_depth(1)
-    def dump_python(self, instance: T, /, *, mode: Literal['json', 'python'
-        ]='python', include: (IncEx | None)=None, exclude: (IncEx | None)=
-        None, by_alias: bool=False, exclude_unset: bool=False,
-        exclude_defaults: bool=False, exclude_none: bool=False, round_trip:
-        bool=False, warnings: (bool | Literal['none', 'warn', 'error'])=
-        True, serialize_as_any: bool=False, context: (dict[str, Any] | None
-        )=None) ->Any:
+    def dump_python(
+        self,
+        instance: T,
+        /,
+        *,
+        mode: Literal['json', 'python'] = 'python',
+        include: IncEx | None = None,
+        exclude: IncEx | None = None,
+        by_alias: bool = False,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+        round_trip: bool = False,
+        warnings: bool | Literal['none', 'warn', 'error'] = True,
+        serialize_as_any: bool = False,
+        context: dict[str, Any] | None = None,
+    ) -> Any:
         """Dump an instance of the adapted type to a Python object.

         Args:
@@ -281,16 +455,39 @@ class TypeAdapter(Generic[T]):
         Returns:
             The serialized object.
         """
-        pass
+        return self.serializer.to_python(
+            instance,
+            mode=mode,
+            by_alias=by_alias,
+            include=include,
+            exclude=exclude,
+            exclude_unset=exclude_unset,
+            exclude_defaults=exclude_defaults,
+            exclude_none=exclude_none,
+            round_trip=round_trip,
+            warnings=warnings,
+            serialize_as_any=serialize_as_any,
+            context=context,
+        )

     @_frame_depth(1)
-    def dump_json(self, instance: T, /, *, indent: (int | None)=None,
-        include: (IncEx | None)=None, exclude: (IncEx | None)=None,
-        by_alias: bool=False, exclude_unset: bool=False, exclude_defaults:
-        bool=False, exclude_none: bool=False, round_trip: bool=False,
-        warnings: (bool | Literal['none', 'warn', 'error'])=True,
-        serialize_as_any: bool=False, context: (dict[str, Any] | None)=None
-        ) ->bytes:
+    def dump_json(
+        self,
+        instance: T,
+        /,
+        *,
+        indent: int | None = None,
+        include: IncEx | None = None,
+        exclude: IncEx | None = None,
+        by_alias: bool = False,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+        round_trip: bool = False,
+        warnings: bool | Literal['none', 'warn', 'error'] = True,
+        serialize_as_any: bool = False,
+        context: dict[str, Any] | None = None,
+    ) -> bytes:
         """Usage docs: https://docs.pydantic.dev/2.8/concepts/json/#json-serialization

         Serialize an instance of the adapted type to JSON.
@@ -313,13 +510,30 @@ class TypeAdapter(Generic[T]):
         Returns:
             The JSON representation of the given instance as bytes.
         """
-        pass
+        return self.serializer.to_json(
+            instance,
+            indent=indent,
+            include=include,
+            exclude=exclude,
+            by_alias=by_alias,
+            exclude_unset=exclude_unset,
+            exclude_defaults=exclude_defaults,
+            exclude_none=exclude_none,
+            round_trip=round_trip,
+            warnings=warnings,
+            serialize_as_any=serialize_as_any,
+            context=context,
+        )

     @_frame_depth(1)
-    def json_schema(self, *, by_alias: bool=True, ref_template: str=
-        DEFAULT_REF_TEMPLATE, schema_generator: type[GenerateJsonSchema]=
-        GenerateJsonSchema, mode: JsonSchemaMode='validation') ->dict[str, Any
-        ]:
+    def json_schema(
+        self,
+        *,
+        by_alias: bool = True,
+        ref_template: str = DEFAULT_REF_TEMPLATE,
+        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
+        mode: JsonSchemaMode = 'validation',
+    ) -> dict[str, Any]:
         """Generate a JSON schema for the adapted type.

         Args:
@@ -331,15 +545,20 @@ class TypeAdapter(Generic[T]):
         Returns:
             The JSON schema for the model as a dictionary.
         """
-        pass
+        schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template)
+        return schema_generator_instance.generate(self.core_schema, mode=mode)

     @staticmethod
-    def json_schemas(inputs: Iterable[tuple[JsonSchemaKeyT, JsonSchemaMode,
-        TypeAdapter[Any]]], /, *, by_alias: bool=True, title: (str | None)=
-        None, description: (str | None)=None, ref_template: str=
-        DEFAULT_REF_TEMPLATE, schema_generator: type[GenerateJsonSchema]=
-        GenerateJsonSchema) ->tuple[dict[tuple[JsonSchemaKeyT,
-        JsonSchemaMode], JsonSchemaValue], JsonSchemaValue]:
+    def json_schemas(
+        inputs: Iterable[tuple[JsonSchemaKeyT, JsonSchemaMode, TypeAdapter[Any]]],
+        /,
+        *,
+        by_alias: bool = True,
+        title: str | None = None,
+        description: str | None = None,
+        ref_template: str = DEFAULT_REF_TEMPLATE,
+        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
+    ) -> tuple[dict[tuple[JsonSchemaKeyT, JsonSchemaMode], JsonSchemaValue], JsonSchemaValue]:
         """Generate a JSON schema including definitions from multiple type adapters.

         Args:
@@ -362,4 +581,21 @@ class TypeAdapter(Generic[T]):
                     element, along with the optional title and description keys.

         """
-        pass
+        schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template)
+
+        inputs_ = []
+        for key, mode, adapter in inputs:
+            with adapter._with_frame_depth(1):  # +1 for json_schemas staticmethod
+                inputs_.append((key, mode, adapter.core_schema))
+
+        json_schemas_map, definitions = schema_generator_instance.generate_definitions(inputs_)
+
+        json_schema: dict[str, Any] = {}
+        if definitions:
+            json_schema['$defs'] = definitions
+        if title:
+            json_schema['title'] = title
+        if description:
+            json_schema['description'] = description
+
+        return json_schemas_map, json_schema
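
With the methods above filled in, the adapter supports the usual validate / dump / schema round trip; a short usage sketch (assuming pydantic 2.8):

```python
from pydantic import TypeAdapter

ta = TypeAdapter(list[int])

values = ta.validate_python(['1', 2, '3'])  # [1, 2, 3], via the SchemaValidator
payload = ta.dump_json(values)              # b'[1,2,3]', via the SchemaSerializer
schema = ta.json_schema()                   # {'items': {'type': 'integer'}, 'type': 'array'}
```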
diff --git a/pydantic/types.py b/pydantic/types.py
index 48f11cb49..50f397e85 100644
--- a/pydantic/types.py
+++ b/pydantic/types.py
@@ -1,5 +1,7 @@
 """The types module contains custom types used by pydantic."""
+
 from __future__ import annotations as _annotations
+
 import base64
 import dataclasses as _dataclasses
 import re
@@ -8,32 +10,110 @@ from decimal import Decimal
 from enum import Enum
 from pathlib import Path
 from types import ModuleType
-from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, FrozenSet, Generic, Hashable, Iterator, List, Pattern, Set, TypeVar, Union, cast, get_args, get_origin
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    ClassVar,
+    Dict,
+    FrozenSet,
+    Generic,
+    Hashable,
+    Iterator,
+    List,
+    Pattern,
+    Set,
+    TypeVar,
+    Union,
+    cast,
+    get_args,
+    get_origin,
+)
 from uuid import UUID
+
 import annotated_types
 from annotated_types import BaseMetadata, MaxLen, MinLen
 from pydantic_core import CoreSchema, PydanticCustomError, core_schema
 from typing_extensions import Annotated, Literal, Protocol, TypeAlias, TypeAliasType, deprecated
-from ._internal import _core_utils, _fields, _internal_dataclass, _typing_extra, _utils, _validators
+
+from ._internal import (
+    _core_utils,
+    _fields,
+    _internal_dataclass,
+    _typing_extra,
+    _utils,
+    _validators,
+)
 from ._migration import getattr_migration
 from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
 from .errors import PydanticUserError
 from .json_schema import JsonSchemaValue
 from .warnings import PydanticDeprecatedSince20
-__all__ = ('Strict', 'StrictStr', 'conbytes', 'conlist', 'conset',
-    'confrozenset', 'constr', 'ImportString', 'conint', 'PositiveInt',
-    'NegativeInt', 'NonNegativeInt', 'NonPositiveInt', 'confloat',
-    'PositiveFloat', 'NegativeFloat', 'NonNegativeFloat',
-    'NonPositiveFloat', 'FiniteFloat', 'condecimal', 'UUID1', 'UUID3',
-    'UUID4', 'UUID5', 'FilePath', 'DirectoryPath', 'NewPath', 'Json',
-    'Secret', 'SecretStr', 'SecretBytes', 'StrictBool', 'StrictBytes',
-    'StrictInt', 'StrictFloat', 'PaymentCardNumber', 'ByteSize', 'PastDate',
-    'FutureDate', 'PastDatetime', 'FutureDatetime', 'condate',
-    'AwareDatetime', 'NaiveDatetime', 'AllowInfNan', 'EncoderProtocol',
-    'EncodedBytes', 'EncodedStr', 'Base64Encoder', 'Base64Bytes',
-    'Base64Str', 'Base64UrlBytes', 'Base64UrlStr', 'GetPydanticSchema',
-    'StringConstraints', 'Tag', 'Discriminator', 'JsonValue', 'OnErrorOmit',
-    'FailFast')
+
+__all__ = (
+    'Strict',
+    'StrictStr',
+    'conbytes',
+    'conlist',
+    'conset',
+    'confrozenset',
+    'constr',
+    'ImportString',
+    'conint',
+    'PositiveInt',
+    'NegativeInt',
+    'NonNegativeInt',
+    'NonPositiveInt',
+    'confloat',
+    'PositiveFloat',
+    'NegativeFloat',
+    'NonNegativeFloat',
+    'NonPositiveFloat',
+    'FiniteFloat',
+    'condecimal',
+    'UUID1',
+    'UUID3',
+    'UUID4',
+    'UUID5',
+    'FilePath',
+    'DirectoryPath',
+    'NewPath',
+    'Json',
+    'Secret',
+    'SecretStr',
+    'SecretBytes',
+    'StrictBool',
+    'StrictBytes',
+    'StrictInt',
+    'StrictFloat',
+    'PaymentCardNumber',
+    'ByteSize',
+    'PastDate',
+    'FutureDate',
+    'PastDatetime',
+    'FutureDatetime',
+    'condate',
+    'AwareDatetime',
+    'NaiveDatetime',
+    'AllowInfNan',
+    'EncoderProtocol',
+    'EncodedBytes',
+    'EncodedStr',
+    'Base64Encoder',
+    'Base64Bytes',
+    'Base64Str',
+    'Base64UrlBytes',
+    'Base64UrlStr',
+    'GetPydanticSchema',
+    'StringConstraints',
+    'Tag',
+    'Discriminator',
+    'JsonValue',
+    'OnErrorOmit',
+    'FailFast',
+)
+
+
 T = TypeVar('T')


@@ -55,19 +135,30 @@ class Strict(_fields.PydanticMetadata, BaseMetadata):
         StrictBool = Annotated[bool, Strict()]
         ```
     """
+
     strict: bool = True

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(self.strict)


+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
 StrictBool = Annotated[bool, Strict()]
 """A boolean that must be either ``True`` or ``False``."""

+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+

-def conint(*, strict: (bool | None)=None, gt: (int | None)=None, ge: (int |
-    None)=None, lt: (int | None)=None, le: (int | None)=None, multiple_of:
-    (int | None)=None) ->type[int]:
+def conint(
+    *,
+    strict: bool | None = None,
+    gt: int | None = None,
+    ge: int | None = None,
+    lt: int | None = None,
+    le: int | None = None,
+    multiple_of: int | None = None,
+) -> type[int]:
     """
     !!! warning "Discouraged"
         This function is **discouraged** in favor of using
@@ -137,8 +228,13 @@ def conint(*, strict: (bool | None)=None, gt: (int | None)=None, ge: (int |
         '''
     ```

-    """
-    pass
+    """  # noqa: D212
+    return Annotated[  # pyright: ignore[reportReturnType]
+        int,
+        Strict(strict) if strict is not None else None,
+        annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le),
+        annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None,
+    ]
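
As the docstring above recommends, the same constraints can be written with `Annotated` directly; a small sketch of the equivalence (the type alias names are illustrative):

```python
from typing_extensions import Annotated
from pydantic import Field, TypeAdapter, conint

LegacyBounded = conint(gt=0, le=100)
PreferredBounded = Annotated[int, Field(gt=0, le=100)]

# Both forms carry the same Interval metadata, so validation behaves identically.
assert TypeAdapter(LegacyBounded).validate_python('42') == 42
assert TypeAdapter(PreferredBounded).validate_python('42') == 42
```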


 PositiveInt = Annotated[int, annotated_types.Gt(0)]
@@ -286,20 +382,29 @@ except ValidationError as e:
 ```
 """

+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+

 @_dataclasses.dataclass
 class AllowInfNan(_fields.PydanticMetadata):
     """A field metadata class to indicate that a field should allow ``-inf``, ``inf``, and ``nan``."""
+
     allow_inf_nan: bool = True

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(self.allow_inf_nan)


-def confloat(*, strict: (bool | None)=None, gt: (float | None)=None, ge: (
-    float | None)=None, lt: (float | None)=None, le: (float | None)=None,
-    multiple_of: (float | None)=None, allow_inf_nan: (bool | None)=None
-    ) ->type[float]:
+def confloat(
+    *,
+    strict: bool | None = None,
+    gt: float | None = None,
+    ge: float | None = None,
+    lt: float | None = None,
+    le: float | None = None,
+    multiple_of: float | None = None,
+    allow_inf_nan: bool | None = None,
+) -> type[float]:
     """
     !!! warning "Discouraged"
         This function is **discouraged** in favor of using
@@ -369,8 +474,14 @@ def confloat(*, strict: (bool | None)=None, gt: (float | None)=None, ge: (
         ]
         '''
     ```
-    """
-    pass
+    """  # noqa: D212
+    return Annotated[  # pyright: ignore[reportReturnType]
+        float,
+        Strict(strict) if strict is not None else None,
+        annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le),
+        annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None,
+        AllowInfNan(allow_inf_nan) if allow_inf_nan is not None else None,
+    ]


 PositiveFloat = Annotated[float, annotated_types.Gt(0)]
@@ -533,8 +644,15 @@ print(m)
 """


-def conbytes(*, min_length: (int | None)=None, max_length: (int | None)=
-    None, strict: (bool | None)=None) ->type[bytes]:
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+def conbytes(
+    *,
+    min_length: int | None = None,
+    max_length: int | None = None,
+    strict: bool | None = None,
+) -> type[bytes]:
     """A wrapper around `bytes` that allows for additional constraints.

     Args:
@@ -545,13 +663,20 @@ def conbytes(*, min_length: (int | None)=None, max_length: (int | None)=
     Returns:
         The wrapped bytes type.
     """
-    pass
+    return Annotated[  # pyright: ignore[reportReturnType]
+        bytes,
+        Strict(strict) if strict is not None else None,
+        annotated_types.Len(min_length or 0, max_length),
+    ]


 StrictBytes = Annotated[bytes, Strict()]
 """A bytes that must be validated in strict mode."""


+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
 @_dataclasses.dataclass(frozen=True)
 class StringConstraints(annotated_types.GroupedMetadata):
     """Usage docs: https://docs.pydantic.dev/2.8/concepts/fields/#string-constraints
@@ -567,6 +692,7 @@ class StringConstraints(annotated_types.GroupedMetadata):
         max_length: The maximum length of the string.
         pattern: A regex pattern that the string must match.
     """
+
     strip_whitespace: bool | None = None
     to_upper: bool | None = None
     to_lower: bool | None = None
@@ -575,24 +701,37 @@ class StringConstraints(annotated_types.GroupedMetadata):
     max_length: int | None = None
     pattern: str | Pattern[str] | None = None

-    def __iter__(self) ->Iterator[BaseMetadata]:
+    def __iter__(self) -> Iterator[BaseMetadata]:
         if self.min_length is not None:
             yield MinLen(self.min_length)
         if self.max_length is not None:
             yield MaxLen(self.max_length)
         if self.strict is not None:
             yield Strict(self.strict)
-        if (self.strip_whitespace is not None or self.pattern is not None or
-            self.to_lower is not None or self.to_upper is not None):
-            yield _fields.pydantic_general_metadata(strip_whitespace=self.
-                strip_whitespace, to_upper=self.to_upper, to_lower=self.
-                to_lower, pattern=self.pattern)
-
-
-def constr(*, strip_whitespace: (bool | None)=None, to_upper: (bool | None)
-    =None, to_lower: (bool | None)=None, strict: (bool | None)=None,
-    min_length: (int | None)=None, max_length: (int | None)=None, pattern:
-    (str | Pattern[str] | None)=None) ->type[str]:
+        if (
+            self.strip_whitespace is not None
+            or self.pattern is not None
+            or self.to_lower is not None
+            or self.to_upper is not None
+        ):
+            yield _fields.pydantic_general_metadata(
+                strip_whitespace=self.strip_whitespace,
+                to_upper=self.to_upper,
+                to_lower=self.to_lower,
+                pattern=self.pattern,
+            )
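
A brief sketch of `StringConstraints` used the way the class is intended, as `Annotated` metadata expanded through the `__iter__` above (the alias name is illustrative):

```python
from typing_extensions import Annotated
from pydantic import StringConstraints, TypeAdapter

Username = Annotated[str, StringConstraints(strip_whitespace=True, min_length=1, max_length=8)]

print(TypeAdapter(Username).validate_python(' alice '))  # 'alice', stripped and length-checked
```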
+
+
+def constr(
+    *,
+    strip_whitespace: bool | None = None,
+    to_upper: bool | None = None,
+    to_lower: bool | None = None,
+    strict: bool | None = None,
+    min_length: int | None = None,
+    max_length: int | None = None,
+    pattern: str | Pattern[str] | None = None,
+) -> type[str]:
     """
     !!! warning "Discouraged"
         This function is **discouraged** in favor of using
@@ -646,17 +785,32 @@ def constr(*, strip_whitespace: (bool | None)=None, to_upper: (bool | None)

     Returns:
         The wrapped string type.
-    """
-    pass
+    """  # noqa: D212
+    return Annotated[  # pyright: ignore[reportReturnType]
+        str,
+        StringConstraints(
+            strip_whitespace=strip_whitespace,
+            to_upper=to_upper,
+            to_lower=to_lower,
+            strict=strict,
+            min_length=min_length,
+            max_length=max_length,
+            pattern=pattern,
+        ),
+    ]


 StrictStr = Annotated[str, Strict()]
 """A string that must be validated in strict mode."""
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~ COLLECTION TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 HashableItemType = TypeVar('HashableItemType', bound=Hashable)


-def conset(item_type: type[HashableItemType], *, min_length: (int | None)=
-    None, max_length: (int | None)=None) ->type[set[HashableItemType]]:
+def conset(
+    item_type: type[HashableItemType], *, min_length: int | None = None, max_length: int | None = None
+) -> type[set[HashableItemType]]:
     """A wrapper around `typing.Set` that allows for additional constraints.

     Args:
@@ -667,12 +821,12 @@ def conset(item_type: type[HashableItemType], *, min_length: (int | None)=
     Returns:
         The wrapped set type.
     """
-    pass
+    return Annotated[Set[item_type], annotated_types.Len(min_length or 0, max_length)]  # pyright: ignore[reportReturnType]


-def confrozenset(item_type: type[HashableItemType], *, min_length: (int |
-    None)=None, max_length: (int | None)=None) ->type[frozenset[
-    HashableItemType]]:
+def confrozenset(
+    item_type: type[HashableItemType], *, min_length: int | None = None, max_length: int | None = None
+) -> type[frozenset[HashableItemType]]:
     """A wrapper around `typing.FrozenSet` that allows for additional constraints.

     Args:
@@ -683,15 +837,19 @@ def confrozenset(item_type: type[HashableItemType], *, min_length: (int |
     Returns:
         The wrapped frozenset type.
     """
-    pass
+    return Annotated[FrozenSet[item_type], annotated_types.Len(min_length or 0, max_length)]  # pyright: ignore[reportReturnType]


 AnyItemType = TypeVar('AnyItemType')


-def conlist(item_type: type[AnyItemType], *, min_length: (int | None)=None,
-    max_length: (int | None)=None, unique_items: (bool | None)=None) ->type[
-    list[AnyItemType]]:
+def conlist(
+    item_type: type[AnyItemType],
+    *,
+    min_length: int | None = None,
+    max_length: int | None = None,
+    unique_items: bool | None = None,
+) -> type[list[AnyItemType]]:
     """A wrapper around typing.List that adds validation.

     Args:
@@ -706,15 +864,24 @@ def conlist(item_type: type[AnyItemType], *, min_length: (int | None)=None,
     Returns:
         The wrapped list type.
     """
-    pass
+    if unique_items is not None:
+        raise PydanticUserError(
+            (
+                '`unique_items` is removed, use `Set` instead '
+                '(this feature is discussed in https://github.com/pydantic/pydantic-core/issues/296)'
+            ),
+            code='removed-kwargs',
+        )
+    return Annotated[List[item_type], annotated_types.Len(min_length or 0, max_length)]  # pyright: ignore[reportReturnType]
+

+# ~~~~~~~~~~~~~~~~~~~~~~~~~~ IMPORT STRING TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

 AnyType = TypeVar('AnyType')
 if TYPE_CHECKING:
     ImportString = Annotated[AnyType, ...]
 else:

-
     class ImportString:
         """A type that can be used to import a type from a string.

@@ -807,36 +974,56 @@ else:
         """

         @classmethod
-        def __class_getitem__(cls, item: AnyType) ->AnyType:
+        def __class_getitem__(cls, item: AnyType) -> AnyType:
             return Annotated[item, cls()]

         @classmethod
-        def __get_pydantic_core_schema__(cls, source: type[Any], handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
-            serializer = core_schema.plain_serializer_function_ser_schema(cls
-                ._serialize, when_used='json')
+        def __get_pydantic_core_schema__(
+            cls, source: type[Any], handler: GetCoreSchemaHandler
+        ) -> core_schema.CoreSchema:
+            serializer = core_schema.plain_serializer_function_ser_schema(cls._serialize, when_used='json')
             if cls is source:
-                return core_schema.no_info_plain_validator_function(function
-                    =_validators.import_string, serialization=serializer)
+                # Treat bare usage of ImportString (`schema is None`) as the same as ImportString[Any]
+                return core_schema.no_info_plain_validator_function(
+                    function=_validators.import_string, serialization=serializer
+                )
             else:
-                return core_schema.no_info_before_validator_function(function
-                    =_validators.import_string, schema=handler(source),
-                    serialization=serializer)
+                return core_schema.no_info_before_validator_function(
+                    function=_validators.import_string, schema=handler(source), serialization=serializer
+                )

         @classmethod
-        def __get_pydantic_json_schema__(cls, cs: CoreSchema, handler:
-            GetJsonSchemaHandler) ->JsonSchemaValue:
+        def __get_pydantic_json_schema__(cls, cs: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
             return handler(core_schema.str_schema())

-        def __repr__(self) ->str:
+        @staticmethod
+        def _serialize(v: Any) -> str:
+            if isinstance(v, ModuleType):
+                return v.__name__
+            elif hasattr(v, '__module__') and hasattr(v, '__name__'):
+                return f'{v.__module__}.{v.__name__}'
+            else:
+                return v
+
+        def __repr__(self) -> str:
             return 'ImportString'


-def condecimal(*, strict: (bool | None)=None, gt: (int | Decimal | None)=
-    None, ge: (int | Decimal | None)=None, lt: (int | Decimal | None)=None,
-    le: (int | Decimal | None)=None, multiple_of: (int | Decimal | None)=
-    None, max_digits: (int | None)=None, decimal_places: (int | None)=None,
-    allow_inf_nan: (bool | None)=None) ->type[Decimal]:
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+def condecimal(
+    *,
+    strict: bool | None = None,
+    gt: int | Decimal | None = None,
+    ge: int | Decimal | None = None,
+    lt: int | Decimal | None = None,
+    le: int | Decimal | None = None,
+    multiple_of: int | Decimal | None = None,
+    max_digits: int | None = None,
+    decimal_places: int | None = None,
+    allow_inf_nan: bool | None = None,
+) -> type[Decimal]:
     """
     !!! warning "Discouraged"
         This function is **discouraged** in favor of using
@@ -909,34 +1096,46 @@ def condecimal(*, strict: (bool | None)=None, gt: (int | Decimal | None)=
         ]
         '''
     ```
-    """
-    pass
+    """  # noqa: D212
+    return Annotated[  # pyright: ignore[reportReturnType]
+        Decimal,
+        Strict(strict) if strict is not None else None,
+        annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le),
+        annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None,
+        _fields.pydantic_general_metadata(max_digits=max_digits, decimal_places=decimal_places),
+        AllowInfNan(allow_inf_nan) if allow_inf_nan is not None else None,
+    ]
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


 @_dataclasses.dataclass(**_internal_dataclass.slots_true)
 class UuidVersion:
     """A field metadata class to indicate a [UUID](https://docs.python.org/3/library/uuid.html) version."""
+
     uuid_version: Literal[1, 3, 4, 5]

-    def __get_pydantic_json_schema__(self, core_schema: core_schema.
-        CoreSchema, handler: GetJsonSchemaHandler) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(
+        self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
+    ) -> JsonSchemaValue:
         field_schema = handler(core_schema)
-        field_schema.pop('anyOf', None)
+        field_schema.pop('anyOf', None)  # remove the bytes/str union
         field_schema.update(type='string', format=f'uuid{self.uuid_version}')
         return field_schema

-    def __get_pydantic_core_schema__(self, source: Any, handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
+    def __get_pydantic_core_schema__(self, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
         if isinstance(self, source):
+            # used directly as a type
             return core_schema.uuid_schema(version=self.uuid_version)
         else:
+            # update existing schema with self.uuid_version
             schema = handler(source)
-            _check_annotated_type(schema['type'], 'uuid', self.__class__.
-                __name__)
-            schema['version'] = self.uuid_version
+            _check_annotated_type(schema['type'], 'uuid', self.__class__.__name__)
+            schema['version'] = self.uuid_version  # type: ignore
             return schema

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(type(self.uuid_version))


@@ -998,29 +1197,57 @@ Model(uuid5=uuid.uuid5(uuid.NAMESPACE_DNS, 'pydantic.org'))
 """


+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
 @_dataclasses.dataclass
 class PathType:
     path_type: Literal['file', 'dir', 'new']

-    def __get_pydantic_json_schema__(self, core_schema: core_schema.
-        CoreSchema, handler: GetJsonSchemaHandler) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(
+        self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
+    ) -> JsonSchemaValue:
         field_schema = handler(core_schema)
         format_conversion = {'file': 'file-path', 'dir': 'directory-path'}
-        field_schema.update(format=format_conversion.get(self.path_type,
-            'path'), type='string')
+        field_schema.update(format=format_conversion.get(self.path_type, 'path'), type='string')
         return field_schema

-    def __get_pydantic_core_schema__(self, source: Any, handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
-        function_lookup = {'file': cast(core_schema.
-            WithInfoValidatorFunction, self.validate_file), 'dir': cast(
-            core_schema.WithInfoValidatorFunction, self.validate_directory),
-            'new': cast(core_schema.WithInfoValidatorFunction, self.
-            validate_new)}
-        return core_schema.with_info_after_validator_function(function_lookup
-            [self.path_type], handler(source))
-
-    def __hash__(self) ->int:
+    def __get_pydantic_core_schema__(self, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
+        function_lookup = {
+            'file': cast(core_schema.WithInfoValidatorFunction, self.validate_file),
+            'dir': cast(core_schema.WithInfoValidatorFunction, self.validate_directory),
+            'new': cast(core_schema.WithInfoValidatorFunction, self.validate_new),
+        }
+
+        return core_schema.with_info_after_validator_function(
+            function_lookup[self.path_type],
+            handler(source),
+        )
+
+    @staticmethod
+    def validate_file(path: Path, _: core_schema.ValidationInfo) -> Path:
+        if path.is_file():
+            return path
+        else:
+            raise PydanticCustomError('path_not_file', 'Path does not point to a file')
+
+    @staticmethod
+    def validate_directory(path: Path, _: core_schema.ValidationInfo) -> Path:
+        if path.is_dir():
+            return path
+        else:
+            raise PydanticCustomError('path_not_directory', 'Path does not point to a directory')
+
+    @staticmethod
+    def validate_new(path: Path, _: core_schema.ValidationInfo) -> Path:
+        if path.exists():
+            raise PydanticCustomError('path_exists', 'Path already exists')
+        elif not path.parent.exists():
+            raise PydanticCustomError('parent_does_not_exist', 'Parent directory does not exist')
+        else:
+            return path
+
+    def __hash__(self) -> int:
         return hash(type(self.path_type))
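
A small sketch of the validators implemented above, exercised through the `DirectoryPath` and `NewPath` aliases, with the filesystem assumptions made explicit via a temporary directory:

```python
import tempfile
from pathlib import Path
from pydantic import DirectoryPath, NewPath, TypeAdapter, ValidationError

with tempfile.TemporaryDirectory() as tmp:
    TypeAdapter(DirectoryPath).validate_python(tmp)          # exists and is a directory -> ok
    TypeAdapter(NewPath).validate_python(Path(tmp) / 'new')  # parent exists, path does not -> ok
    try:
        TypeAdapter(NewPath).validate_python(tmp)            # already exists
    except ValidationError as exc:
        print(exc.errors()[0]['type'])  # 'path_exists'
```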


@@ -1110,10 +1337,15 @@ except ValidationError as e:
 """
 NewPath = Annotated[Path, PathType('new')]
 """A path for a new file or directory that must not already exist. The parent directory must already exist."""
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
 if TYPE_CHECKING:
+    # Json[list[str]] will be recognized by type checkers as list[str]
     Json = Annotated[AnyType, ...]
-else:

+else:

     class Json:
         """A special type wrapper which loads JSON before parsing.
@@ -1196,54 +1428,58 @@ else:
         """

         @classmethod
-        def __class_getitem__(cls, item: AnyType) ->AnyType:
+        def __class_getitem__(cls, item: AnyType) -> AnyType:
             return Annotated[item, cls()]

         @classmethod
-        def __get_pydantic_core_schema__(cls, source: Any, handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
+        def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
             if cls is source:
                 return core_schema.json_schema(None)
             else:
                 return core_schema.json_schema(handler(source))

-        def __repr__(self) ->str:
+        def __repr__(self) -> str:
             return 'Json'

-        def __hash__(self) ->int:
+        def __hash__(self) -> int:
             return hash(type(self))

-        def __eq__(self, other: Any) ->bool:
+        def __eq__(self, other: Any) -> bool:
             return type(other) == type(self)
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
 SecretType = TypeVar('SecretType')


 class _SecretBase(Generic[SecretType]):
-
-    def __init__(self, secret_value: SecretType) ->None:
+    def __init__(self, secret_value: SecretType) -> None:
         self._secret_value: SecretType = secret_value

-    def get_secret_value(self) ->SecretType:
+    def get_secret_value(self) -> SecretType:
         """Get the secret value.

         Returns:
             The secret value.
         """
-        pass
+        return self._secret_value

-    def __eq__(self, other: Any) ->bool:
-        return isinstance(other, self.__class__) and self.get_secret_value(
-            ) == other.get_secret_value()
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, self.__class__) and self.get_secret_value() == other.get_secret_value()

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(self.get_secret_value())

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return str(self._display())

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return f'{self.__class__.__name__}({self._display()!r})'

+    def _display(self) -> str | bytes:
+        raise NotImplementedError
+

 class Secret(_SecretBase[SecretType]):
     """A generic base class used for defining a field with sensitive information that you do not want to be visible in logging or tracebacks.
@@ -1301,44 +1537,58 @@ class Secret(_SecretBase[SecretType]):
     The value returned by the `_display` method will be used for `repr()` and `str()`.
     """

+    def _display(self) -> str | bytes:
+        return '**********' if self.get_secret_value() else ''
+
     @classmethod
-    def __get_pydantic_core_schema__(cls, source: type[Any], handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
+    def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
         inner_type = None
+        # if origin_type is Secret, then cls is a GenericAlias, and we can extract the inner type directly
         origin_type = get_origin(source)
         if origin_type is not None:
             inner_type = get_args(source)[0]
+        # otherwise, we need to get the inner type from the base class
         else:
-            bases = getattr(cls, '__orig_bases__', getattr(cls, '__bases__',
-                []))
+            bases = getattr(cls, '__orig_bases__', getattr(cls, '__bases__', []))
             for base in bases:
                 if get_origin(base) is Secret:
                     inner_type = get_args(base)[0]
             if bases == [] or inner_type is None:
                 raise TypeError(
-                    f"Can't get secret type from {cls.__name__}. Please use Secret[<type>], or subclass from Secret[<type>] instead."
-                    )
-        inner_schema = handler.generate_schema(inner_type)
+                    f"Can't get secret type from {cls.__name__}. "
+                    'Please use Secret[<type>], or subclass from Secret[<type>] instead.'
+                )

-        def validate_secret_value(value, handler) ->Secret[SecretType]:
+        inner_schema = handler.generate_schema(inner_type)  # type: ignore
+
+        def validate_secret_value(value, handler) -> Secret[SecretType]:
             if isinstance(value, Secret):
                 value = value.get_secret_value()
             validated_inner = handler(value)
             return cls(validated_inner)

-        def serialize(value: Secret[SecretType], info: core_schema.
-            SerializationInfo) ->(str | Secret[SecretType]):
+        def serialize(value: Secret[SecretType], info: core_schema.SerializationInfo) -> str | Secret[SecretType]:
             if info.mode == 'json':
                 return str(value)
             else:
                 return value
-        return core_schema.json_or_python_schema(python_schema=core_schema.
-            no_info_wrap_validator_function(validate_secret_value,
-            inner_schema), json_schema=core_schema.
-            no_info_after_validator_function(lambda x: cls(x), inner_schema
-            ), serialization=core_schema.
-            plain_serializer_function_ser_schema(serialize, info_arg=True,
-            when_used='always'))
+
+        return core_schema.json_or_python_schema(
+            python_schema=core_schema.no_info_wrap_validator_function(
+                validate_secret_value,
+                inner_schema,
+            ),
+            json_schema=core_schema.no_info_after_validator_function(lambda x: cls(x), inner_schema),
+            serialization=core_schema.plain_serializer_function_ser_schema(
+                serialize,
+                info_arg=True,
+                when_used='always',
+            ),
+        )
+
+
+def _secret_display(value: SecretType) -> str:  # type: ignore
+    return '**********' if value else ''
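
Since `_display` drives both `repr()` and `str()` for secrets, a hedged sketch of customising it on a `Secret` subclass, mirroring the pattern described in the `Secret` docstring (class and field names are illustrative):

```python
from pydantic import BaseModel, Secret

class SecretSalary(Secret[float]):
    def _display(self) -> str:
        return '$****'  # shown instead of the default '**********'

class Employee(BaseModel):
    salary: SecretSalary

e = Employee(salary=100_000.0)
print(e)                             # salary=SecretSalary('$****')
print(e.salary.get_secret_value())   # 100000.0
```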


 class _SecretField(_SecretBase[SecretType]):
@@ -1346,37 +1596,56 @@ class _SecretField(_SecretBase[SecretType]):
     _error_kind: ClassVar[str]

     @classmethod
-    def __get_pydantic_core_schema__(cls, source: type[Any], handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
-
-        def serialize(value: _SecretField[SecretType], info: core_schema.
-            SerializationInfo) ->(str | _SecretField[SecretType]):
+    def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
+        def serialize(
+            value: _SecretField[SecretType], info: core_schema.SerializationInfo
+        ) -> str | _SecretField[SecretType]:
             if info.mode == 'json':
+                # we want the output to always be string without the `b'` prefix for bytes,
+                # hence we just use `_secret_display`
                 return _secret_display(value.get_secret_value())
             else:
                 return value

-        def get_json_schema(_core_schema: core_schema.CoreSchema, handler:
-            GetJsonSchemaHandler) ->JsonSchemaValue:
+        def get_json_schema(_core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
             json_schema = handler(cls._inner_schema)
-            _utils.update_not_none(json_schema, type='string', writeOnly=
-                True, format='password')
+            _utils.update_not_none(
+                json_schema,
+                type='string',
+                writeOnly=True,
+                format='password',
+            )
             return json_schema
-        json_schema = core_schema.no_info_after_validator_function(source,
-            cls._inner_schema)
-
-        def get_secret_schema(strict: bool) ->CoreSchema:
-            return core_schema.json_or_python_schema(python_schema=
-                core_schema.union_schema([core_schema.is_instance_schema(
-                source), json_schema], custom_error_type=cls._error_kind,
-                strict=strict), json_schema=json_schema, serialization=
-                core_schema.plain_serializer_function_ser_schema(serialize,
-                info_arg=True, return_schema=core_schema.str_schema(),
-                when_used='json'))
-        return core_schema.lax_or_strict_schema(lax_schema=
-            get_secret_schema(strict=False), strict_schema=
-            get_secret_schema(strict=True), metadata={
-            'pydantic_js_functions': [get_json_schema]})
+
+        json_schema = core_schema.no_info_after_validator_function(
+            source,  # construct the type
+            cls._inner_schema,
+        )
+
+        def get_secret_schema(strict: bool) -> CoreSchema:
+            return core_schema.json_or_python_schema(
+                python_schema=core_schema.union_schema(
+                    [
+                        core_schema.is_instance_schema(source),
+                        json_schema,
+                    ],
+                    custom_error_type=cls._error_kind,
+                    strict=strict,
+                ),
+                json_schema=json_schema,
+                serialization=core_schema.plain_serializer_function_ser_schema(
+                    serialize,
+                    info_arg=True,
+                    return_schema=core_schema.str_schema(),
+                    when_used='json',
+                ),
+            )
+
+        return core_schema.lax_or_strict_schema(
+            lax_schema=get_secret_schema(strict=False),
+            strict_schema=get_secret_schema(strict=True),
+            metadata={'pydantic_js_functions': [get_json_schema]},
+        )


 class SecretStr(_SecretField[str]):
@@ -1402,12 +1671,16 @@ class SecretStr(_SecretField[str]):
     #> (SecretStr('**********'), SecretStr(''))
     ```
     """
+
     _inner_schema: ClassVar[CoreSchema] = core_schema.str_schema()
     _error_kind: ClassVar[str] = 'string_type'

-    def __len__(self) ->int:
+    def __len__(self) -> int:
         return len(self._secret_value)

+    def _display(self) -> str:
+        return _secret_display(self._secret_value)
+

 class SecretBytes(_SecretField[bytes]):
     """A bytes used for storing sensitive information that you do not want to be visible in logging or tracebacks.
@@ -1431,12 +1704,19 @@ class SecretBytes(_SecretField[bytes]):
     #> (SecretBytes(b'**********'), SecretBytes(b''))
     ```
     """
+
     _inner_schema: ClassVar[CoreSchema] = core_schema.bytes_schema()
     _error_kind: ClassVar[str] = 'bytes_type'

-    def __len__(self) ->int:
+    def __len__(self) -> int:
         return len(self._secret_value)

+    def _display(self) -> bytes:
+        return _secret_display(self._secret_value).encode()
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+

 class PaymentCardBrand(str, Enum):
     amex = 'American Express'
@@ -1444,15 +1724,18 @@ class PaymentCardBrand(str, Enum):
     visa = 'Visa'
     other = 'other'

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return self.value


 @deprecated(
-    'The `PaymentCardNumber` class is deprecated, use `pydantic_extra_types` instead. See https://docs.pydantic.dev/latest/api/pydantic_extra_types_payment/#pydantic_extra_types.payment.PaymentCardNumber.'
-    , category=PydanticDeprecatedSince20)
+    'The `PaymentCardNumber` class is deprecated, use `pydantic_extra_types` instead. '
+    'See https://docs.pydantic.dev/latest/api/pydantic_extra_types_payment/#pydantic_extra_types.payment.PaymentCardNumber.',
+    category=PydanticDeprecatedSince20,
+)
 class PaymentCardNumber(str):
     """Based on: https://en.wikipedia.org/wiki/Payment_card_number."""
+
     strip_whitespace: ClassVar[bool] = True
     min_length: ClassVar[int] = 12
     max_length: ClassVar[int] = 19
@@ -1462,49 +1745,98 @@ class PaymentCardNumber(str):

     def __init__(self, card_number: str):
         self.validate_digits(card_number)
+
         card_number = self.validate_luhn_check_digit(card_number)
+
         self.bin = card_number[:6]
         self.last4 = card_number[-4:]
         self.brand = self.validate_brand(card_number)

     @classmethod
-    def __get_pydantic_core_schema__(cls, source: type[Any], handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
-        return core_schema.with_info_after_validator_function(cls.validate,
-            core_schema.str_schema(min_length=cls.min_length, max_length=
-            cls.max_length, strip_whitespace=cls.strip_whitespace))
+    def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
+        return core_schema.with_info_after_validator_function(
+            cls.validate,
+            core_schema.str_schema(
+                min_length=cls.min_length, max_length=cls.max_length, strip_whitespace=cls.strip_whitespace
+            ),
+        )

     @classmethod
-    def validate(cls, input_value: str, /, _: core_schema.ValidationInfo
-        ) ->PaymentCardNumber:
+    def validate(cls, input_value: str, /, _: core_schema.ValidationInfo) -> PaymentCardNumber:
         """Validate the card number and return a `PaymentCardNumber` instance."""
-        pass
+        return cls(input_value)

     @property
-    def masked(self) ->str:
+    def masked(self) -> str:
         """Mask all but the last 4 digits of the card number.

         Returns:
             A masked card number string.
         """
-        pass
+        num_masked = len(self) - 10  # len(bin) + len(last4) == 10
+        return f'{self.bin}{"*" * num_masked}{self.last4}'

     @classmethod
-    def validate_digits(cls, card_number: str) ->None:
+    def validate_digits(cls, card_number: str) -> None:
         """Validate that the card number is all digits."""
-        pass
+        if not card_number.isdigit():
+            raise PydanticCustomError('payment_card_number_digits', 'Card number is not all digits')

     @classmethod
-    def validate_luhn_check_digit(cls, card_number: str) ->str:
+    def validate_luhn_check_digit(cls, card_number: str) -> str:
         """Based on: https://en.wikipedia.org/wiki/Luhn_algorithm."""
-        pass
+        sum_ = int(card_number[-1])
+        length = len(card_number)
+        parity = length % 2
+        for i in range(length - 1):
+            digit = int(card_number[i])
+            if i % 2 == parity:
+                digit *= 2
+            if digit > 9:
+                digit -= 9
+            sum_ += digit
+        valid = sum_ % 10 == 0
+        if not valid:
+            raise PydanticCustomError('payment_card_number_luhn', 'Card number is not luhn valid')
+        return card_number
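
As a quick sanity check of the Luhn logic above: for 4242 4242 4242 4242 the doubled even-index digits contribute 8 × 8 = 64, the odd-index digits 7 × 2 = 14, and the check digit adds 2, giving 80, which is divisible by 10, so the number passes. A hedged sketch exercising the (deprecated) class directly:

```python
import warnings
from pydantic import PaymentCardNumber

with warnings.catch_warnings():
    warnings.simplefilter('ignore')  # the class is deprecated in favour of pydantic_extra_types
    card = PaymentCardNumber('4242424242424242')

print(card.brand, card.masked)  # Visa 424242******4242
```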

     @staticmethod
-    def validate_brand(card_number: str) ->PaymentCardBrand:
+    def validate_brand(card_number: str) -> PaymentCardBrand:
         """Validate length based on BIN for major brands:
         https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN).
         """
-        pass
+        if card_number[0] == '4':
+            brand = PaymentCardBrand.visa
+        elif 51 <= int(card_number[:2]) <= 55:
+            brand = PaymentCardBrand.mastercard
+        elif card_number[:2] in {'34', '37'}:
+            brand = PaymentCardBrand.amex
+        else:
+            brand = PaymentCardBrand.other
+
+        required_length: None | int | str = None
+        if brand == PaymentCardBrand.mastercard:
+            required_length = 16
+            valid = len(card_number) == required_length
+        elif brand == PaymentCardBrand.visa:
+            required_length = '13, 16 or 19'
+            valid = len(card_number) in {13, 16, 19}
+        elif brand == PaymentCardBrand.amex:
+            required_length = 15
+            valid = len(card_number) == required_length
+        else:
+            valid = True
+
+        if not valid:
+            raise PydanticCustomError(
+                'payment_card_number_brand',
+                'Length for a {brand} card must be {required_length}',
+                {'brand': brand, 'required_length': required_length},
+            )
+        return brand
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


 class ByteSize(int):
@@ -1543,31 +1875,80 @@ class ByteSize(int):
     #> 45474.73508864641
     ```
     """
-    byte_sizes = {'b': 1, 'kb': 10 ** 3, 'mb': 10 ** 6, 'gb': 10 ** 9, 'tb':
-        10 ** 12, 'pb': 10 ** 15, 'eb': 10 ** 18, 'kib': 2 ** 10, 'mib': 2 **
-        20, 'gib': 2 ** 30, 'tib': 2 ** 40, 'pib': 2 ** 50, 'eib': 2 ** 60,
-        'bit': 1 / 8, 'kbit': 10 ** 3 / 8, 'mbit': 10 ** 6 / 8, 'gbit': 10 **
-        9 / 8, 'tbit': 10 ** 12 / 8, 'pbit': 10 ** 15 / 8, 'ebit': 10 ** 18 /
-        8, 'kibit': 2 ** 10 / 8, 'mibit': 2 ** 20 / 8, 'gibit': 2 ** 30 / 8,
-        'tibit': 2 ** 40 / 8, 'pibit': 2 ** 50 / 8, 'eibit': 2 ** 60 / 8}
-    byte_sizes.update({k.lower()[0]: v for k, v in byte_sizes.items() if 
-        'i' not in k})
-    byte_string_pattern = '^\\s*(\\d*\\.?\\d+)\\s*(\\w+)?'
+
+    byte_sizes = {
+        'b': 1,
+        'kb': 10**3,
+        'mb': 10**6,
+        'gb': 10**9,
+        'tb': 10**12,
+        'pb': 10**15,
+        'eb': 10**18,
+        'kib': 2**10,
+        'mib': 2**20,
+        'gib': 2**30,
+        'tib': 2**40,
+        'pib': 2**50,
+        'eib': 2**60,
+        'bit': 1 / 8,
+        'kbit': 10**3 / 8,
+        'mbit': 10**6 / 8,
+        'gbit': 10**9 / 8,
+        'tbit': 10**12 / 8,
+        'pbit': 10**15 / 8,
+        'ebit': 10**18 / 8,
+        'kibit': 2**10 / 8,
+        'mibit': 2**20 / 8,
+        'gibit': 2**30 / 8,
+        'tibit': 2**40 / 8,
+        'pibit': 2**50 / 8,
+        'eibit': 2**60 / 8,
+    }
+    byte_sizes.update({k.lower()[0]: v for k, v in byte_sizes.items() if 'i' not in k})
+
+    byte_string_pattern = r'^\s*(\d*\.?\d+)\s*(\w+)?'
     byte_string_re = re.compile(byte_string_pattern, re.IGNORECASE)

     @classmethod
-    def __get_pydantic_core_schema__(cls, source: type[Any], handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
-        return core_schema.with_info_after_validator_function(function=cls.
-            _validate, schema=core_schema.union_schema([core_schema.
-            str_schema(pattern=cls.byte_string_pattern), core_schema.
-            int_schema(ge=0)], custom_error_type='byte_size',
-            custom_error_message=
-            'could not parse value and unit from byte string'),
+    def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
+        return core_schema.with_info_after_validator_function(
+            function=cls._validate,
+            schema=core_schema.union_schema(
+                [
+                    core_schema.str_schema(pattern=cls.byte_string_pattern),
+                    core_schema.int_schema(ge=0),
+                ],
+                custom_error_type='byte_size',
+                custom_error_message='could not parse value and unit from byte string',
+            ),
             serialization=core_schema.plain_serializer_function_ser_schema(
-            int, return_schema=core_schema.int_schema(ge=0)))
+                int, return_schema=core_schema.int_schema(ge=0)
+            ),
+        )

-    def human_readable(self, decimal: bool=False, separator: str='') ->str:
+    @classmethod
+    def _validate(cls, input_value: Any, /, _: core_schema.ValidationInfo) -> ByteSize:
+        try:
+            return cls(int(input_value))
+        except ValueError:
+            pass
+
+        str_match = cls.byte_string_re.match(str(input_value))
+        if str_match is None:
+            raise PydanticCustomError('byte_size', 'could not parse value and unit from byte string')
+
+        scalar, unit = str_match.groups()
+        if unit is None:
+            unit = 'b'
+
+        try:
+            unit_mult = cls.byte_sizes[unit.lower()]
+        except KeyError:
+            raise PydanticCustomError('byte_size_unit', 'could not interpret byte unit: {unit}', {'unit': unit})
+
+        return cls(int(float(scalar) * unit_mult))
+
+    def human_readable(self, decimal: bool = False, separator: str = '') -> str:
         """Converts a byte size to a human readable string.

         Args:
@@ -1578,9 +1959,27 @@ class ByteSize(int):
         Returns:
             A human readable string representation of the byte size.
         """
-        pass
-
-    def to(self, unit: str) ->float:
+        if decimal:
+            divisor = 1000
+            units = 'B', 'KB', 'MB', 'GB', 'TB', 'PB'
+            final_unit = 'EB'
+        else:
+            divisor = 1024
+            units = 'B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'
+            final_unit = 'EiB'
+
+        num = float(self)
+        for unit in units:
+            if abs(num) < divisor:
+                if unit == 'B':
+                    return f'{num:0.0f}{separator}{unit}'
+                else:
+                    return f'{num:0.1f}{separator}{unit}'
+            num /= divisor
+
+        return f'{num:0.1f}{separator}{final_unit}'
+
+    def to(self, unit: str) -> float:
         """Converts a byte size to another unit, including both byte and bit units.

         Args:
@@ -1592,7 +1991,20 @@ class ByteSize(int):
         Returns:
             The byte size in the new unit.
         """
-        pass
+        try:
+            unit_div = self.byte_sizes[unit.lower()]
+        except KeyError:
+            raise PydanticCustomError('byte_size_unit', 'Could not interpret byte unit: {unit}', {'unit': unit})
+
+        return self / unit_div
+
+
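A quick usage sketch of the parsing, `human_readable`, and `to` paths implemented above (assuming a standard pydantic install; the numbers follow directly from the unit table):

```python
from pydantic import BaseModel, ByteSize


class Download(BaseModel):
    size: ByteSize


d = Download(size='1.5 KiB')    # parsed via byte_string_re + byte_sizes
print(int(d.size))              # 1536
print(d.size.human_readable())  # 1.5KiB
print(d.size.to('bit'))         # 12288.0
```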
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+def _check_annotated_type(annotated_type: str, expected_type: str, annotation: str) -> None:
+    if annotated_type != expected_type:
+        raise PydanticUserError(f"'{annotation}' cannot annotate '{annotated_type}'.", code='invalid_annotated_type')


 if TYPE_CHECKING:
@@ -1600,14 +2012,15 @@ if TYPE_CHECKING:
     FutureDate = Annotated[date, ...]
 else:

-
     class PastDate:
         """A date in the past."""

         @classmethod
-        def __get_pydantic_core_schema__(cls, source: type[Any], handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
+        def __get_pydantic_core_schema__(
+            cls, source: type[Any], handler: GetCoreSchemaHandler
+        ) -> core_schema.CoreSchema:
             if cls is source:
+                # used directly as a type
                 return core_schema.date_schema(now_op='past')
             else:
                 schema = handler(source)
@@ -1615,17 +2028,18 @@ else:
                 schema['now_op'] = 'past'
                 return schema

-        def __repr__(self) ->str:
+        def __repr__(self) -> str:
             return 'PastDate'

-
     class FutureDate:
         """A date in the future."""

         @classmethod
-        def __get_pydantic_core_schema__(cls, source: type[Any], handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
+        def __get_pydantic_core_schema__(
+            cls, source: type[Any], handler: GetCoreSchemaHandler
+        ) -> core_schema.CoreSchema:
             if cls is source:
+                # used directly as a type
                 return core_schema.date_schema(now_op='future')
             else:
                 schema = handler(source)
@@ -1633,13 +2047,18 @@ else:
                 schema['now_op'] = 'future'
                 return schema

-        def __repr__(self) ->str:
+        def __repr__(self) -> str:
             return 'FutureDate'


-def condate(*, strict: (bool | None)=None, gt: (date | None)=None, ge: (
-    date | None)=None, lt: (date | None)=None, le: (date | None)=None) ->type[
-    date]:
+def condate(
+    *,
+    strict: bool | None = None,
+    gt: date | None = None,
+    ge: date | None = None,
+    lt: date | None = None,
+    le: date | None = None,
+) -> type[date]:
     """A wrapper for date that adds constraints.

     Args:
@@ -1652,24 +2071,32 @@ def condate(*, strict: (bool | None)=None, gt: (date | None)=None, ge: (
     Returns:
         A date type with the specified constraints.
     """
-    pass
+    return Annotated[  # pyright: ignore[reportReturnType]
+        date,
+        Strict(strict) if strict is not None else None,
+        annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le),
+    ]
+
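For illustration, the `Annotated` type returned by `condate` can be exercised through a `TypeAdapter`; a small sketch (the exact error `type` string comes from pydantic-core and may vary between versions):

```python
from datetime import date

from pydantic import TypeAdapter, ValidationError, condate

after_y2k = TypeAdapter(condate(gt=date(2000, 1, 1)))

print(after_y2k.validate_python('2024-05-01'))  # 2024-05-01

try:
    after_y2k.validate_python('1999-12-31')
except ValidationError as exc:
    print(exc.errors()[0]['type'])  # greater_than
```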

+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATETIME TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

 if TYPE_CHECKING:
     AwareDatetime = Annotated[datetime, ...]
     NaiveDatetime = Annotated[datetime, ...]
     PastDatetime = Annotated[datetime, ...]
     FutureDatetime = Annotated[datetime, ...]
-else:

+else:

     class AwareDatetime:
         """A datetime that requires timezone info."""

         @classmethod
-        def __get_pydantic_core_schema__(cls, source: type[Any], handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
+        def __get_pydantic_core_schema__(
+            cls, source: type[Any], handler: GetCoreSchemaHandler
+        ) -> core_schema.CoreSchema:
             if cls is source:
+                # used directly as a type
                 return core_schema.datetime_schema(tz_constraint='aware')
             else:
                 schema = handler(source)
@@ -1677,17 +2104,18 @@ else:
                 schema['tz_constraint'] = 'aware'
                 return schema

-        def __repr__(self) ->str:
+        def __repr__(self) -> str:
             return 'AwareDatetime'

-
     class NaiveDatetime:
         """A datetime that doesn't require timezone info."""

         @classmethod
-        def __get_pydantic_core_schema__(cls, source: type[Any], handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
+        def __get_pydantic_core_schema__(
+            cls, source: type[Any], handler: GetCoreSchemaHandler
+        ) -> core_schema.CoreSchema:
             if cls is source:
+                # used directly as a type
                 return core_schema.datetime_schema(tz_constraint='naive')
             else:
                 schema = handler(source)
@@ -1695,17 +2123,18 @@ else:
                 schema['tz_constraint'] = 'naive'
                 return schema

-        def __repr__(self) ->str:
+        def __repr__(self) -> str:
             return 'NaiveDatetime'

-
     class PastDatetime:
         """A datetime that must be in the past."""

         @classmethod
-        def __get_pydantic_core_schema__(cls, source: type[Any], handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
+        def __get_pydantic_core_schema__(
+            cls, source: type[Any], handler: GetCoreSchemaHandler
+        ) -> core_schema.CoreSchema:
             if cls is source:
+                # used directly as a type
                 return core_schema.datetime_schema(now_op='past')
             else:
                 schema = handler(source)
@@ -1713,17 +2142,18 @@ else:
                 schema['now_op'] = 'past'
                 return schema

-        def __repr__(self) ->str:
+        def __repr__(self) -> str:
             return 'PastDatetime'

-
     class FutureDatetime:
         """A datetime that must be in the future."""

         @classmethod
-        def __get_pydantic_core_schema__(cls, source: type[Any], handler:
-            GetCoreSchemaHandler) ->core_schema.CoreSchema:
+        def __get_pydantic_core_schema__(
+            cls, source: type[Any], handler: GetCoreSchemaHandler
+        ) -> core_schema.CoreSchema:
             if cls is source:
+                # used directly as a type
                 return core_schema.datetime_schema(now_op='future')
             else:
                 schema = handler(source)
@@ -1731,15 +2161,18 @@ else:
                 schema['now_op'] = 'future'
                 return schema

-        def __repr__(self) ->str:
+        def __repr__(self) -> str:
             return 'FutureDatetime'


+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Encoded TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
 class EncoderProtocol(Protocol):
     """Protocol for encoding and decoding data to and from bytes."""

     @classmethod
-    def decode(cls, data: bytes) ->bytes:
+    def decode(cls, data: bytes) -> bytes:
         """Decode the data using the encoder.

         Args:
@@ -1748,10 +2181,10 @@ class EncoderProtocol(Protocol):
         Returns:
             The decoded data.
         """
-        pass
+        ...

     @classmethod
-    def encode(cls, value: bytes) ->bytes:
+    def encode(cls, value: bytes) -> bytes:
         """Encode the data using the encoder.

         Args:
@@ -1760,23 +2193,23 @@ class EncoderProtocol(Protocol):
         Returns:
             The encoded data.
         """
-        pass
+        ...

     @classmethod
-    def get_json_format(cls) ->str:
+    def get_json_format(cls) -> str:
         """Get the JSON format for the encoded data.

         Returns:
             The JSON format for the encoded data.
         """
-        pass
+        ...


 class Base64Encoder(EncoderProtocol):
     """Standard (non-URL-safe) Base64 encoder."""

     @classmethod
-    def decode(cls, data: bytes) ->bytes:
+    def decode(cls, data: bytes) -> bytes:
         """Decode the data from base64 encoded bytes to original bytes data.

         Args:
@@ -1785,10 +2218,13 @@ class Base64Encoder(EncoderProtocol):
         Returns:
             The decoded data.
         """
-        pass
+        try:
+            return base64.decodebytes(data)
+        except ValueError as e:
+            raise PydanticCustomError('base64_decode', "Base64 decoding error: '{error}'", {'error': str(e)})

     @classmethod
-    def encode(cls, value: bytes) ->bytes:
+    def encode(cls, value: bytes) -> bytes:
         """Encode the data from bytes to a base64 encoded bytes.

         Args:
@@ -1797,23 +2233,23 @@ class Base64Encoder(EncoderProtocol):
         Returns:
             The encoded data.
         """
-        pass
+        return base64.encodebytes(value)

     @classmethod
-    def get_json_format(cls) ->Literal['base64']:
+    def get_json_format(cls) -> Literal['base64']:
         """Get the JSON format for the encoded data.

         Returns:
             The JSON format for the encoded data.
         """
-        pass
+        return 'base64'


 class Base64UrlEncoder(EncoderProtocol):
     """URL-safe Base64 encoder."""

     @classmethod
-    def decode(cls, data: bytes) ->bytes:
+    def decode(cls, data: bytes) -> bytes:
         """Decode the data from base64 encoded bytes to original bytes data.

         Args:
@@ -1822,10 +2258,13 @@ class Base64UrlEncoder(EncoderProtocol):
         Returns:
             The decoded data.
         """
-        pass
+        try:
+            return base64.urlsafe_b64decode(data)
+        except ValueError as e:
+            raise PydanticCustomError('base64_decode', "Base64 decoding error: '{error}'", {'error': str(e)})

     @classmethod
-    def encode(cls, value: bytes) ->bytes:
+    def encode(cls, value: bytes) -> bytes:
         """Encode the data from bytes to a base64 encoded bytes.

         Args:
@@ -1834,16 +2273,16 @@ class Base64UrlEncoder(EncoderProtocol):
         Returns:
             The encoded data.
         """
-        pass
+        return base64.urlsafe_b64encode(value)

     @classmethod
-    def get_json_format(cls) ->Literal['base64url']:
+    def get_json_format(cls) -> Literal['base64url']:
         """Get the JSON format for the encoded data.

         Returns:
             The JSON format for the encoded data.
         """
-        pass
+        return 'base64url'
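
As a small round-trip check of the two encoders (standard library `base64` behaviour; note that `encodebytes` appends a trailing newline while the URL-safe variant does not):

```python
from pydantic.types import Base64Encoder, Base64UrlEncoder

data = b'hello pydantic'

std = Base64Encoder.encode(data)     # b'aGVsbG8gcHlkYW50aWM=\n'
url = Base64UrlEncoder.encode(data)  # b'aGVsbG8gcHlkYW50aWM='

assert Base64Encoder.decode(std) == data
assert Base64UrlEncoder.decode(url) == data
```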


 @_dataclasses.dataclass(**_internal_dataclass.slots_true)
@@ -1900,23 +2339,24 @@ class EncodedBytes:
         '''
     ```
     """
+
     encoder: type[EncoderProtocol]

-    def __get_pydantic_json_schema__(self, core_schema: core_schema.
-        CoreSchema, handler: GetJsonSchemaHandler) ->JsonSchemaValue:
+    def __get_pydantic_json_schema__(
+        self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
+    ) -> JsonSchemaValue:
         field_schema = handler(core_schema)
-        field_schema.update(type='string', format=self.encoder.
-            get_json_format())
+        field_schema.update(type='string', format=self.encoder.get_json_format())
         return field_schema

-    def __get_pydantic_core_schema__(self, source: type[Any], handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
-        return core_schema.with_info_after_validator_function(function=self
-            .decode, schema=core_schema.bytes_schema(), serialization=
-            core_schema.plain_serializer_function_ser_schema(function=self.
-            encode))
+    def __get_pydantic_core_schema__(self, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
+        return core_schema.with_info_after_validator_function(
+            function=self.decode,
+            schema=core_schema.bytes_schema(),
+            serialization=core_schema.plain_serializer_function_ser_schema(function=self.encode),
+        )

-    def decode(self, data: bytes, _: core_schema.ValidationInfo) ->bytes:
+    def decode(self, data: bytes, _: core_schema.ValidationInfo) -> bytes:
         """Decode the data using the specified encoder.

         Args:
@@ -1925,9 +2365,9 @@ class EncodedBytes:
         Returns:
             The decoded data.
         """
-        pass
+        return self.encoder.decode(data)

-    def encode(self, value: bytes) ->bytes:
+    def encode(self, value: bytes) -> bytes:
         """Encode the data using the specified encoder.

         Args:
@@ -1936,9 +2376,9 @@ class EncodedBytes:
         Returns:
             The encoded data.
         """
-        pass
+        return self.encoder.encode(value)

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(self.encoder)


@@ -1997,15 +2437,14 @@ class EncodedStr(EncodedBytes):
     ```
     """

-    def __get_pydantic_core_schema__(self, source: type[Any], handler:
-        GetCoreSchemaHandler) ->core_schema.CoreSchema:
-        return core_schema.with_info_after_validator_function(function=self
-            .decode_str, schema=super(EncodedStr, self).
-            __get_pydantic_core_schema__(source=source, handler=handler),
-            serialization=core_schema.plain_serializer_function_ser_schema(
-            function=self.encode_str))
+    def __get_pydantic_core_schema__(self, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
+        return core_schema.with_info_after_validator_function(
+            function=self.decode_str,
+            schema=super(EncodedStr, self).__get_pydantic_core_schema__(source=source, handler=handler),  # noqa: UP008
+            serialization=core_schema.plain_serializer_function_ser_schema(function=self.encode_str),
+        )

-    def decode_str(self, data: bytes, _: core_schema.ValidationInfo) ->str:
+    def decode_str(self, data: bytes, _: core_schema.ValidationInfo) -> str:
         """Decode the data using the specified encoder.

         Args:
@@ -2014,9 +2453,9 @@ class EncodedStr(EncodedBytes):
         Returns:
             The decoded data.
         """
-        pass
+        return data.decode()

-    def encode_str(self, value: str) ->str:
+    def encode_str(self, value: str) -> str:
         """Encode the data using the specified encoder.

         Args:
@@ -2025,9 +2464,9 @@ class EncodedStr(EncodedBytes):
         Returns:
             The encoded data.
         """
-        pass
+        return super(EncodedStr, self).encode(value=value.encode()).decode()  # noqa: UP008

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(self.encoder)


@@ -2055,8 +2494,7 @@ print(m.base64_bytes)

 # Serialize into the base64 form
 print(m.model_dump())
-#> {'base64_bytes': b'VGhpcyBpcyB0aGUgd2F5
-'}
+#> {'base64_bytes': b'VGhpcyBpcyB0aGUgd2F5\n'}

 # Validate base64 data
 try:
@@ -2094,8 +2532,7 @@ print(m.base64_str)

 # Serialize into the base64 form
 print(m.model_dump())
-#> {'base64_str': 'VGhlc2UgYXJlbid0IHRoZSBkcm9pZHMgeW91J3JlIGxvb2tpbmcgZm9y
-'}
+#> {'base64_str': 'VGhlc2UgYXJlbid0IHRoZSBkcm9pZHMgeW91J3JlIGxvb2tpbmcgZm9y\n'}

 # Validate base64 data
 try:
@@ -2152,6 +2589,8 @@ print(m)
 #> base64url_str='Hw?tw>Mw'
 ```
 """
+
+
 __getattr__ = getattr_migration(__name__)


@@ -2181,21 +2620,25 @@ class GetPydanticSchema:
     #> 'abc'
     ```
     """
-    get_pydantic_core_schema: Callable[[Any, GetCoreSchemaHandler], CoreSchema
-        ] | None = None
-    get_pydantic_json_schema: Callable[[Any, GetJsonSchemaHandler],
-        JsonSchemaValue] | None = None
+
+    get_pydantic_core_schema: Callable[[Any, GetCoreSchemaHandler], CoreSchema] | None = None
+    get_pydantic_json_schema: Callable[[Any, GetJsonSchemaHandler], JsonSchemaValue] | None = None
+
+    # Note: we may want to consider adding a convenience staticmethod `def for_type(type_: Any) -> GetPydanticSchema:`
+    #   which returns `GetPydanticSchema(lambda _s, h: h(type_))`
+
     if not TYPE_CHECKING:
+        # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access

-        def __getattr__(self, item: str) ->Any:
+        def __getattr__(self, item: str) -> Any:
             """Use this rather than defining `__get_pydantic_core_schema__` etc. to reduce the number of nested calls."""
-            if (item == '__get_pydantic_core_schema__' and self.
-                get_pydantic_core_schema):
+            if item == '__get_pydantic_core_schema__' and self.get_pydantic_core_schema:
                 return self.get_pydantic_core_schema
             elif item == '__get_pydantic_json_schema__' and self.get_pydantic_json_schema:
                 return self.get_pydantic_json_schema
             else:
                 return object.__getattribute__(self, item)
+
     __hash__ = object.__hash__


@@ -2276,10 +2719,10 @@ class Tag:

     [Discriminated Unions]: ../concepts/unions.md#discriminated-unions
     """
+
     tag: str

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        GetCoreSchemaHandler) ->CoreSchema:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
         schema = handler(source_type)
         metadata = schema.setdefault('metadata', {})
         assert isinstance(metadata, dict)
@@ -2360,6 +2803,7 @@ class Discriminator:

     [Discriminated Unions]: ../concepts/unions.md#discriminated-unions
     """
+
     discriminator: str | Callable[[Any], Hashable]
     """The callable or field name for discriminating the type in a tagged union.

@@ -2375,38 +2819,113 @@ class Discriminator:
     custom_error_context: dict[str, int | str | float] | None = None
     """Context to use in custom errors."""

-    def __get_pydantic_core_schema__(self, source_type: Any, handler:
-        GetCoreSchemaHandler) ->CoreSchema:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
         origin = _typing_extra.get_origin(source_type)
         if not origin or not _typing_extra.origin_is_union(origin):
-            raise TypeError(
-                f'{type(self).__name__} must be used with a Union type, not {source_type}'
-                )
+            raise TypeError(f'{type(self).__name__} must be used with a Union type, not {source_type}')
+
         if isinstance(self.discriminator, str):
             from pydantic import Field
-            return handler(Annotated[source_type, Field(discriminator=self.
-                discriminator)])
+
+            return handler(Annotated[source_type, Field(discriminator=self.discriminator)])
         else:
             original_schema = handler(source_type)
             return self._convert_schema(original_schema)

+    def _convert_schema(self, original_schema: core_schema.CoreSchema) -> core_schema.TaggedUnionSchema:
+        if original_schema['type'] != 'union':
+            # This likely indicates that the schema was a single-item union that was simplified.
+            # In this case, we do the same thing we do in
+            # `pydantic._internal._discriminated_union._ApplyInferredDiscriminator._apply_to_root`, namely,
+            # package the generated schema back into a single-item union.
+            original_schema = core_schema.union_schema([original_schema])
+
+        tagged_union_choices = {}
+        for i, choice in enumerate(original_schema['choices']):
+            tag = None
+            if isinstance(choice, tuple):
+                choice, tag = choice
+            metadata = choice.get('metadata')
+            if metadata is not None:
+                metadata_tag = metadata.get(_core_utils.TAGGED_UNION_TAG_KEY)
+                if metadata_tag is not None:
+                    tag = metadata_tag
+            if tag is None:
+                raise PydanticUserError(
+                    f'`Tag` not provided for choice {choice} used with `Discriminator`',
+                    code='callable-discriminator-no-tag',
+                )
+            tagged_union_choices[tag] = choice
+
+        # Have to do these verbose checks to ensure falsy values ('' and {}) don't get ignored
+        custom_error_type = self.custom_error_type
+        if custom_error_type is None:
+            custom_error_type = original_schema.get('custom_error_type')
+
+        custom_error_message = self.custom_error_message
+        if custom_error_message is None:
+            custom_error_message = original_schema.get('custom_error_message')
+
+        custom_error_context = self.custom_error_context
+        if custom_error_context is None:
+            custom_error_context = original_schema.get('custom_error_context')
+
+        custom_error_type = original_schema.get('custom_error_type') if custom_error_type is None else custom_error_type
+        return core_schema.tagged_union_schema(
+            tagged_union_choices,
+            self.discriminator,
+            custom_error_type=custom_error_type,
+            custom_error_message=custom_error_message,
+            custom_error_context=custom_error_context,
+            strict=original_schema.get('strict'),
+            ref=original_schema.get('ref'),
+            metadata=original_schema.get('metadata'),
+            serialization=original_schema.get('serialization'),
+        )
+
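The `_convert_schema` path above is what powers callable discriminators combined with `Tag`; roughly, usage looks like this (mirroring the pattern from the pydantic docs, with hypothetical model names):

```python
from typing import Any, Union

from typing_extensions import Annotated

from pydantic import BaseModel, Discriminator, Tag


class Cat(BaseModel):
    meows: int


class Dog(BaseModel):
    barks: float


def pet_kind(value: Any) -> str:
    # Works for both raw dicts (validation input) and model instances (serialization)
    if isinstance(value, dict):
        return 'cat' if 'meows' in value else 'dog'
    return 'cat' if hasattr(value, 'meows') else 'dog'


class Owner(BaseModel):
    pet: Annotated[
        Union[Annotated[Cat, Tag('cat')], Annotated[Dog, Tag('dog')]],
        Discriminator(pet_kind),
    ]


print(Owner(pet={'meows': 3}).pet)  # meows=3
```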

 _JSON_TYPES = {int, float, str, bool, list, dict, type(None)}


-class _AllowAnyJson:
+def _get_type_name(x: Any) -> str:
+    type_ = type(x)
+    if type_ in _JSON_TYPES:
+        return type_.__name__
+
+    # Handle proper subclasses; note we don't need to handle None or bool here
+    if isinstance(x, int):
+        return 'int'
+    if isinstance(x, float):
+        return 'float'
+    if isinstance(x, str):
+        return 'str'
+    if isinstance(x, list):
+        return 'list'
+    if isinstance(x, dict):
+        return 'dict'
+
+    # Fail by returning the type's actual name
+    return getattr(type_, '__name__', '<no type name>')

+
+class _AllowAnyJson:
     @classmethod
-    def __get_pydantic_core_schema__(cls, source_type: Any, handler:
-        GetCoreSchemaHandler) ->CoreSchema:
+    def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
         python_schema = handler(source_type)
-        return core_schema.json_or_python_schema(json_schema=core_schema.
-            any_schema(), python_schema=python_schema)
+        return core_schema.json_or_python_schema(json_schema=core_schema.any_schema(), python_schema=python_schema)


 if TYPE_CHECKING:
-    JsonValue: TypeAlias = Union[List['JsonValue'], Dict[str, 'JsonValue'],
-        str, bool, int, float, None]
+    # This seems to only be necessary for mypy
+    JsonValue: TypeAlias = Union[
+        List['JsonValue'],
+        Dict[str, 'JsonValue'],
+        str,
+        bool,
+        int,
+        float,
+        None,
+    ]
     """A `JsonValue` is used to represent a value that can be serialized to JSON.

     It may be one of:
@@ -2449,23 +2968,37 @@ if TYPE_CHECKING:
         '''
     ```
     """
+
 else:
-    JsonValue = TypeAliasType('JsonValue', Annotated[Union[Annotated[List[
-        'JsonValue'], Tag('list')], Annotated[Dict[str, 'JsonValue'], Tag(
-        'dict')], Annotated[str, Tag('str')], Annotated[bool, Tag('bool')],
-        Annotated[int, Tag('int')], Annotated[float, Tag('float')],
-        Annotated[None, Tag('NoneType')]], Discriminator(_get_type_name,
-        custom_error_type='invalid-json-value', custom_error_message=
-        'input was not a valid JSON value'), _AllowAnyJson])
+    JsonValue = TypeAliasType(
+        'JsonValue',
+        Annotated[
+            Union[
+                Annotated[List['JsonValue'], Tag('list')],
+                Annotated[Dict[str, 'JsonValue'], Tag('dict')],
+                Annotated[str, Tag('str')],
+                Annotated[bool, Tag('bool')],
+                Annotated[int, Tag('int')],
+                Annotated[float, Tag('float')],
+                Annotated[None, Tag('NoneType')],
+            ],
+            Discriminator(
+                _get_type_name,
+                custom_error_type='invalid-json-value',
+                custom_error_message='input was not a valid JSON value',
+            ),
+            _AllowAnyJson,
+        ],
+    )


 class _OnErrorOmit:
-
     @classmethod
-    def __get_pydantic_core_schema__(cls, source_type: Any, handler:
-        GetCoreSchemaHandler) ->CoreSchema:
-        return core_schema.with_default_schema(schema=handler(source_type),
-            on_error='omit')
+    def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
+        # there is no actual default value here but we use with_default_schema since it already has the on_error
+        # behavior implemented and it would be no more efficient to implement it on every other validator
+        # or as a standalone validator
+        return core_schema.with_default_schema(schema=handler(source_type), on_error='omit')


 OnErrorOmit = Annotated[T, _OnErrorOmit]
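A short sketch of the behaviour `_OnErrorOmit` enables when the alias is used inside a collection (assuming `OnErrorOmit` is importable from the top-level `pydantic` namespace):

```python
from typing import List

from pydantic import OnErrorOmit, TypeAdapter

adapter = TypeAdapter(List[OnErrorOmit[int]])
# Items that fail validation are dropped instead of failing the whole list
print(adapter.validate_python([1, 'not an int', '3']))  # [1, 3]
```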
@@ -2506,4 +3039,5 @@ class FailFast(_fields.PydanticMetadata, BaseMetadata):
         '''
     ```
     """
+
     fail_fast: bool = True
diff --git a/pydantic/typing.py b/pydantic/typing.py
index 84690bc3e..0bda22d02 100644
--- a/pydantic/typing.py
+++ b/pydantic/typing.py
@@ -1,3 +1,5 @@
 """`typing` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/utils.py b/pydantic/utils.py
index fef150148..8d1e2a81c 100644
--- a/pydantic/utils.py
+++ b/pydantic/utils.py
@@ -1,3 +1,5 @@
 """The `utils` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/v1/_hypothesis_plugin.py b/pydantic/v1/_hypothesis_plugin.py
index 3ab0c72cd..b62234d50 100644
--- a/pydantic/v1/_hypothesis_plugin.py
+++ b/pydantic/v1/_hypothesis_plugin.py
@@ -21,6 +21,7 @@ This is because Hypothesis annotates `register_type_strategy()` with
 `(T, SearchStrategy[T])`, but in most cases we register e.g. `ConstrainedInt`
 to generate instances of the builtin `int` type which match the constraints.
 """
+
 import contextlib
 import datetime
 import ipaddress
@@ -28,61 +29,362 @@ import json
 import math
 from fractions import Fraction
 from typing import Callable, Dict, Type, Union, cast, overload
+
 import hypothesis.strategies as st
+
 import pydantic
 import pydantic.color
 import pydantic.types
 from pydantic.v1.utils import lenient_issubclass
+
+# FilePath and DirectoryPath are explicitly unsupported, as we'd have to create
+# them on-disk, and that's unsafe in general without being told *where* to do so.
+#
+# URLs are unsupported because it's easy for users to define their own strategy for
+# "normal" URLs, and hard for us to define a general strategy which includes "weird"
+# URLs but doesn't also have unpredictable performance problems.
+#
+# conlist() and conset() are unsupported for now, because the workarounds for
+# Cython and Hypothesis to handle parametrized generic types are incompatible.
+# We are rethinking Hypothesis compatibility in Pydantic v2.
+
+# Emails
 try:
     import email_validator
-except ImportError:
+except ImportError:  # pragma: no cover
     pass
 else:
-    st.register_type_strategy(pydantic.EmailStr, st.emails().filter(
-        is_valid_email))
-    st.register_type_strategy(pydantic.NameEmail, st.builds('{} <{}>'.
-        format, st.from_regex('[A-Za-z0-9_]+( [A-Za-z0-9_]+){0,5}',
-        fullmatch=True), st.emails().filter(is_valid_email)))
-st.register_type_strategy(pydantic.PyObject, st.sampled_from([cast(pydantic
-    .PyObject, f'math.{name}') for name in sorted(vars(math)) if not name.
-    startswith('_')]))
-_color_regexes = '|'.join((pydantic.color.r_hex_short, pydantic.color.
-    r_hex_long, pydantic.color.r_rgb, pydantic.color.r_rgba, pydantic.color
-    .r_hsl, pydantic.color.r_hsla)).replace(pydantic.color._r_sl,
-    '(?:(\\d\\d?(?:\\.\\d+)?|100(?:\\.0+)?)%)').replace(pydantic.color.
-    _r_alpha, '(?:(0(?:\\.\\d+)?|1(?:\\.0+)?|\\.\\d+|\\d{1,2}%))').replace(
-    pydantic.color._r_255,
-    '(?:((?:\\d|\\d\\d|[01]\\d\\d|2[0-4]\\d|25[0-4])(?:\\.\\d+)?|255(?:\\.0+)?))'
+
+    def is_valid_email(s: str) -> bool:
+        # Hypothesis' st.emails() occasionally generates emails like 0@A0--0.ac
+        # that are invalid according to email-validator, so we filter those out.
+        try:
+            email_validator.validate_email(s, check_deliverability=False)
+            return True
+        except email_validator.EmailNotValidError:  # pragma: no cover
+            return False
+
+    # Note that these strategies deliberately stay away from any tricky Unicode
+    # or other encoding issues; we're just trying to generate *something* valid.
+    st.register_type_strategy(pydantic.EmailStr, st.emails().filter(is_valid_email))  # type: ignore[arg-type]
+    st.register_type_strategy(
+        pydantic.NameEmail,
+        st.builds(
+            '{} <{}>'.format,  # type: ignore[arg-type]
+            st.from_regex('[A-Za-z0-9_]+( [A-Za-z0-9_]+){0,5}', fullmatch=True),
+            st.emails().filter(is_valid_email),
+        ),
     )
-st.register_type_strategy(pydantic.color.Color, st.one_of(st.sampled_from(
-    sorted(pydantic.color.COLORS_BY_NAME)), st.tuples(st.integers(0, 255),
-    st.integers(0, 255), st.integers(0, 255), st.none() | st.floats(0, 1) |
-    st.floats(0, 100).map('{}%'.format)), st.from_regex(_color_regexes,
-    fullmatch=True)))
-card_patterns = ('4[0-9]{14}', '5[12345][0-9]{13}', '3[47][0-9]{12}',
-    '[0-26-9][0-9]{10,17}')
-st.register_type_strategy(pydantic.PaymentCardNumber, st.from_regex('|'.
-    join(card_patterns), fullmatch=True).map(add_luhn_digit))
+
+# PyObject - dotted names, in this case taken from the math module.
+st.register_type_strategy(
+    pydantic.PyObject,  # type: ignore[arg-type]
+    st.sampled_from(
+        [cast(pydantic.PyObject, f'math.{name}') for name in sorted(vars(math)) if not name.startswith('_')]
+    ),
+)
+
+# CSS3 Colors; as name, hex, rgb(a) tuples or strings, or hsl strings
+_color_regexes = (
+    '|'.join(
+        (
+            pydantic.color.r_hex_short,
+            pydantic.color.r_hex_long,
+            pydantic.color.r_rgb,
+            pydantic.color.r_rgba,
+            pydantic.color.r_hsl,
+            pydantic.color.r_hsla,
+        )
+    )
+    # Use more precise regex patterns to avoid value-out-of-range errors
+    .replace(pydantic.color._r_sl, r'(?:(\d\d?(?:\.\d+)?|100(?:\.0+)?)%)')
+    .replace(pydantic.color._r_alpha, r'(?:(0(?:\.\d+)?|1(?:\.0+)?|\.\d+|\d{1,2}%))')
+    .replace(pydantic.color._r_255, r'(?:((?:\d|\d\d|[01]\d\d|2[0-4]\d|25[0-4])(?:\.\d+)?|255(?:\.0+)?))')
+)
+st.register_type_strategy(
+    pydantic.color.Color,
+    st.one_of(
+        st.sampled_from(sorted(pydantic.color.COLORS_BY_NAME)),
+        st.tuples(
+            st.integers(0, 255),
+            st.integers(0, 255),
+            st.integers(0, 255),
+            st.none() | st.floats(0, 1) | st.floats(0, 100).map('{}%'.format),
+        ),
+        st.from_regex(_color_regexes, fullmatch=True),
+    ),
+)
+
+
+# Card numbers, valid according to the Luhn algorithm
+
+
+def add_luhn_digit(card_number: str) -> str:
+    # See https://en.wikipedia.org/wiki/Luhn_algorithm
+    for digit in '0123456789':
+        with contextlib.suppress(Exception):
+            pydantic.PaymentCardNumber.validate_luhn_check_digit(card_number + digit)
+            return card_number + digit
+    raise AssertionError('Unreachable')  # pragma: no cover
+
+
+card_patterns = (
+    # Note that these patterns omit the Luhn check digit; that's added by the function above
+    '4[0-9]{14}',  # Visa
+    '5[12345][0-9]{13}',  # Mastercard
+    '3[47][0-9]{12}',  # American Express
+    '[0-26-9][0-9]{10,17}',  # other (incomplete to avoid overlap)
+)
+st.register_type_strategy(
+    pydantic.PaymentCardNumber,
+    st.from_regex('|'.join(card_patterns), fullmatch=True).map(add_luhn_digit),  # type: ignore[arg-type]
+)
+
+# UUIDs
 st.register_type_strategy(pydantic.UUID1, st.uuids(version=1))
 st.register_type_strategy(pydantic.UUID3, st.uuids(version=3))
 st.register_type_strategy(pydantic.UUID4, st.uuids(version=4))
 st.register_type_strategy(pydantic.UUID5, st.uuids(version=5))
-st.register_type_strategy(pydantic.SecretBytes, st.binary().map(pydantic.
-    SecretBytes))
-st.register_type_strategy(pydantic.SecretStr, st.text().map(pydantic.SecretStr)
-    )
-st.register_type_strategy(pydantic.IPvAnyAddress, st.ip_addresses())
-st.register_type_strategy(pydantic.IPvAnyInterface, st.from_type(ipaddress.
-    IPv4Interface) | st.from_type(ipaddress.IPv6Interface))
-st.register_type_strategy(pydantic.IPvAnyNetwork, st.from_type(ipaddress.
-    IPv4Network) | st.from_type(ipaddress.IPv6Network))
+
+# Secrets
+st.register_type_strategy(pydantic.SecretBytes, st.binary().map(pydantic.SecretBytes))
+st.register_type_strategy(pydantic.SecretStr, st.text().map(pydantic.SecretStr))
+
+# IP addresses, networks, and interfaces
+st.register_type_strategy(pydantic.IPvAnyAddress, st.ip_addresses())  # type: ignore[arg-type]
+st.register_type_strategy(
+    pydantic.IPvAnyInterface,
+    st.from_type(ipaddress.IPv4Interface) | st.from_type(ipaddress.IPv6Interface),  # type: ignore[arg-type]
+)
+st.register_type_strategy(
+    pydantic.IPvAnyNetwork,
+    st.from_type(ipaddress.IPv4Network) | st.from_type(ipaddress.IPv6Network),  # type: ignore[arg-type]
+)
+
+# We hook into the con***() functions and the ConstrainedNumberMeta metaclass,
+# so here we only have to register subclasses for other constrained types which
+# don't go via those mechanisms.  Then there are the registration hooks below.
 st.register_type_strategy(pydantic.StrictBool, st.booleans())
 st.register_type_strategy(pydantic.StrictStr, st.text())
-st.register_type_strategy(pydantic.FutureDate, st.dates(min_value=datetime.
-    date.today() + datetime.timedelta(days=1)))
-st.register_type_strategy(pydantic.PastDate, st.dates(max_value=datetime.
-    date.today() - datetime.timedelta(days=1)))
-RESOLVERS: Dict[type, Callable[[type], st.SearchStrategy]] = {}
+
+
+# FutureDate, PastDate
+st.register_type_strategy(pydantic.FutureDate, st.dates(min_value=datetime.date.today() + datetime.timedelta(days=1)))
+st.register_type_strategy(pydantic.PastDate, st.dates(max_value=datetime.date.today() - datetime.timedelta(days=1)))
+
+
+# Constrained-type resolver functions
+#
+# For these ones, we actually want to inspect the type in order to work out a
+# satisfying strategy.  First up, the machinery for tracking resolver functions:
+
+RESOLVERS: Dict[type, Callable[[type], st.SearchStrategy]] = {}  # type: ignore[type-arg]
+
+
+@overload
+def _registered(typ: Type[pydantic.types.T]) -> Type[pydantic.types.T]:
+    pass
+
+
+@overload
+def _registered(typ: pydantic.types.ConstrainedNumberMeta) -> pydantic.types.ConstrainedNumberMeta:
+    pass
+
+
+def _registered(
+    typ: Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta]
+) -> Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta]:
+    # This function replaces the version in `pydantic.types`, in order to
+    # effect the registration of new constrained types so that Hypothesis
+    # can generate valid examples.
+    pydantic.types._DEFINED_TYPES.add(typ)
+    for supertype, resolver in RESOLVERS.items():
+        if issubclass(typ, supertype):
+            st.register_type_strategy(typ, resolver(typ))  # type: ignore
+            return typ
+    raise NotImplementedError(f'Unknown type {typ!r} has no resolver to register')  # pragma: no cover
+
+
+def resolves(
+    typ: Union[type, pydantic.types.ConstrainedNumberMeta]
+) -> Callable[[Callable[..., st.SearchStrategy]], Callable[..., st.SearchStrategy]]:  # type: ignore[type-arg]
+    def inner(f):  # type: ignore
+        assert f not in RESOLVERS
+        RESOLVERS[typ] = f
+        return f
+
+    return inner
+
+
+# Type-to-strategy resolver functions
+
+
+@resolves(pydantic.JsonWrapper)
+def resolve_json(cls):  # type: ignore[no-untyped-def]
+    try:
+        inner = st.none() if cls.inner_type is None else st.from_type(cls.inner_type)
+    except Exception:  # pragma: no cover
+        finite = st.floats(allow_infinity=False, allow_nan=False)
+        inner = st.recursive(
+            base=st.one_of(st.none(), st.booleans(), st.integers(), finite, st.text()),
+            extend=lambda x: st.lists(x) | st.dictionaries(st.text(), x),  # type: ignore
+        )
+    inner_type = getattr(cls, 'inner_type', None)
+    return st.builds(
+        cls.inner_type.json if lenient_issubclass(inner_type, pydantic.BaseModel) else json.dumps,
+        inner,
+        ensure_ascii=st.booleans(),
+        indent=st.none() | st.integers(0, 16),
+        sort_keys=st.booleans(),
+    )
+
+
+@resolves(pydantic.ConstrainedBytes)
+def resolve_conbytes(cls):  # type: ignore[no-untyped-def]  # pragma: no cover
+    min_size = cls.min_length or 0
+    max_size = cls.max_length
+    if not cls.strip_whitespace:
+        return st.binary(min_size=min_size, max_size=max_size)
+    # Fun with regex to ensure we neither start nor end with whitespace
+    repeats = '{{{},{}}}'.format(
+        min_size - 2 if min_size > 2 else 0,
+        max_size - 2 if (max_size or 0) > 2 else '',
+    )
+    if min_size >= 2:
+        pattern = rf'\W.{repeats}\W'
+    elif min_size == 1:
+        pattern = rf'\W(.{repeats}\W)?'
+    else:
+        assert min_size == 0
+        pattern = rf'(\W(.{repeats}\W)?)?'
+    return st.from_regex(pattern.encode(), fullmatch=True)
+
+
+@resolves(pydantic.ConstrainedDecimal)
+def resolve_condecimal(cls):  # type: ignore[no-untyped-def]
+    min_value = cls.ge
+    max_value = cls.le
+    if cls.gt is not None:
+        assert min_value is None, 'Set `gt` or `ge`, but not both'
+        min_value = cls.gt
+    if cls.lt is not None:
+        assert max_value is None, 'Set `lt` or `le`, but not both'
+        max_value = cls.lt
+    s = st.decimals(min_value, max_value, allow_nan=False, places=cls.decimal_places)
+    if cls.lt is not None:
+        s = s.filter(lambda d: d < cls.lt)
+    if cls.gt is not None:
+        s = s.filter(lambda d: cls.gt < d)
+    return s
+
+
+@resolves(pydantic.ConstrainedFloat)
+def resolve_confloat(cls):  # type: ignore[no-untyped-def]
+    min_value = cls.ge
+    max_value = cls.le
+    exclude_min = False
+    exclude_max = False
+
+    if cls.gt is not None:
+        assert min_value is None, 'Set `gt` or `ge`, but not both'
+        min_value = cls.gt
+        exclude_min = True
+    if cls.lt is not None:
+        assert max_value is None, 'Set `lt` or `le`, but not both'
+        max_value = cls.lt
+        exclude_max = True
+
+    if cls.multiple_of is None:
+        return st.floats(min_value, max_value, exclude_min=exclude_min, exclude_max=exclude_max, allow_nan=False)
+
+    if min_value is not None:
+        min_value = math.ceil(min_value / cls.multiple_of)
+        if exclude_min:
+            min_value = min_value + 1
+    if max_value is not None:
+        assert max_value >= cls.multiple_of, 'Cannot build model with max value smaller than multiple of'
+        max_value = math.floor(max_value / cls.multiple_of)
+        if exclude_max:
+            max_value = max_value - 1
+
+    return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of)
+
+
+@resolves(pydantic.ConstrainedInt)
+def resolve_conint(cls):  # type: ignore[no-untyped-def]
+    min_value = cls.ge
+    max_value = cls.le
+    if cls.gt is not None:
+        assert min_value is None, 'Set `gt` or `ge`, but not both'
+        min_value = cls.gt + 1
+    if cls.lt is not None:
+        assert max_value is None, 'Set `lt` or `le`, but not both'
+        max_value = cls.lt - 1
+
+    if cls.multiple_of is None or cls.multiple_of == 1:
+        return st.integers(min_value, max_value)
+
+    # These adjustments and the .map handle integer-valued multiples, while the
+    # .filter handles trickier cases as for confloat.
+    if min_value is not None:
+        min_value = math.ceil(Fraction(min_value) / Fraction(cls.multiple_of))
+    if max_value is not None:
+        max_value = math.floor(Fraction(max_value) / Fraction(cls.multiple_of))
+    return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of)
+
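As a worked example of the ceil/floor adjustment above (with illustrative bounds): for `conint(ge=3, le=20, multiple_of=7)` the integer strategy is drawn from 1..2 and mapped back through the multiplier, giving exactly the valid values 7 and 14:

```python
import math
from fractions import Fraction

ge, le, multiple_of = 3, 20, 7
lo = math.ceil(Fraction(ge) / Fraction(multiple_of))   # 1
hi = math.floor(Fraction(le) / Fraction(multiple_of))  # 2
print([x * multiple_of for x in range(lo, hi + 1)])    # [7, 14]
```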
+
+@resolves(pydantic.ConstrainedDate)
+def resolve_condate(cls):  # type: ignore[no-untyped-def]
+    if cls.ge is not None:
+        assert cls.gt is None, 'Set `gt` or `ge`, but not both'
+        min_value = cls.ge
+    elif cls.gt is not None:
+        min_value = cls.gt + datetime.timedelta(days=1)
+    else:
+        min_value = datetime.date.min
+    if cls.le is not None:
+        assert cls.lt is None, 'Set `lt` or `le`, but not both'
+        max_value = cls.le
+    elif cls.lt is not None:
+        max_value = cls.lt - datetime.timedelta(days=1)
+    else:
+        max_value = datetime.date.max
+    return st.dates(min_value, max_value)
+
+
+@resolves(pydantic.ConstrainedStr)
+def resolve_constr(cls):  # type: ignore[no-untyped-def]  # pragma: no cover
+    min_size = cls.min_length or 0
+    max_size = cls.max_length
+
+    if cls.regex is None and not cls.strip_whitespace:
+        return st.text(min_size=min_size, max_size=max_size)
+
+    if cls.regex is not None:
+        strategy = st.from_regex(cls.regex)
+        if cls.strip_whitespace:
+            strategy = strategy.filter(lambda s: s == s.strip())
+    elif cls.strip_whitespace:
+        repeats = '{{{},{}}}'.format(
+            min_size - 2 if min_size > 2 else 0,
+            max_size - 2 if (max_size or 0) > 2 else '',
+        )
+        if min_size >= 2:
+            strategy = st.from_regex(rf'\W.{repeats}\W')
+        elif min_size == 1:
+            strategy = st.from_regex(rf'\W(.{repeats}\W)?')
+        else:
+            assert min_size == 0
+            strategy = st.from_regex(rf'(\W(.{repeats}\W)?)?')
+
+    if min_size == 0 and max_size is None:
+        return strategy
+    elif max_size is None:
+        return strategy.filter(lambda s: min_size <= len(s))
+    return strategy.filter(lambda s: min_size <= len(s) <= max_size)
+
+
+# Finally, register all previously-defined types, and patch in our new function
 for typ in list(pydantic.types._DEFINED_TYPES):
     _registered(typ)
 pydantic.types._registered = _registered
diff --git a/pydantic/v1/annotated_types.py b/pydantic/v1/annotated_types.py
index 06d15ea97..d9eaaafd5 100644
--- a/pydantic/v1/annotated_types.py
+++ b/pydantic/v1/annotated_types.py
@@ -1,29 +1,72 @@
 import sys
 from typing import TYPE_CHECKING, Any, Dict, FrozenSet, NamedTuple, Type
+
 from pydantic.v1.fields import Required
 from pydantic.v1.main import BaseModel, create_model
 from pydantic.v1.typing import is_typeddict, is_typeddict_special
+
 if TYPE_CHECKING:
     from typing_extensions import TypedDict
+
 if sys.version_info < (3, 11):

+    def is_legacy_typeddict(typeddict_cls: Type['TypedDict']) -> bool:  # type: ignore[valid-type]
+        return is_typeddict(typeddict_cls) and type(typeddict_cls).__module__ == 'typing'
+
+else:
+
+    def is_legacy_typeddict(_: Any) -> Any:
+        return False

-def create_model_from_typeddict(typeddict_cls: Type['TypedDict'], **kwargs: Any
-    ) ->Type['BaseModel']:
+
+def create_model_from_typeddict(
+    # Mypy bug: `Type[TypedDict]` is resolved as `Any` https://github.com/python/mypy/issues/11030
+    typeddict_cls: Type['TypedDict'],  # type: ignore[valid-type]
+    **kwargs: Any,
+) -> Type['BaseModel']:
     """
     Create a `BaseModel` based on the fields of a `TypedDict`.
     Since `typing.TypedDict` in Python 3.8 does not store runtime information about optional keys,
     we raise an error if this happens (see https://bugs.python.org/issue38834).
     """
-    pass
+    field_definitions: Dict[str, Any]
+
+    # Best case scenario: with python 3.9+ or when `TypedDict` is imported from `typing_extensions`
+    if not hasattr(typeddict_cls, '__required_keys__'):
+        raise TypeError(
+            'You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.9.2. '
+            'Without it, there is no way to differentiate required and optional fields when subclassed.'
+        )
+
+    if is_legacy_typeddict(typeddict_cls) and any(
+        is_typeddict_special(t) for t in typeddict_cls.__annotations__.values()
+    ):
+        raise TypeError(
+            'You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.11. '
+            'Without it, there is no way to reflect Required/NotRequired keys.'
+        )
+
+    required_keys: FrozenSet[str] = typeddict_cls.__required_keys__  # type: ignore[attr-defined]
+    field_definitions = {
+        field_name: (field_type, Required if field_name in required_keys else None)
+        for field_name, field_type in typeddict_cls.__annotations__.items()
+    }
+
+    return create_model(typeddict_cls.__name__, **kwargs, **field_definitions)


-def create_model_from_namedtuple(namedtuple_cls: Type['NamedTuple'], **
-    kwargs: Any) ->Type['BaseModel']:
+def create_model_from_namedtuple(namedtuple_cls: Type['NamedTuple'], **kwargs: Any) -> Type['BaseModel']:
     """
     Create a `BaseModel` based on the fields of a named tuple.
     A named tuple can be created with `typing.NamedTuple` and declared annotations
     but also with `collections.namedtuple`, in this case we consider all fields
     to have type `Any`.
     """
-    pass
+    # With python 3.10+, `__annotations__` always exists but can be empty hence the `getattr... or...` logic
+    namedtuple_annotations: Dict[str, Type[Any]] = getattr(namedtuple_cls, '__annotations__', None) or {
+        k: Any for k in namedtuple_cls._fields
+    }
+    field_definitions: Dict[str, Any] = {
+        field_name: (field_type, Required) for field_name, field_type in namedtuple_annotations.items()
+    }
+    return create_model(namedtuple_cls.__name__, **kwargs, **field_definitions)
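
A minimal usage sketch for the named-tuple helper (the `Point` tuple is made up for illustration):

```python
from typing import NamedTuple

from pydantic.v1.annotated_types import create_model_from_namedtuple


class Point(NamedTuple):
    x: int
    y: int


PointModel = create_model_from_namedtuple(Point)
print(PointModel(x='1', y=2))  # x=1 y=2  (V1 coerces the string to an int)
```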
diff --git a/pydantic/v1/class_validators.py b/pydantic/v1/class_validators.py
index c7c09f7cd..2f68fc860 100644
--- a/pydantic/v1/class_validators.py
+++ b/pydantic/v1/class_validators.py
@@ -4,20 +4,27 @@ from functools import partial, partialmethod, wraps
 from itertools import chain
 from types import FunctionType
 from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union, overload
+
 from pydantic.v1.errors import ConfigError
 from pydantic.v1.typing import AnyCallable
 from pydantic.v1.utils import ROOT_KEY, in_ipython
+
 if TYPE_CHECKING:
     from pydantic.v1.typing import AnyClassMethod


 class Validator:
-    __slots__ = ('func', 'pre', 'each_item', 'always', 'check_fields',
-        'skip_on_failure')
+    __slots__ = 'func', 'pre', 'each_item', 'always', 'check_fields', 'skip_on_failure'

-    def __init__(self, func: AnyCallable, pre: bool=False, each_item: bool=
-        False, always: bool=False, check_fields: bool=False,
-        skip_on_failure: bool=False):
+    def __init__(
+        self,
+        func: AnyCallable,
+        pre: bool = False,
+        each_item: bool = False,
+        always: bool = False,
+        check_fields: bool = False,
+        skip_on_failure: bool = False,
+    ):
         self.func = func
         self.pre = pre
         self.each_item = each_item
@@ -28,21 +35,29 @@ class Validator:

 if TYPE_CHECKING:
     from inspect import Signature
+
     from pydantic.v1.config import BaseConfig
     from pydantic.v1.fields import ModelField
     from pydantic.v1.types import ModelOrDc
-    ValidatorCallable = Callable[[Optional[ModelOrDc], Any, Dict[str, Any],
-        ModelField, Type[BaseConfig]], Any]
+
+    ValidatorCallable = Callable[[Optional[ModelOrDc], Any, Dict[str, Any], ModelField, Type[BaseConfig]], Any]
     ValidatorsList = List[ValidatorCallable]
     ValidatorListDict = Dict[str, List[Validator]]
+
 _FUNCS: Set[str] = set()
 VALIDATOR_CONFIG_KEY = '__validator_config__'
 ROOT_VALIDATOR_CONFIG_KEY = '__root_validator_config__'


-def validator(*fields: str, pre: bool=False, each_item: bool=False, always:
-    bool=False, check_fields: bool=True, whole: Optional[bool]=None,
-    allow_reuse: bool=False) ->Callable[[AnyCallable], 'AnyClassMethod']:
+def validator(
+    *fields: str,
+    pre: bool = False,
+    each_item: bool = False,
+    always: bool = False,
+    check_fields: bool = True,
+    whole: Optional[bool] = None,
+    allow_reuse: bool = False,
+) -> Callable[[AnyCallable], 'AnyClassMethod']:
     """
     Decorate methods on the class indicating that they should be used to validate fields
     :param fields: which field(s) the method should be called on
@@ -53,36 +68,178 @@ def validator(*fields: str, pre: bool=False, each_item: bool=False, always:
     :param check_fields: whether to check that the fields actually exist on the model
     :param allow_reuse: whether to track and raise an error if another validator refers to the decorated function
     """
-    pass
+    if not fields:
+        raise ConfigError('validator with no fields specified')
+    elif isinstance(fields[0], FunctionType):
+        raise ConfigError(
+            "validators should be used with fields and keyword arguments, not bare. "  # noqa: Q000
+            "E.g. usage should be `@validator('<field_name>', ...)`"
+        )
+    elif not all(isinstance(field, str) for field in fields):
+        raise ConfigError(
+            "validator fields should be passed as separate string args. "  # noqa: Q000
+            "E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`"
+        )
+
+    if whole is not None:
+        warnings.warn(
+            'The "whole" keyword argument is deprecated, use "each_item" (inverse meaning, default False) instead',
+            DeprecationWarning,
+        )
+        assert each_item is False, '"each_item" and "whole" conflict, remove "whole"'
+        each_item = not whole
+
+    def dec(f: AnyCallable) -> 'AnyClassMethod':
+        f_cls = _prepare_validator(f, allow_reuse)
+        setattr(
+            f_cls,
+            VALIDATOR_CONFIG_KEY,
+            (
+                fields,
+                Validator(func=f_cls.__func__, pre=pre, each_item=each_item, always=always, check_fields=check_fields),
+            ),
+        )
+        return f_cls
+
+    return dec
+
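For context, a sketch of how this decorator is typically applied on a V1 model (field name and validation logic are illustrative):

```python
from pydantic.v1 import BaseModel, validator


class User(BaseModel):
    name: str

    @validator('name')
    def name_must_not_be_blank(cls, v: str) -> str:
        if not v.strip():
            raise ValueError('name must not be blank')
        return v.title()


print(User(name='samuel colvin').name)  # Samuel Colvin
```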
+
+@overload
+def root_validator(_func: AnyCallable) -> 'AnyClassMethod':
+    ...


-def root_validator(_func: Optional[AnyCallable]=None, *, pre: bool=False,
-    allow_reuse: bool=False, skip_on_failure: bool=False) ->Union[
-    'AnyClassMethod', Callable[[AnyCallable], 'AnyClassMethod']]:
+@overload
+def root_validator(
+    *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
+) -> Callable[[AnyCallable], 'AnyClassMethod']:
+    ...
+
+
+def root_validator(
+    _func: Optional[AnyCallable] = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
+) -> Union['AnyClassMethod', Callable[[AnyCallable], 'AnyClassMethod']]:
     """
     Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either
     before or after standard model parsing/validation is performed.
     """
-    pass
+    if _func:
+        f_cls = _prepare_validator(_func, allow_reuse)
+        setattr(
+            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
+        )
+        return f_cls
+
+    def dec(f: AnyCallable) -> 'AnyClassMethod':
+        f_cls = _prepare_validator(f, allow_reuse)
+        setattr(
+            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
+        )
+        return f_cls

+    return dec

-def _prepare_validator(function: AnyCallable, allow_reuse: bool
-    ) ->'AnyClassMethod':
+
+def _prepare_validator(function: AnyCallable, allow_reuse: bool) -> 'AnyClassMethod':
     """
     Avoid validators with duplicated names since without this, validators can be overwritten silently
     which generally isn't the intended behaviour, don't run in ipython (see #312) or if allow_reuse is False.
     """
-    pass
+    f_cls = function if isinstance(function, classmethod) else classmethod(function)
+    if not in_ipython() and not allow_reuse:
+        ref = (
+            getattr(f_cls.__func__, '__module__', '<No __module__>')
+            + '.'
+            + getattr(f_cls.__func__, '__qualname__', f'<No __qualname__: id:{id(f_cls.__func__)}>')
+        )
+        if ref in _FUNCS:
+            raise ConfigError(f'duplicate validator function "{ref}"; if this is intended, set `allow_reuse=True`')
+        _FUNCS.add(ref)
+    return f_cls


 class ValidatorGroup:
-
-    def __init__(self, validators: 'ValidatorListDict') ->None:
+    def __init__(self, validators: 'ValidatorListDict') -> None:
         self.validators = validators
         self.used_validators = {'*'}

+    def get_validators(self, name: str) -> Optional[Dict[str, Validator]]:
+        self.used_validators.add(name)
+        validators = self.validators.get(name, [])
+        if name != ROOT_KEY:
+            validators += self.validators.get('*', [])
+        if validators:
+            return {getattr(v.func, '__name__', f'<No __name__: id:{id(v.func)}>'): v for v in validators}
+        else:
+            return None
+
+    def check_for_unused(self) -> None:
+        unused_validators = set(
+            chain.from_iterable(
+                (
+                    getattr(v.func, '__name__', f'<No __name__: id:{id(v.func)}>')
+                    for v in self.validators[f]
+                    if v.check_fields
+                )
+                for f in (self.validators.keys() - self.used_validators)
+            )
+        )
+        if unused_validators:
+            fn = ', '.join(unused_validators)
+            raise ConfigError(
+                f"Validators defined with incorrect fields: {fn} "  # noqa: Q000
+                f"(use check_fields=False if you're inheriting from the model and intended this)"
+            )
+
+
+def extract_validators(namespace: Dict[str, Any]) -> Dict[str, List[Validator]]:
+    validators: Dict[str, List[Validator]] = {}
+    for var_name, value in namespace.items():
+        validator_config = getattr(value, VALIDATOR_CONFIG_KEY, None)
+        if validator_config:
+            fields, v = validator_config
+            for field in fields:
+                if field in validators:
+                    validators[field].append(v)
+                else:
+                    validators[field] = [v]
+    return validators
+

-def make_generic_validator(validator: AnyCallable) ->'ValidatorCallable':
+def extract_root_validators(namespace: Dict[str, Any]) -> Tuple[List[AnyCallable], List[Tuple[bool, AnyCallable]]]:
+    from inspect import signature
+
+    pre_validators: List[AnyCallable] = []
+    post_validators: List[Tuple[bool, AnyCallable]] = []
+    for name, value in namespace.items():
+        validator_config: Optional[Validator] = getattr(value, ROOT_VALIDATOR_CONFIG_KEY, None)
+        if validator_config:
+            sig = signature(validator_config.func)
+            args = list(sig.parameters.keys())
+            if args[0] == 'self':
+                raise ConfigError(
+                    f'Invalid signature for root validator {name}: {sig}, "self" not permitted as first argument, '
+                    f'should be: (cls, values).'
+                )
+            if len(args) != 2:
+                raise ConfigError(f'Invalid signature for root validator {name}: {sig}, should be: (cls, values).')
+            # check function signature
+            if validator_config.pre:
+                pre_validators.append(validator_config.func)
+            else:
+                post_validators.append((validator_config.skip_on_failure, validator_config.func))
+    return pre_validators, post_validators
+
+
+def inherit_validators(base_validators: 'ValidatorListDict', validators: 'ValidatorListDict') -> 'ValidatorListDict':
+    for field, field_validators in base_validators.items():
+        if field not in validators:
+            validators[field] = []
+        validators[field] += field_validators
+    return validators
+
+
+def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable':
     """
     Make a generic function which calls a validator with the right arguments.

@@ -92,7 +249,113 @@ def make_generic_validator(validator: AnyCallable) ->'ValidatorCallable':
     It's done like this so validators don't all need **kwargs in their signature, eg. any combination of
     the arguments "values", "fields" and/or "config" are permitted.
     """
-    pass
+    from inspect import signature
+
+    if not isinstance(validator, (partial, partialmethod)):
+        # This should be the default case, so overhead is reduced
+        sig = signature(validator)
+        args = list(sig.parameters.keys())
+    else:
+        # Fix the generated argument lists of partial methods
+        sig = signature(validator.func)
+        args = [
+            k
+            for k in signature(validator.func).parameters.keys()
+            if k not in validator.args | validator.keywords.keys()
+        ]
+
+    first_arg = args.pop(0)
+    if first_arg == 'self':
+        raise ConfigError(
+            f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, '
+            f'should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.'
+        )
+    elif first_arg == 'cls':
+        # assume the second argument is value
+        return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:])))
+    else:
+        # assume the first argument was value which has already been removed
+        return wraps(validator)(_generic_validator_basic(validator, sig, set(args)))
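To illustrate why the generic wrapper is needed: the two validators below declare different subsets of the optional keyword arguments, and `make_generic_validator` normalises both to the same `(cls, value, values, field, config)` calling convention. The `Order` model is illustrative:

from pydantic.v1 import BaseModel, validator


class Order(BaseModel):
    price: float
    quantity: int
    total: float

    @validator('quantity')
    def quantity_positive(cls, v):  # value only
        assert v > 0
        return v

    @validator('total')
    def total_matches(cls, v, values):  # value plus previously-validated values
        expected = values.get('price', 0) * values.get('quantity', 0)
        assert abs(v - expected) < 1e-6, 'total does not match price * quantity'
        return v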
+
+
+def prep_validators(v_funcs: Iterable[AnyCallable]) -> 'ValidatorsList':
+    return [make_generic_validator(f) for f in v_funcs if f]


 all_kwargs = {'values', 'field', 'config'}
+
+
+def _generic_validator_cls(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable':
+    # assume the first argument is value
+    has_kwargs = False
+    if 'kwargs' in args:
+        has_kwargs = True
+        args -= {'kwargs'}
+
+    if not args.issubset(all_kwargs):
+        raise ConfigError(
+            f'Invalid signature for validator {validator}: {sig}, should be: '
+            f'(cls, value, values, config, field), "values", "config" and "field" are all optional.'
+        )
+
+    if has_kwargs:
+        return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config)
+    elif args == set():
+        return lambda cls, v, values, field, config: validator(cls, v)
+    elif args == {'values'}:
+        return lambda cls, v, values, field, config: validator(cls, v, values=values)
+    elif args == {'field'}:
+        return lambda cls, v, values, field, config: validator(cls, v, field=field)
+    elif args == {'config'}:
+        return lambda cls, v, values, field, config: validator(cls, v, config=config)
+    elif args == {'values', 'field'}:
+        return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field)
+    elif args == {'values', 'config'}:
+        return lambda cls, v, values, field, config: validator(cls, v, values=values, config=config)
+    elif args == {'field', 'config'}:
+        return lambda cls, v, values, field, config: validator(cls, v, field=field, config=config)
+    else:
+        # args == {'values', 'field', 'config'}
+        return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config)
+
+
+def _generic_validator_basic(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable':
+    has_kwargs = False
+    if 'kwargs' in args:
+        has_kwargs = True
+        args -= {'kwargs'}
+
+    if not args.issubset(all_kwargs):
+        raise ConfigError(
+            f'Invalid signature for validator {validator}: {sig}, should be: '
+            f'(value, values, config, field), "values", "config" and "field" are all optional.'
+        )
+
+    if has_kwargs:
+        return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config)
+    elif args == set():
+        return lambda cls, v, values, field, config: validator(v)
+    elif args == {'values'}:
+        return lambda cls, v, values, field, config: validator(v, values=values)
+    elif args == {'field'}:
+        return lambda cls, v, values, field, config: validator(v, field=field)
+    elif args == {'config'}:
+        return lambda cls, v, values, field, config: validator(v, config=config)
+    elif args == {'values', 'field'}:
+        return lambda cls, v, values, field, config: validator(v, values=values, field=field)
+    elif args == {'values', 'config'}:
+        return lambda cls, v, values, field, config: validator(v, values=values, config=config)
+    elif args == {'field', 'config'}:
+        return lambda cls, v, values, field, config: validator(v, field=field, config=config)
+    else:
+        # args == {'values', 'field', 'config'}
+        return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config)
+
+
+def gather_all_validators(type_: 'ModelOrDc') -> Dict[str, 'AnyClassMethod']:
+    all_attributes = ChainMap(*[cls.__dict__ for cls in type_.__mro__])  # type: ignore[arg-type,var-annotated]
+    return {
+        k: v
+        for k, v in all_attributes.items()
+        if hasattr(v, VALIDATOR_CONFIG_KEY) or hasattr(v, ROOT_VALIDATOR_CONFIG_KEY)
+    }
diff --git a/pydantic/v1/color.py b/pydantic/v1/color.py
index 021245ab0..b0bbf78f4 100644
--- a/pydantic/v1/color.py
+++ b/pydantic/v1/color.py
@@ -11,20 +11,23 @@ import math
 import re
 from colorsys import hls_to_rgb, rgb_to_hls
 from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union, cast
+
 from pydantic.v1.errors import ColorError
 from pydantic.v1.utils import Representation, almost_equal_floats
+
 if TYPE_CHECKING:
     from pydantic.v1.typing import CallableGenerator, ReprArgs
+
 ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]]
 ColorType = Union[ColorTuple, str]
-HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float,
-    float]]
+HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]]


 class RGBA:
     """
     Internal use only as a representation of a color.
     """
+
     __slots__ = 'r', 'g', 'b', 'alpha', '_tuple'

     def __init__(self, r: float, g: float, b: float, alpha: Optional[float]):
@@ -32,29 +35,27 @@ class RGBA:
         self.g = g
         self.b = b
         self.alpha = alpha
-        self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b,
-            alpha)

-    def __getitem__(self, item: Any) ->Any:
+        self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha)
+
+    def __getitem__(self, item: Any) -> Any:
         return self._tuple[item]


-r_hex_short = '\\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\\s*'
-r_hex_long = (
-    '\\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\\s*')
-_r_255 = '(\\d{1,3}(?:\\.\\d+)?)'
-_r_comma = '\\s*,\\s*'
-r_rgb = f'\\s*rgb\\(\\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\\)\\s*'
-_r_alpha = '(\\d(?:\\.\\d+)?|\\.\\d+|\\d{1,2}%)'
-r_rgba = (
-    f'\\s*rgba\\(\\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\\s*\\)\\s*'
-    )
-_r_h = '(-?\\d+(?:\\.\\d+)?|-?\\.\\d+)(deg|rad|turn)?'
-_r_sl = '(\\d{1,3}(?:\\.\\d+)?)%'
-r_hsl = f'\\s*hsl\\(\\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\\s*\\)\\s*'
-r_hsla = (
-    f'\\s*hsl\\(\\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\\s*\\)\\s*'
-    )
+# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached
+r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*'
+r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*'
+_r_255 = r'(\d{1,3}(?:\.\d+)?)'
+_r_comma = r'\s*,\s*'
+r_rgb = fr'\s*rgb\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\)\s*'
+_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)'
+r_rgba = fr'\s*rgba\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\s*\)\s*'
+_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?'
+_r_sl = r'(\d{1,3}(?:\.\d+)?)%'
+r_hsl = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\s*\)\s*'
+r_hsla = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\s*\)\s*'
+
+# colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used
 repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'}
 rads = 2 * math.pi

@@ -62,7 +63,7 @@ rads = 2 * math.pi
 class Color(Representation):
     __slots__ = '_original', '_rgba'

-    def __init__(self, value: ColorType) ->None:
+    def __init__(self, value: ColorType) -> None:
         self._rgba: RGBA
         self._original: ColorType
         if isinstance(value, (tuple, list)):
@@ -74,32 +75,60 @@ class Color(Representation):
             value = value._original
         else:
             raise ColorError(reason='value must be a tuple, list or string')
+
+        # if we've got here value must be a valid color
         self._original = value

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
         field_schema.update(type='string', format='color')

-    def original(self) ->ColorType:
+    def original(self) -> ColorType:
         """
         Original value passed to Color
         """
-        pass
+        return self._original
+
+    def as_named(self, *, fallback: bool = False) -> str:
+        if self._rgba.alpha is None:
+            rgb = cast(Tuple[int, int, int], self.as_rgb_tuple())
+            try:
+                return COLORS_BY_VALUE[rgb]
+            except KeyError as e:
+                if fallback:
+                    return self.as_hex()
+                else:
+                    raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e
+        else:
+            return self.as_hex()

-    def as_hex(self) ->str:
+    def as_hex(self) -> str:
         """
         Hex string representing the color; can be 3, 4, 6 or 8 characters depending on whether
         a "short" representation of the color is possible and whether there's an alpha channel.
         """
-        pass
+        values = [float_to_255(c) for c in self._rgba[:3]]
+        if self._rgba.alpha is not None:
+            values.append(float_to_255(self._rgba.alpha))
+
+        as_hex = ''.join(f'{v:02x}' for v in values)
+        if all(c in repeat_colors for c in values):
+            as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2))
+        return '#' + as_hex

-    def as_rgb(self) ->str:
+    def as_rgb(self) -> str:
         """
         Color as an rgb(<r>, <g>, <b>) or rgba(<r>, <g>, <b>, <a>) string.
         """
-        pass
+        if self._rgba.alpha is None:
+            return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})'
+        else:
+            return (
+                f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, '
+                f'{round(self._alpha_float(), 2)})'
+            )

-    def as_rgb_tuple(self, *, alpha: Optional[bool]=None) ->ColorTuple:
+    def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple:
         """
         Color as an RGB or RGBA tuple; red, green and blue are in the range 0 to 255, alpha if included is
         in the range 0 to 1.
@@ -109,15 +138,30 @@ class Color(Representation):
           True - always include alpha,
           False - always omit alpha,
         """
-        pass
+        r, g, b = (float_to_255(c) for c in self._rgba[:3])
+        if alpha is None:
+            if self._rgba.alpha is None:
+                return r, g, b
+            else:
+                return r, g, b, self._alpha_float()
+        elif alpha:
+            return r, g, b, self._alpha_float()
+        else:
+            # alpha is False
+            return r, g, b

-    def as_hsl(self) ->str:
+    def as_hsl(self) -> str:
         """
         Color as an hsl(<h>, <s>, <l>) or hsl(<h>, <s>, <l>, <a>) string.
         """
-        pass
+        if self._rgba.alpha is None:
+            h, s, li = self.as_hsl_tuple(alpha=False)  # type: ignore
+            return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})'
+        else:
+            h, s, li, a = self.as_hsl_tuple(alpha=True)  # type: ignore
+            return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})'

-    def as_hsl_tuple(self, *, alpha: Optional[bool]=None) ->HslColorTuple:
+    def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple:
         """
         Color as an HSL or HSLA tuple, e.g. hue, saturation, lightness and optionally alpha; all elements are in
         the range 0 to 1.
@@ -129,35 +173,53 @@ class Color(Representation):
           True - always include alpha,
           False - always omit alpha,
         """
-        pass
+        h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b)
+        if alpha is None:
+            if self._rgba.alpha is None:
+                return h, s, l
+            else:
+                return h, s, l, self._alpha_float()
+        if alpha:
+            return h, s, l, self._alpha_float()
+        else:
+            # alpha is False
+            return h, s, l
+
+    def _alpha_float(self) -> float:
+        return 1 if self._rgba.alpha is None else self._rgba.alpha

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return self.as_named(fallback=True)

-    def __repr_args__(self) ->'ReprArgs':
-        return [(None, self.as_named(fallback=True))] + [('rgb', self.
-            as_rgb_tuple())]
+    def __repr_args__(self) -> 'ReprArgs':
+        return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())]  # type: ignore

-    def __eq__(self, other: Any) ->bool:
-        return isinstance(other, Color) and self.as_rgb_tuple(
-            ) == other.as_rgb_tuple()
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple()

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(self.as_rgb_tuple())
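A small usage sketch of the serialisation helpers above; the commented outputs are what this implementation should produce:

from pydantic.v1.color import Color

c = Color('#ff8000')
print(c.as_rgb_tuple())  # (255, 128, 0)
print(c.as_rgb())        # rgb(255, 128, 0)
print(c.as_hex())        # #ff8000
print(Color((0, 255, 255)).as_named())  # 'cyan' -- the reverse lookup keeps the last name for duplicate values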


-def parse_tuple(value: Tuple[Any, ...]) ->RGBA:
+def parse_tuple(value: Tuple[Any, ...]) -> RGBA:
     """
     Parse a tuple or list as a color.
     """
-    pass
+    if len(value) == 3:
+        r, g, b = (parse_color_value(v) for v in value)
+        return RGBA(r, g, b, None)
+    elif len(value) == 4:
+        r, g, b = (parse_color_value(v) for v in value[:3])
+        return RGBA(r, g, b, parse_float_alpha(value[3]))
+    else:
+        raise ColorError(reason='tuples must have length 3 or 4')


-def parse_str(value: str) ->RGBA:
+def parse_str(value: str) -> RGBA:
     """
     Parse a string to an RGBA tuple, trying the following formats (in this order):
     * named color, see COLORS_BY_NAME below
@@ -166,92 +228,267 @@ def parse_str(value: str) ->RGBA:
     * `rgb(<r>, <g>, <b>) `
     * `rgba(<r>, <g>, <b>, <a>)`
     """
-    pass
+    value_lower = value.lower()
+    try:
+        r, g, b = COLORS_BY_NAME[value_lower]
+    except KeyError:
+        pass
+    else:
+        return ints_to_rgba(r, g, b, None)
+
+    m = re.fullmatch(r_hex_short, value_lower)
+    if m:
+        *rgb, a = m.groups()
+        r, g, b = (int(v * 2, 16) for v in rgb)
+        if a:
+            alpha: Optional[float] = int(a * 2, 16) / 255
+        else:
+            alpha = None
+        return ints_to_rgba(r, g, b, alpha)
+
+    m = re.fullmatch(r_hex_long, value_lower)
+    if m:
+        *rgb, a = m.groups()
+        r, g, b = (int(v, 16) for v in rgb)
+        if a:
+            alpha = int(a, 16) / 255
+        else:
+            alpha = None
+        return ints_to_rgba(r, g, b, alpha)
+
+    m = re.fullmatch(r_rgb, value_lower)
+    if m:
+        return ints_to_rgba(*m.groups(), None)  # type: ignore
+
+    m = re.fullmatch(r_rgba, value_lower)
+    if m:
+        return ints_to_rgba(*m.groups())  # type: ignore

+    m = re.fullmatch(r_hsl, value_lower)
+    if m:
+        h, h_units, s, l_ = m.groups()
+        return parse_hsl(h, h_units, s, l_)

-def parse_color_value(value: Union[int, str], max_val: int=255) ->float:
+    m = re.fullmatch(r_hsla, value_lower)
+    if m:
+        h, h_units, s, l_, a = m.groups()
+        return parse_hsl(h, h_units, s, l_, parse_float_alpha(a))
+
+    raise ColorError(reason='string not recognised as a valid color')
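As a rough illustration of the formats listed in the docstring, all of the following parse successfully (the rgba and hsl forms may also carry an alpha component):

from pydantic.v1.color import Color

for raw in ('red', 'f00', '#ff0000', 'rgb(255, 0, 0)', 'rgba(255, 0, 0, 0.5)', 'hsl(0, 100%, 50%)'):
    print(raw, '->', Color(raw).as_rgb_tuple())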
+
+
+def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float]) -> RGBA:
+    return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha))
+
+
+def parse_color_value(value: Union[int, str], max_val: int = 255) -> float:
     """
     Parse a value checking it's a valid int in the range 0 to max_val and divide by max_val to give a number
     in the range 0 to 1
     """
-    pass
+    try:
+        color = float(value)
+    except ValueError:
+        raise ColorError(reason='color values must be a valid number')
+    if 0 <= color <= max_val:
+        return color / max_val
+    else:
+        raise ColorError(reason=f'color values must be in the range 0 to {max_val}')


-def parse_float_alpha(value: Union[None, str, float, int]) ->Optional[float]:
+def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]:
     """
     Parse a value checking it's a valid float in the range 0 to 1
     """
-    pass
+    if value is None:
+        return None
+    try:
+        if isinstance(value, str) and value.endswith('%'):
+            alpha = float(value[:-1]) / 100
+        else:
+            alpha = float(value)
+    except ValueError:
+        raise ColorError(reason='alpha values must be a valid float')

+    if almost_equal_floats(alpha, 1):
+        return None
+    elif 0 <= alpha <= 1:
+        return alpha
+    else:
+        raise ColorError(reason='alpha values must be in the range 0 to 1')

-def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[
-    float]=None) ->RGBA:
+
+def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA:
     """
     Parse raw hue, saturation, lightness and alpha values and convert to RGBA.
     """
-    pass
-
-
-COLORS_BY_NAME = {'aliceblue': (240, 248, 255), 'antiquewhite': (250, 235, 
-    215), 'aqua': (0, 255, 255), 'aquamarine': (127, 255, 212), 'azure': (
-    240, 255, 255), 'beige': (245, 245, 220), 'bisque': (255, 228, 196),
-    'black': (0, 0, 0), 'blanchedalmond': (255, 235, 205), 'blue': (0, 0, 
-    255), 'blueviolet': (138, 43, 226), 'brown': (165, 42, 42), 'burlywood':
-    (222, 184, 135), 'cadetblue': (95, 158, 160), 'chartreuse': (127, 255, 
-    0), 'chocolate': (210, 105, 30), 'coral': (255, 127, 80),
-    'cornflowerblue': (100, 149, 237), 'cornsilk': (255, 248, 220),
-    'crimson': (220, 20, 60), 'cyan': (0, 255, 255), 'darkblue': (0, 0, 139
-    ), 'darkcyan': (0, 139, 139), 'darkgoldenrod': (184, 134, 11),
-    'darkgray': (169, 169, 169), 'darkgreen': (0, 100, 0), 'darkgrey': (169,
-    169, 169), 'darkkhaki': (189, 183, 107), 'darkmagenta': (139, 0, 139),
-    'darkolivegreen': (85, 107, 47), 'darkorange': (255, 140, 0),
-    'darkorchid': (153, 50, 204), 'darkred': (139, 0, 0), 'darksalmon': (
-    233, 150, 122), 'darkseagreen': (143, 188, 143), 'darkslateblue': (72, 
-    61, 139), 'darkslategray': (47, 79, 79), 'darkslategrey': (47, 79, 79),
-    'darkturquoise': (0, 206, 209), 'darkviolet': (148, 0, 211), 'deeppink':
-    (255, 20, 147), 'deepskyblue': (0, 191, 255), 'dimgray': (105, 105, 105
-    ), 'dimgrey': (105, 105, 105), 'dodgerblue': (30, 144, 255),
-    'firebrick': (178, 34, 34), 'floralwhite': (255, 250, 240),
-    'forestgreen': (34, 139, 34), 'fuchsia': (255, 0, 255), 'gainsboro': (
-    220, 220, 220), 'ghostwhite': (248, 248, 255), 'gold': (255, 215, 0),
-    'goldenrod': (218, 165, 32), 'gray': (128, 128, 128), 'green': (0, 128,
-    0), 'greenyellow': (173, 255, 47), 'grey': (128, 128, 128), 'honeydew':
-    (240, 255, 240), 'hotpink': (255, 105, 180), 'indianred': (205, 92, 92),
-    'indigo': (75, 0, 130), 'ivory': (255, 255, 240), 'khaki': (240, 230, 
-    140), 'lavender': (230, 230, 250), 'lavenderblush': (255, 240, 245),
-    'lawngreen': (124, 252, 0), 'lemonchiffon': (255, 250, 205),
-    'lightblue': (173, 216, 230), 'lightcoral': (240, 128, 128),
-    'lightcyan': (224, 255, 255), 'lightgoldenrodyellow': (250, 250, 210),
-    'lightgray': (211, 211, 211), 'lightgreen': (144, 238, 144),
-    'lightgrey': (211, 211, 211), 'lightpink': (255, 182, 193),
-    'lightsalmon': (255, 160, 122), 'lightseagreen': (32, 178, 170),
-    'lightskyblue': (135, 206, 250), 'lightslategray': (119, 136, 153),
-    'lightslategrey': (119, 136, 153), 'lightsteelblue': (176, 196, 222),
-    'lightyellow': (255, 255, 224), 'lime': (0, 255, 0), 'limegreen': (50, 
-    205, 50), 'linen': (250, 240, 230), 'magenta': (255, 0, 255), 'maroon':
-    (128, 0, 0), 'mediumaquamarine': (102, 205, 170), 'mediumblue': (0, 0, 
-    205), 'mediumorchid': (186, 85, 211), 'mediumpurple': (147, 112, 219),
-    'mediumseagreen': (60, 179, 113), 'mediumslateblue': (123, 104, 238),
-    'mediumspringgreen': (0, 250, 154), 'mediumturquoise': (72, 209, 204),
-    'mediumvioletred': (199, 21, 133), 'midnightblue': (25, 25, 112),
-    'mintcream': (245, 255, 250), 'mistyrose': (255, 228, 225), 'moccasin':
-    (255, 228, 181), 'navajowhite': (255, 222, 173), 'navy': (0, 0, 128),
-    'oldlace': (253, 245, 230), 'olive': (128, 128, 0), 'olivedrab': (107, 
-    142, 35), 'orange': (255, 165, 0), 'orangered': (255, 69, 0), 'orchid':
-    (218, 112, 214), 'palegoldenrod': (238, 232, 170), 'palegreen': (152, 
-    251, 152), 'paleturquoise': (175, 238, 238), 'palevioletred': (219, 112,
-    147), 'papayawhip': (255, 239, 213), 'peachpuff': (255, 218, 185),
-    'peru': (205, 133, 63), 'pink': (255, 192, 203), 'plum': (221, 160, 221
-    ), 'powderblue': (176, 224, 230), 'purple': (128, 0, 128), 'red': (255,
-    0, 0), 'rosybrown': (188, 143, 143), 'royalblue': (65, 105, 225),
-    'saddlebrown': (139, 69, 19), 'salmon': (250, 128, 114), 'sandybrown':
-    (244, 164, 96), 'seagreen': (46, 139, 87), 'seashell': (255, 245, 238),
-    'sienna': (160, 82, 45), 'silver': (192, 192, 192), 'skyblue': (135, 
-    206, 235), 'slateblue': (106, 90, 205), 'slategray': (112, 128, 144),
-    'slategrey': (112, 128, 144), 'snow': (255, 250, 250), 'springgreen': (
-    0, 255, 127), 'steelblue': (70, 130, 180), 'tan': (210, 180, 140),
-    'teal': (0, 128, 128), 'thistle': (216, 191, 216), 'tomato': (255, 99, 
-    71), 'turquoise': (64, 224, 208), 'violet': (238, 130, 238), 'wheat': (
-    245, 222, 179), 'white': (255, 255, 255), 'whitesmoke': (245, 245, 245),
-    'yellow': (255, 255, 0), 'yellowgreen': (154, 205, 50)}
+    s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100)
+
+    h_value = float(h)
+    if h_units in {None, 'deg'}:
+        h_value = h_value % 360 / 360
+    elif h_units == 'rad':
+        h_value = h_value % rads / rads
+    else:
+        # turns
+        h_value = h_value % 1
+
+    r, g, b = hls_to_rgb(h_value, l_value, s_value)
+    return RGBA(r, g, b, alpha)
+
+
+def float_to_255(c: float) -> int:
+    return int(round(c * 255))
+
+
+COLORS_BY_NAME = {
+    'aliceblue': (240, 248, 255),
+    'antiquewhite': (250, 235, 215),
+    'aqua': (0, 255, 255),
+    'aquamarine': (127, 255, 212),
+    'azure': (240, 255, 255),
+    'beige': (245, 245, 220),
+    'bisque': (255, 228, 196),
+    'black': (0, 0, 0),
+    'blanchedalmond': (255, 235, 205),
+    'blue': (0, 0, 255),
+    'blueviolet': (138, 43, 226),
+    'brown': (165, 42, 42),
+    'burlywood': (222, 184, 135),
+    'cadetblue': (95, 158, 160),
+    'chartreuse': (127, 255, 0),
+    'chocolate': (210, 105, 30),
+    'coral': (255, 127, 80),
+    'cornflowerblue': (100, 149, 237),
+    'cornsilk': (255, 248, 220),
+    'crimson': (220, 20, 60),
+    'cyan': (0, 255, 255),
+    'darkblue': (0, 0, 139),
+    'darkcyan': (0, 139, 139),
+    'darkgoldenrod': (184, 134, 11),
+    'darkgray': (169, 169, 169),
+    'darkgreen': (0, 100, 0),
+    'darkgrey': (169, 169, 169),
+    'darkkhaki': (189, 183, 107),
+    'darkmagenta': (139, 0, 139),
+    'darkolivegreen': (85, 107, 47),
+    'darkorange': (255, 140, 0),
+    'darkorchid': (153, 50, 204),
+    'darkred': (139, 0, 0),
+    'darksalmon': (233, 150, 122),
+    'darkseagreen': (143, 188, 143),
+    'darkslateblue': (72, 61, 139),
+    'darkslategray': (47, 79, 79),
+    'darkslategrey': (47, 79, 79),
+    'darkturquoise': (0, 206, 209),
+    'darkviolet': (148, 0, 211),
+    'deeppink': (255, 20, 147),
+    'deepskyblue': (0, 191, 255),
+    'dimgray': (105, 105, 105),
+    'dimgrey': (105, 105, 105),
+    'dodgerblue': (30, 144, 255),
+    'firebrick': (178, 34, 34),
+    'floralwhite': (255, 250, 240),
+    'forestgreen': (34, 139, 34),
+    'fuchsia': (255, 0, 255),
+    'gainsboro': (220, 220, 220),
+    'ghostwhite': (248, 248, 255),
+    'gold': (255, 215, 0),
+    'goldenrod': (218, 165, 32),
+    'gray': (128, 128, 128),
+    'green': (0, 128, 0),
+    'greenyellow': (173, 255, 47),
+    'grey': (128, 128, 128),
+    'honeydew': (240, 255, 240),
+    'hotpink': (255, 105, 180),
+    'indianred': (205, 92, 92),
+    'indigo': (75, 0, 130),
+    'ivory': (255, 255, 240),
+    'khaki': (240, 230, 140),
+    'lavender': (230, 230, 250),
+    'lavenderblush': (255, 240, 245),
+    'lawngreen': (124, 252, 0),
+    'lemonchiffon': (255, 250, 205),
+    'lightblue': (173, 216, 230),
+    'lightcoral': (240, 128, 128),
+    'lightcyan': (224, 255, 255),
+    'lightgoldenrodyellow': (250, 250, 210),
+    'lightgray': (211, 211, 211),
+    'lightgreen': (144, 238, 144),
+    'lightgrey': (211, 211, 211),
+    'lightpink': (255, 182, 193),
+    'lightsalmon': (255, 160, 122),
+    'lightseagreen': (32, 178, 170),
+    'lightskyblue': (135, 206, 250),
+    'lightslategray': (119, 136, 153),
+    'lightslategrey': (119, 136, 153),
+    'lightsteelblue': (176, 196, 222),
+    'lightyellow': (255, 255, 224),
+    'lime': (0, 255, 0),
+    'limegreen': (50, 205, 50),
+    'linen': (250, 240, 230),
+    'magenta': (255, 0, 255),
+    'maroon': (128, 0, 0),
+    'mediumaquamarine': (102, 205, 170),
+    'mediumblue': (0, 0, 205),
+    'mediumorchid': (186, 85, 211),
+    'mediumpurple': (147, 112, 219),
+    'mediumseagreen': (60, 179, 113),
+    'mediumslateblue': (123, 104, 238),
+    'mediumspringgreen': (0, 250, 154),
+    'mediumturquoise': (72, 209, 204),
+    'mediumvioletred': (199, 21, 133),
+    'midnightblue': (25, 25, 112),
+    'mintcream': (245, 255, 250),
+    'mistyrose': (255, 228, 225),
+    'moccasin': (255, 228, 181),
+    'navajowhite': (255, 222, 173),
+    'navy': (0, 0, 128),
+    'oldlace': (253, 245, 230),
+    'olive': (128, 128, 0),
+    'olivedrab': (107, 142, 35),
+    'orange': (255, 165, 0),
+    'orangered': (255, 69, 0),
+    'orchid': (218, 112, 214),
+    'palegoldenrod': (238, 232, 170),
+    'palegreen': (152, 251, 152),
+    'paleturquoise': (175, 238, 238),
+    'palevioletred': (219, 112, 147),
+    'papayawhip': (255, 239, 213),
+    'peachpuff': (255, 218, 185),
+    'peru': (205, 133, 63),
+    'pink': (255, 192, 203),
+    'plum': (221, 160, 221),
+    'powderblue': (176, 224, 230),
+    'purple': (128, 0, 128),
+    'red': (255, 0, 0),
+    'rosybrown': (188, 143, 143),
+    'royalblue': (65, 105, 225),
+    'saddlebrown': (139, 69, 19),
+    'salmon': (250, 128, 114),
+    'sandybrown': (244, 164, 96),
+    'seagreen': (46, 139, 87),
+    'seashell': (255, 245, 238),
+    'sienna': (160, 82, 45),
+    'silver': (192, 192, 192),
+    'skyblue': (135, 206, 235),
+    'slateblue': (106, 90, 205),
+    'slategray': (112, 128, 144),
+    'slategrey': (112, 128, 144),
+    'snow': (255, 250, 250),
+    'springgreen': (0, 255, 127),
+    'steelblue': (70, 130, 180),
+    'tan': (210, 180, 140),
+    'teal': (0, 128, 128),
+    'thistle': (216, 191, 216),
+    'tomato': (255, 99, 71),
+    'turquoise': (64, 224, 208),
+    'violet': (238, 130, 238),
+    'wheat': (245, 222, 179),
+    'white': (255, 255, 255),
+    'whitesmoke': (245, 245, 245),
+    'yellow': (255, 255, 0),
+    'yellowgreen': (154, 205, 50),
+}
+
 COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()}
diff --git a/pydantic/v1/config.py b/pydantic/v1/config.py
index 5c249595b..18f7c999b 100644
--- a/pydantic/v1/config.py
+++ b/pydantic/v1/config.py
@@ -1,31 +1,34 @@
 import json
 from enum import Enum
 from typing import TYPE_CHECKING, Any, Callable, Dict, ForwardRef, Optional, Tuple, Type, Union
+
 from typing_extensions import Literal, Protocol
+
 from pydantic.v1.typing import AnyArgTCallable, AnyCallable
 from pydantic.v1.utils import GetterDict
 from pydantic.v1.version import compiled
+
 if TYPE_CHECKING:
     from typing import overload
+
     from pydantic.v1.fields import ModelField
     from pydantic.v1.main import BaseModel
-    ConfigType = Type['BaseConfig']

+    ConfigType = Type['BaseConfig']

     class SchemaExtraCallable(Protocol):
-
         @overload
-        def __call__(self, schema: Dict[str, Any]) ->None:
+        def __call__(self, schema: Dict[str, Any]) -> None:
             pass

         @overload
-        def __call__(self, schema: Dict[str, Any], model_class: Type[BaseModel]
-            ) ->None:
+        def __call__(self, schema: Dict[str, Any], model_class: Type[BaseModel]) -> None:
             pass
+
 else:
     SchemaExtraCallable = Callable[..., None]
-__all__ = ('BaseConfig', 'ConfigDict', 'get_config', 'Extra',
-    'inherit_config', 'prepare_config')
+
+__all__ = 'BaseConfig', 'ConfigDict', 'get_config', 'Extra', 'inherit_config', 'prepare_config'


 class Extra(str, Enum):
@@ -34,11 +37,13 @@ class Extra(str, Enum):
     forbid = 'forbid'


+# https://github.com/cython/cython/issues/4003
+# Fixed in Cython 3 and Pydantic v1 won't support Cython 3.
+# Pydantic v2 doesn't depend on Cython at all.
 if not compiled:
     from typing_extensions import TypedDict

-
-    class ConfigDict(TypedDict, total=(False)):
+    class ConfigDict(TypedDict, total=False):
         title: Optional[str]
         anystr_lower: bool
         anystr_strip_whitespace: bool
@@ -65,9 +70,11 @@ if not compiled:
         underscore_attrs_are_private: bool
         allow_inf_nan: bool
         copy_on_model_validation: Literal['none', 'deep', 'shallow']
+        # whether dataclass `__post_init__` should be run after validation
         post_init_call: Literal['before_validation', 'after_validation']
+
 else:
-    ConfigDict = dict
+    ConfigDict = dict  # type: ignore


 class BaseConfig:
@@ -97,21 +104,88 @@ class BaseConfig:
     json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable] = {}
     underscore_attrs_are_private: bool = False
     allow_inf_nan: bool = True
+
+    # whether inherited models as fields should be reconstructed as base model,
+    # and whether such a copy should be shallow or deep
     copy_on_model_validation: Literal['none', 'deep', 'shallow'] = 'shallow'
+
+    # whether `Union` should check all allowed types before even trying to coerce
     smart_union: bool = False
-    post_init_call: Literal['before_validation', 'after_validation'
-        ] = 'before_validation'
+    # whether dataclass `__post_init__` should be run before or after validation
+    post_init_call: Literal['before_validation', 'after_validation'] = 'before_validation'

     @classmethod
-    def get_field_info(cls, name: str) ->Dict[str, Any]:
+    def get_field_info(cls, name: str) -> Dict[str, Any]:
         """
         Get properties of FieldInfo from the `fields` property of the config class.
         """
-        pass
+
+        fields_value = cls.fields.get(name)
+
+        if isinstance(fields_value, str):
+            field_info: Dict[str, Any] = {'alias': fields_value}
+        elif isinstance(fields_value, dict):
+            field_info = fields_value
+        else:
+            field_info = {}
+
+        if 'alias' in field_info:
+            field_info.setdefault('alias_priority', 2)
+
+        if field_info.get('alias_priority', 0) <= 1 and cls.alias_generator:
+            alias = cls.alias_generator(name)
+            if not isinstance(alias, str):
+                raise TypeError(f'Config.alias_generator must return str, not {alias.__class__}')
+            field_info.update(alias=alias, alias_priority=1)
+        return field_info

     @classmethod
-    def prepare_field(cls, field: 'ModelField') ->None:
+    def prepare_field(cls, field: 'ModelField') -> None:
         """
         Optional hook to check or modify fields during model creation.
         """
         pass
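A minimal sketch of how `get_field_info` above combines a per-field alias with `alias_generator`; the model and the `to_camel` helper are illustrative, and the explicit alias wins because it is given `alias_priority=2`:

from pydantic.v1 import BaseModel


def to_camel(name: str) -> str:
    first, *rest = name.split('_')
    return first + ''.join(part.title() for part in rest)


class User(BaseModel):
    first_name: str
    last_name: str

    class Config:
        alias_generator = to_camel
        fields = {'last_name': {'alias': 'surname'}}  # explicit alias beats the generator


print(User(firstName='Jane', surname='Doe').dict(by_alias=True))
# {'firstName': 'Jane', 'surname': 'Doe'}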
+
+
+def get_config(config: Union[ConfigDict, Type[object], None]) -> Type[BaseConfig]:
+    if config is None:
+        return BaseConfig
+
+    else:
+        config_dict = (
+            config
+            if isinstance(config, dict)
+            else {k: getattr(config, k) for k in dir(config) if not k.startswith('__')}
+        )
+
+        class Config(BaseConfig):
+            ...
+
+        for k, v in config_dict.items():
+            setattr(Config, k, v)
+        return Config
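A hedged sketch showing that `get_config` accepts either a ConfigDict-style mapping or a plain class and normalises both into a `BaseConfig` subclass:

from pydantic.v1.config import BaseConfig, Extra, get_config

FromDict = get_config({'allow_mutation': False, 'extra': Extra.forbid})


class LegacyConfig:
    anystr_strip_whitespace = True


FromClass = get_config(LegacyConfig)

assert issubclass(FromDict, BaseConfig) and FromDict.allow_mutation is False
assert issubclass(FromClass, BaseConfig) and FromClass.anystr_strip_whitespace is True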
+
+
+def inherit_config(self_config: 'ConfigType', parent_config: 'ConfigType', **namespace: Any) -> 'ConfigType':
+    if not self_config:
+        base_classes: Tuple['ConfigType', ...] = (parent_config,)
+    elif self_config == parent_config:
+        base_classes = (self_config,)
+    else:
+        base_classes = self_config, parent_config
+
+    namespace['json_encoders'] = {
+        **getattr(parent_config, 'json_encoders', {}),
+        **getattr(self_config, 'json_encoders', {}),
+        **namespace.get('json_encoders', {}),
+    }
+
+    return type('Config', base_classes, namespace)
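A rough illustration of the `json_encoders` merge above: the parent's encoders are applied first, the child's override them, and anything passed via the namespace would win over both. The two config classes here are illustrative stand-ins for model `Config` classes:

from datetime import date, datetime

from pydantic.v1.config import inherit_config


class ParentConfig:
    json_encoders = {datetime: lambda v: v.isoformat(), date: str}


class ChildConfig:
    json_encoders = {datetime: lambda v: v.timestamp()}


Merged = inherit_config(ChildConfig, ParentConfig)
# Merged.json_encoders keeps date -> str from the parent, while the child's datetime encoder wins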
+
+
+def prepare_config(config: Type[BaseConfig], cls_name: str) -> None:
+    if not isinstance(config.extra, Extra):
+        try:
+            config.extra = Extra(config.extra)
+        except ValueError:
+            raise ValueError(f'"{cls_name}": {config.extra} is not a valid value for "extra"')
diff --git a/pydantic/v1/dataclasses.py b/pydantic/v1/dataclasses.py
index 91caa9b02..bd1670291 100644
--- a/pydantic/v1/dataclasses.py
+++ b/pydantic/v1/dataclasses.py
@@ -36,12 +36,17 @@ import dataclasses
 import sys
 from contextlib import contextmanager
 from functools import wraps
+
 try:
     from functools import cached_property
 except ImportError:
+    # cached_property available only for python3.8+
     pass
+
 from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Generator, Optional, Type, TypeVar, Union, overload
+
 from typing_extensions import dataclass_transform
+
 from pydantic.v1.class_validators import gather_all_validators
 from pydantic.v1.config import BaseConfig, ConfigDict, Extra, get_config
 from pydantic.v1.error_wrappers import ValidationError
@@ -49,97 +54,415 @@ from pydantic.v1.errors import DataclassTypeError
 from pydantic.v1.fields import Field, FieldInfo, Required, Undefined
 from pydantic.v1.main import create_model, validate_model
 from pydantic.v1.utils import ClassAttribute
+
 if TYPE_CHECKING:
     from pydantic.v1.main import BaseModel
     from pydantic.v1.typing import CallableGenerator, NoArgAnyCallable
+
     DataclassT = TypeVar('DataclassT', bound='Dataclass')
-    DataclassClassOrWrapper = Union[Type['Dataclass'], 'DataclassProxy']

+    DataclassClassOrWrapper = Union[Type['Dataclass'], 'DataclassProxy']

     class Dataclass:
+        # stdlib attributes
         __dataclass_fields__: ClassVar[Dict[str, Any]]
-        __dataclass_params__: ClassVar[Any]
+        __dataclass_params__: ClassVar[Any]  # in reality `dataclasses._DataclassParams`
         __post_init__: ClassVar[Callable[..., None]]
+
+        # Added by pydantic
         __pydantic_run_validation__: ClassVar[bool]
         __post_init_post_parse__: ClassVar[Callable[..., None]]
         __pydantic_initialised__: ClassVar[bool]
         __pydantic_model__: ClassVar[Type[BaseModel]]
         __pydantic_validate_values__: ClassVar[Callable[['Dataclass'], None]]
-        __pydantic_has_field_info_default__: ClassVar[bool]
+        __pydantic_has_field_info_default__: ClassVar[bool]  # whether a `pydantic.Field` is used as default value

-        def __init__(self, *args: object, **kwargs: object) ->None:
+        def __init__(self, *args: object, **kwargs: object) -> None:
             pass

         @classmethod
-        def __get_validators__(cls: Type['Dataclass']) ->'CallableGenerator':
+        def __get_validators__(cls: Type['Dataclass']) -> 'CallableGenerator':
             pass

         @classmethod
-        def __validate__(cls: Type['DataclassT'], v: Any) ->'DataclassT':
+        def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT':
             pass
-__all__ = ['dataclass', 'set_validation',
-    'create_pydantic_model_from_dataclass', 'is_builtin_dataclass',
-    'make_dataclass_validator']
+
+
+__all__ = [
+    'dataclass',
+    'set_validation',
+    'create_pydantic_model_from_dataclass',
+    'is_builtin_dataclass',
+    'make_dataclass_validator',
+]
+
 _T = TypeVar('_T')
+
 if sys.version_info >= (3, 10):

+    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
+    @overload
+    def dataclass(
+        *,
+        init: bool = True,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: Union[ConfigDict, Type[object], None] = None,
+        validate_on_init: Optional[bool] = None,
+        use_proxy: Optional[bool] = None,
+        kw_only: bool = ...,
+    ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']:
+        ...
+
+    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
+    @overload
+    def dataclass(
+        _cls: Type[_T],
+        *,
+        init: bool = True,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: Union[ConfigDict, Type[object], None] = None,
+        validate_on_init: Optional[bool] = None,
+        use_proxy: Optional[bool] = None,
+        kw_only: bool = ...,
+    ) -> 'DataclassClassOrWrapper':
+        ...
+
+else:
+
+    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
+    @overload
+    def dataclass(
+        *,
+        init: bool = True,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: Union[ConfigDict, Type[object], None] = None,
+        validate_on_init: Optional[bool] = None,
+        use_proxy: Optional[bool] = None,
+    ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']:
+        ...
+
+    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
+    @overload
+    def dataclass(
+        _cls: Type[_T],
+        *,
+        init: bool = True,
+        repr: bool = True,
+        eq: bool = True,
+        order: bool = False,
+        unsafe_hash: bool = False,
+        frozen: bool = False,
+        config: Union[ConfigDict, Type[object], None] = None,
+        validate_on_init: Optional[bool] = None,
+        use_proxy: Optional[bool] = None,
+    ) -> 'DataclassClassOrWrapper':
+        ...
+

 @dataclass_transform(field_specifiers=(dataclasses.field, Field))
-def dataclass(_cls: Optional[Type[_T]]=None, *, init: bool=True, repr: bool
-    =True, eq: bool=True, order: bool=False, unsafe_hash: bool=False,
-    frozen: bool=False, config: Union[ConfigDict, Type[object], None]=None,
-    validate_on_init: Optional[bool]=None, use_proxy: Optional[bool]=None,
-    kw_only: bool=False) ->Union[Callable[[Type[_T]],
-    'DataclassClassOrWrapper'], 'DataclassClassOrWrapper']:
+def dataclass(
+    _cls: Optional[Type[_T]] = None,
+    *,
+    init: bool = True,
+    repr: bool = True,
+    eq: bool = True,
+    order: bool = False,
+    unsafe_hash: bool = False,
+    frozen: bool = False,
+    config: Union[ConfigDict, Type[object], None] = None,
+    validate_on_init: Optional[bool] = None,
+    use_proxy: Optional[bool] = None,
+    kw_only: bool = False,
+) -> Union[Callable[[Type[_T]], 'DataclassClassOrWrapper'], 'DataclassClassOrWrapper']:
     """
     Like the python standard lib dataclasses but with type validation.
     The result is either a pydantic dataclass that will validate input data
     or a wrapper that will trigger validation around a stdlib dataclass
     to avoid modifying it directly
     """
-    pass
+    the_config = get_config(config)
+
+    def wrap(cls: Type[Any]) -> 'DataclassClassOrWrapper':
+        should_use_proxy = (
+            use_proxy
+            if use_proxy is not None
+            else (
+                is_builtin_dataclass(cls)
+                and (cls.__bases__[0] is object or set(dir(cls)) == set(dir(cls.__bases__[0])))
+            )
+        )
+        if should_use_proxy:
+            dc_cls_doc = ''
+            dc_cls = DataclassProxy(cls)
+            default_validate_on_init = False
+        else:
+            dc_cls_doc = cls.__doc__ or ''  # needs to be done before generating dataclass
+            if sys.version_info >= (3, 10):
+                dc_cls = dataclasses.dataclass(
+                    cls,
+                    init=init,
+                    repr=repr,
+                    eq=eq,
+                    order=order,
+                    unsafe_hash=unsafe_hash,
+                    frozen=frozen,
+                    kw_only=kw_only,
+                )
+            else:
+                dc_cls = dataclasses.dataclass(  # type: ignore
+                    cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen
+                )
+            default_validate_on_init = True
+
+        should_validate_on_init = default_validate_on_init if validate_on_init is None else validate_on_init
+        _add_pydantic_validation_attributes(cls, the_config, should_validate_on_init, dc_cls_doc)
+        dc_cls.__pydantic_model__.__try_update_forward_refs__(**{cls.__name__: cls})
+        return dc_cls
+
+    if _cls is None:
+        return wrap
+
+    return wrap(_cls)
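A minimal usage sketch of the decorator (the `Point` fields are illustrative): input is validated and coerced like a BaseModel, and bad input raises a pydantic ValidationError instead of being stored as-is:

from pydantic.v1.dataclasses import dataclass


@dataclass
class Point:
    x: int
    y: int = 0


print(Point(x='1'))  # Point(x=1, y=0) -- the string is coerced to int
try:
    Point(x='not a number')
except Exception as exc:  # pydantic.v1.ValidationError in practice
    print(type(exc).__name__)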
+
+
+@contextmanager
+def set_validation(cls: Type['DataclassT'], value: bool) -> Generator[Type['DataclassT'], None, None]:
+    original_run_validation = cls.__pydantic_run_validation__
+    try:
+        cls.__pydantic_run_validation__ = value
+        yield cls
+    finally:
+        cls.__pydantic_run_validation__ = original_run_validation
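A hedged sketch of `set_validation`, which temporarily toggles whether constructing the dataclass runs validation (handy when rebuilding instances from data that is already known to be valid):

from pydantic.v1.dataclasses import dataclass, set_validation


@dataclass
class Item:
    qty: int


with set_validation(Item, False):
    raw = Item(qty='3')   # validation skipped inside the block
print(raw.qty)            # '3' -- still a string
print(Item(qty='3').qty)  # 3 -- validated as usual outside the block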


 class DataclassProxy:
     __slots__ = '__dataclass__'

-    def __init__(self, dc_cls: Type['Dataclass']) ->None:
+    def __init__(self, dc_cls: Type['Dataclass']) -> None:
         object.__setattr__(self, '__dataclass__', dc_cls)

-    def __call__(self, *args: Any, **kwargs: Any) ->Any:
+    def __call__(self, *args: Any, **kwargs: Any) -> Any:
         with set_validation(self.__dataclass__, True):
             return self.__dataclass__(*args, **kwargs)

-    def __getattr__(self, name: str) ->Any:
+    def __getattr__(self, name: str) -> Any:
         return getattr(self.__dataclass__, name)

-    def __setattr__(self, __name: str, __value: Any) ->None:
+    def __setattr__(self, __name: str, __value: Any) -> None:
         return setattr(self.__dataclass__, __name, __value)

-    def __instancecheck__(self, instance: Any) ->bool:
+    def __instancecheck__(self, instance: Any) -> bool:
         return isinstance(instance, self.__dataclass__)

-    def __copy__(self) ->'DataclassProxy':
+    def __copy__(self) -> 'DataclassProxy':
         return DataclassProxy(copy.copy(self.__dataclass__))

-    def __deepcopy__(self, memo: Any) ->'DataclassProxy':
+    def __deepcopy__(self, memo: Any) -> 'DataclassProxy':
         return DataclassProxy(copy.deepcopy(self.__dataclass__, memo))


-def _add_pydantic_validation_attributes(dc_cls: Type['Dataclass'], config:
-    Type[BaseConfig], validate_on_init: bool, dc_cls_doc: str) ->None:
+def _add_pydantic_validation_attributes(  # noqa: C901 (ignore complexity)
+    dc_cls: Type['Dataclass'],
+    config: Type[BaseConfig],
+    validate_on_init: bool,
+    dc_cls_doc: str,
+) -> None:
     """
     We need to replace the right method. If no `__post_init__` has been set in the stdlib dataclass
     it won't even exist (code is generated on the fly by `dataclasses`)
     By default, we run validation after `__init__` or `__post_init__` if defined
     """
-    pass
+    init = dc_cls.__init__
+
+    @wraps(init)
+    def handle_extra_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None:
+        if config.extra == Extra.ignore:
+            init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__})
+
+        elif config.extra == Extra.allow:
+            for k, v in kwargs.items():
+                self.__dict__.setdefault(k, v)
+            init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__})
+
+        else:
+            init(self, *args, **kwargs)
+
+    if hasattr(dc_cls, '__post_init__'):
+        try:
+            post_init = dc_cls.__post_init__.__wrapped__  # type: ignore[attr-defined]
+        except AttributeError:
+            post_init = dc_cls.__post_init__
+
+        @wraps(post_init)
+        def new_post_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None:
+            if config.post_init_call == 'before_validation':
+                post_init(self, *args, **kwargs)
+
+            if self.__class__.__pydantic_run_validation__:
+                self.__pydantic_validate_values__()
+                if hasattr(self, '__post_init_post_parse__'):
+                    self.__post_init_post_parse__(*args, **kwargs)
+
+            if config.post_init_call == 'after_validation':
+                post_init(self, *args, **kwargs)
+
+        setattr(dc_cls, '__init__', handle_extra_init)
+        setattr(dc_cls, '__post_init__', new_post_init)
+
+    else:
+
+        @wraps(init)
+        def new_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None:
+            handle_extra_init(self, *args, **kwargs)
+
+            if self.__class__.__pydantic_run_validation__:
+                self.__pydantic_validate_values__()
+
+            if hasattr(self, '__post_init_post_parse__'):
+                # We need to find again the initvars. To do that we use `__dataclass_fields__` instead of
+                # public method `dataclasses.fields`
+
+                # get all initvars and their default values
+                initvars_and_values: Dict[str, Any] = {}
+                for i, f in enumerate(self.__class__.__dataclass_fields__.values()):
+                    if f._field_type is dataclasses._FIELD_INITVAR:  # type: ignore[attr-defined]
+                        try:
+                            # set arg value by default
+                            initvars_and_values[f.name] = args[i]
+                        except IndexError:
+                            initvars_and_values[f.name] = kwargs.get(f.name, f.default)
+
+                self.__post_init_post_parse__(**initvars_and_values)
+
+        setattr(dc_cls, '__init__', new_init)
+
+    setattr(dc_cls, '__pydantic_run_validation__', ClassAttribute('__pydantic_run_validation__', validate_on_init))
+    setattr(dc_cls, '__pydantic_initialised__', False)
+    setattr(dc_cls, '__pydantic_model__', create_pydantic_model_from_dataclass(dc_cls, config, dc_cls_doc))
+    setattr(dc_cls, '__pydantic_validate_values__', _dataclass_validate_values)
+    setattr(dc_cls, '__validate__', classmethod(_validate_dataclass))
+    setattr(dc_cls, '__get_validators__', classmethod(_get_validators))
+
+    if dc_cls.__pydantic_model__.__config__.validate_assignment and not dc_cls.__dataclass_params__.frozen:
+        setattr(dc_cls, '__setattr__', _dataclass_validate_assignment_setattr)
+
+
+def _get_validators(cls: 'DataclassClassOrWrapper') -> 'CallableGenerator':
+    yield cls.__validate__
+
+
+def _validate_dataclass(cls: Type['DataclassT'], v: Any) -> 'DataclassT':
+    with set_validation(cls, True):
+        if isinstance(v, cls):
+            v.__pydantic_validate_values__()
+            return v
+        elif isinstance(v, (list, tuple)):
+            return cls(*v)
+        elif isinstance(v, dict):
+            return cls(**v)
+        else:
+            raise DataclassTypeError(class_name=cls.__name__)
+
+
+def create_pydantic_model_from_dataclass(
+    dc_cls: Type['Dataclass'],
+    config: Type[Any] = BaseConfig,
+    dc_cls_doc: Optional[str] = None,
+) -> Type['BaseModel']:
+    field_definitions: Dict[str, Any] = {}
+    for field in dataclasses.fields(dc_cls):
+        default: Any = Undefined
+        default_factory: Optional['NoArgAnyCallable'] = None
+        field_info: FieldInfo
+
+        if field.default is not dataclasses.MISSING:
+            default = field.default
+        elif field.default_factory is not dataclasses.MISSING:
+            default_factory = field.default_factory
+        else:
+            default = Required
+
+        if isinstance(default, FieldInfo):
+            field_info = default
+            dc_cls.__pydantic_has_field_info_default__ = True
+        else:
+            field_info = Field(default=default, default_factory=default_factory, **field.metadata)
+
+        field_definitions[field.name] = (field.type, field_info)
+
+    validators = gather_all_validators(dc_cls)
+    model: Type['BaseModel'] = create_model(
+        dc_cls.__name__,
+        __config__=config,
+        __module__=dc_cls.__module__,
+        __validators__=validators,
+        __cls_kwargs__={'__resolve_forward_refs__': False},
+        **field_definitions,
+    )
+    model.__doc__ = dc_cls_doc if dc_cls_doc is not None else dc_cls.__doc__ or ''
+    return model


 if sys.version_info >= (3, 8):

-
-def is_builtin_dataclass(_cls: Type[Any]) ->bool:
+    def _is_field_cached_property(obj: 'Dataclass', k: str) -> bool:
+        return isinstance(getattr(type(obj), k, None), cached_property)
+
+else:
+
+    def _is_field_cached_property(obj: 'Dataclass', k: str) -> bool:
+        return False
+
+
+def _dataclass_validate_values(self: 'Dataclass') -> None:
+    # validation errors can occur if this function is called twice on an already initialised dataclass.
+    # for example if Extra.forbid is enabled, it would consider __pydantic_initialised__ an invalid extra property
+    if getattr(self, '__pydantic_initialised__'):
+        return
+    if getattr(self, '__pydantic_has_field_info_default__', False):
+        # We need to remove `FieldInfo` values since they are not valid as input
+        # It's ok to do that because they are obviously the default values!
+        input_data = {
+            k: v
+            for k, v in self.__dict__.items()
+            if not (isinstance(v, FieldInfo) or _is_field_cached_property(self, k))
+        }
+    else:
+        input_data = {k: v for k, v in self.__dict__.items() if not _is_field_cached_property(self, k)}
+    d, _, validation_error = validate_model(self.__pydantic_model__, input_data, cls=self.__class__)
+    if validation_error:
+        raise validation_error
+    self.__dict__.update(d)
+    object.__setattr__(self, '__pydantic_initialised__', True)
+
+
+def _dataclass_validate_assignment_setattr(self: 'Dataclass', name: str, value: Any) -> None:
+    if self.__pydantic_initialised__:
+        d = dict(self.__dict__)
+        d.pop(name, None)
+        known_field = self.__pydantic_model__.__fields__.get(name, None)
+        if known_field:
+            value, error_ = known_field.validate(value, d, loc=name, cls=self.__class__)
+            if error_:
+                raise ValidationError([error_], self.__class__)
+
+    object.__setattr__(self, name, value)
+
+
+def is_builtin_dataclass(_cls: Type[Any]) -> bool:
     """
     Whether a class is a stdlib dataclass
     (useful to discriminate a pydantic dataclass that is actually a wrapper around a stdlib dataclass)
@@ -161,14 +484,17 @@ def is_builtin_dataclass(_cls: Type[Any]) ->bool:
     In this case, when we first check `B`, we make an extra check and look at the annotations ('y'),
     which won't be a superset of all the dataclass fields (only the stdlib fields i.e. 'x')
     """
-    pass
+    return (
+        dataclasses.is_dataclass(_cls)
+        and not hasattr(_cls, '__pydantic_model__')
+        and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {})))
+    )
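An illustrative sketch of the distinction this check draws between a plain stdlib dataclass and one that pydantic has already wrapped:

import dataclasses

from pydantic.v1.dataclasses import dataclass as pydantic_dataclass, is_builtin_dataclass


@dataclasses.dataclass
class Plain:
    x: int


@pydantic_dataclass
class Validated:
    x: int


print(is_builtin_dataclass(Plain))      # True
print(is_builtin_dataclass(Validated))  # False -- it carries __pydantic_model__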


-def make_dataclass_validator(dc_cls: Type['Dataclass'], config: Type[
-    BaseConfig]) ->'CallableGenerator':
+def make_dataclass_validator(dc_cls: Type['Dataclass'], config: Type[BaseConfig]) -> 'CallableGenerator':
     """
     Create a pydantic.dataclass from a builtin dataclass to add type validation
     and yield the validators
     It retrieves the parameters of the dataclass and forwards them to the newly created dataclass
     """
-    pass
+    yield from _get_validators(dataclass(dc_cls, config=config, use_proxy=True))
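
As a quick check on the restored is_builtin_dataclass logic, the standalone sketch below re-implements the same three conditions outside pydantic (the helper name looks_like_stdlib_dataclass is ours, not a library API), so the behaviour on a plain stdlib dataclass is easy to verify.

import dataclasses

def looks_like_stdlib_dataclass(cls) -> bool:
    # Same three conditions as is_builtin_dataclass above:
    # 1. the class is a dataclass at all,
    # 2. it has not been wrapped by pydantic (no __pydantic_model__ attribute),
    # 3. its __dataclass_fields__ cover all of its own annotations.
    return (
        dataclasses.is_dataclass(cls)
        and not hasattr(cls, '__pydantic_model__')
        and set(cls.__dataclass_fields__).issuperset(set(getattr(cls, '__annotations__', {})))
    )

@dataclasses.dataclass
class Point:
    x: int
    y: int

print(looks_like_stdlib_dataclass(Point))  # True: plain stdlib dataclass
print(looks_like_stdlib_dataclass(int))    # False: not a dataclass at all
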
diff --git a/pydantic/v1/datetime_parse.py b/pydantic/v1/datetime_parse.py
index 82d98b484..a7598fc6c 100644
--- a/pydantic/v1/datetime_parse.py
+++ b/pydantic/v1/datetime_parse.py
@@ -17,47 +17,162 @@ Changed to:
 import re
 from datetime import date, datetime, time, timedelta, timezone
 from typing import Dict, Optional, Type, Union
+
 from pydantic.v1 import errors
-date_expr = '(?P<year>\\d{4})-(?P<month>\\d{1,2})-(?P<day>\\d{1,2})'
+
+date_expr = r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
 time_expr = (
-    '(?P<hour>\\d{1,2}):(?P<minute>\\d{1,2})(?::(?P<second>\\d{1,2})(?:\\.(?P<microsecond>\\d{1,6})\\d{0,6})?)?(?P<tzinfo>Z|[+-]\\d{2}(?::?\\d{2})?)?$'
-    )
+    r'(?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
+    r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
+    r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
+)
+
 date_re = re.compile(f'{date_expr}$')
 time_re = re.compile(time_expr)
 datetime_re = re.compile(f'{date_expr}[T ]{time_expr}')
+
 standard_duration_re = re.compile(
-    '^(?:(?P<days>-?\\d+) (days?, )?)?((?:(?P<hours>-?\\d+):)(?=\\d+:\\d+))?(?:(?P<minutes>-?\\d+):)?(?P<seconds>-?\\d+)(?:\\.(?P<microseconds>\\d{1,6})\\d{0,6})?$'
-    )
+    r'^'
+    r'(?:(?P<days>-?\d+) (days?, )?)?'
+    r'((?:(?P<hours>-?\d+):)(?=\d+:\d+))?'
+    r'(?:(?P<minutes>-?\d+):)?'
+    r'(?P<seconds>-?\d+)'
+    r'(?:\.(?P<microseconds>\d{1,6})\d{0,6})?'
+    r'$'
+)
+
+# Support the sections of ISO 8601 date representation that are accepted by timedelta
 iso8601_duration_re = re.compile(
-    '^(?P<sign>[-+]?)P(?:(?P<days>\\d+(.\\d+)?)D)?(?:T(?:(?P<hours>\\d+(.\\d+)?)H)?(?:(?P<minutes>\\d+(.\\d+)?)M)?(?:(?P<seconds>\\d+(.\\d+)?)S)?)?$'
-    )
+    r'^(?P<sign>[-+]?)'
+    r'P'
+    r'(?:(?P<days>\d+(.\d+)?)D)?'
+    r'(?:T'
+    r'(?:(?P<hours>\d+(.\d+)?)H)?'
+    r'(?:(?P<minutes>\d+(.\d+)?)M)?'
+    r'(?:(?P<seconds>\d+(.\d+)?)S)?'
+    r')?'
+    r'$'
+)
+
 EPOCH = datetime(1970, 1, 1)
-MS_WATERSHED = int(20000000000.0)
-MAX_NUMBER = int(3e+20)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
 StrBytesIntFloat = Union[str, bytes, int, float]


-def parse_date(value: Union[date, StrBytesIntFloat]) ->date:
+def get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+    if isinstance(value, (int, float)):
+        return value
+    try:
+        return float(value)
+    except ValueError:
+        return None
+    except TypeError:
+        raise TypeError(f'invalid type; expected {native_expected_type}, string, bytes, int or float')
+
+
+def from_unix_seconds(seconds: Union[int, float]) -> datetime:
+    if seconds > MAX_NUMBER:
+        return datetime.max
+    elif seconds < -MAX_NUMBER:
+        return datetime.min
+
+    while abs(seconds) > MS_WATERSHED:
+        seconds /= 1000
+    dt = EPOCH + timedelta(seconds=seconds)
+    return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str], error: Type[Exception]) -> Union[None, int, timezone]:
+    if value == 'Z':
+        return timezone.utc
+    elif value is not None:
+        offset_mins = int(value[-2:]) if len(value) > 3 else 0
+        offset = 60 * int(value[1:3]) + offset_mins
+        if value[0] == '-':
+            offset = -offset
+        try:
+            return timezone(timedelta(minutes=offset))
+        except ValueError:
+            raise error()
+    else:
+        return None
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
     """
     Parse a date/int/float/string and return a datetime.date.

     Raise ValueError if the input is well formatted but not a valid date.
     Raise ValueError if the input isn't well formatted.
     """
-    pass
+    if isinstance(value, date):
+        if isinstance(value, datetime):
+            return value.date()
+        else:
+            return value
+
+    number = get_numeric(value, 'date')
+    if number is not None:
+        return from_unix_seconds(number).date()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = date_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.DateError()

+    kw = {k: int(v) for k, v in match.groupdict().items()}

-def parse_time(value: Union[time, StrBytesIntFloat]) ->time:
+    try:
+        return date(**kw)
+    except ValueError:
+        raise errors.DateError()
+
+
+def parse_time(value: Union[time, StrBytesIntFloat]) -> time:
     """
     Parse a time/string and return a datetime.time.

     Raise ValueError if the input is well formatted but not a valid time.
     Raise ValueError if the input isn't well formatted, in particular if it contains an offset.
     """
-    pass
+    if isinstance(value, time):
+        return value
+
+    number = get_numeric(value, 'time')
+    if number is not None:
+        if number >= 86400:
+            # doesn't make sense since the time would loop back around to 0
+            raise errors.TimeError()
+        return (datetime.min + timedelta(seconds=number)).time()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = time_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.TimeError()
+
+    kw = match.groupdict()
+    if kw['microsecond']:
+        kw['microsecond'] = kw['microsecond'].ljust(6, '0')
+
+    tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.TimeError)
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_['tzinfo'] = tzinfo

+    try:
+        return time(**kw_)  # type: ignore
+    except ValueError:
+        raise errors.TimeError()

-def parse_datetime(value: Union[datetime, StrBytesIntFloat]) ->datetime:
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
     """
     Parse a datetime/int/float/string and return a datetime.datetime.

@@ -67,10 +182,35 @@ def parse_datetime(value: Union[datetime, StrBytesIntFloat]) ->datetime:
     Raise ValueError if the input is well formatted but not a valid datetime.
     Raise ValueError if the input isn't well formatted.
     """
-    pass
+    if isinstance(value, datetime):
+        return value
+
+    number = get_numeric(value, 'datetime')
+    if number is not None:
+        return from_unix_seconds(number)
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = datetime_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.DateTimeError()
+
+    kw = match.groupdict()
+    if kw['microsecond']:
+        kw['microsecond'] = kw['microsecond'].ljust(6, '0')
+
+    tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.DateTimeError)
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_['tzinfo'] = tzinfo
+
+    try:
+        return datetime(**kw_)  # type: ignore
+    except ValueError:
+        raise errors.DateTimeError()


-def parse_duration(value: StrBytesIntFloat) ->timedelta:
+def parse_duration(value: StrBytesIntFloat) -> timedelta:
     """
     Parse a duration int/float/string and return a datetime.timedelta.

@@ -78,4 +218,31 @@ def parse_duration(value: StrBytesIntFloat) ->timedelta:

     Also supports ISO 8601 representation.
     """
-    pass
+    if isinstance(value, timedelta):
+        return value
+
+    if isinstance(value, (int, float)):
+        # below code requires a string
+        value = f'{value:f}'
+    elif isinstance(value, bytes):
+        value = value.decode()
+
+    try:
+        match = standard_duration_re.match(value) or iso8601_duration_re.match(value)
+    except TypeError:
+        raise TypeError('invalid type; expected timedelta, string, bytes, int or float')
+
+    if not match:
+        raise errors.DurationError()
+
+    kw = match.groupdict()
+    sign = -1 if kw.pop('sign', '+') == '-' else 1
+    if kw.get('microseconds'):
+        kw['microseconds'] = kw['microseconds'].ljust(6, '0')
+
+    if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'):
+        kw['microseconds'] = '-' + kw['microseconds']
+
+    kw_ = {k: float(v) for k, v in kw.items() if v is not None}
+
+    return sign * timedelta(**kw_)
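
The watershed logic in from_unix_seconds above decides whether a numeric timestamp is in seconds or milliseconds. The sketch below restates that helper on its own (constants copied from the module; an illustration, not an import from pydantic) so the two interpretations can be compared directly.

from datetime import datetime, timedelta, timezone

# Constants copied from the module above.
EPOCH = datetime(1970, 1, 1)
MS_WATERSHED = int(2e10)   # above this, the number is treated as milliseconds
MAX_NUMBER = int(3e20)     # beyond this, clamp to datetime.max / datetime.min

def from_unix_seconds(seconds):
    # Restatement of the helper above, for illustration only.
    if seconds > MAX_NUMBER:
        return datetime.max
    elif seconds < -MAX_NUMBER:
        return datetime.min
    while abs(seconds) > MS_WATERSHED:
        seconds /= 1000
    return (EPOCH + timedelta(seconds=seconds)).replace(tzinfo=timezone.utc)

print(from_unix_seconds(1_654_646_400))      # 2022-06-08 00:00:00+00:00 (interpreted as seconds)
print(from_unix_seconds(1_654_646_400_000))  # same instant, interpreted as milliseconds
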
diff --git a/pydantic/v1/decorator.py b/pydantic/v1/decorator.py
index 55a4b3c36..2c7c2c2ff 100644
--- a/pydantic/v1/decorator.py
+++ b/pydantic/v1/decorator.py
@@ -1,24 +1,54 @@
 from functools import wraps
 from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload
+
 from pydantic.v1 import validator
 from pydantic.v1.config import Extra
 from pydantic.v1.errors import ConfigError
 from pydantic.v1.main import BaseModel, create_model
 from pydantic.v1.typing import get_all_type_hints
 from pydantic.v1.utils import to_camel
-__all__ = 'validate_arguments',
+
+__all__ = ('validate_arguments',)
+
 if TYPE_CHECKING:
     from pydantic.v1.typing import AnyCallable
+
     AnyCallableT = TypeVar('AnyCallableT', bound=AnyCallable)
     ConfigType = Union[None, Type[Any], Dict[str, Any]]


-def validate_arguments(func: Optional['AnyCallableT']=None, *, config:
-    'ConfigType'=None) ->Any:
+@overload
+def validate_arguments(func: None = None, *, config: 'ConfigType' = None) -> Callable[['AnyCallableT'], 'AnyCallableT']:
+    ...
+
+
+@overload
+def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT':
+    ...
+
+
+def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any:
     """
     Decorator to validate the arguments passed to a function.
     """
-    pass
+
+    def validate(_func: 'AnyCallable') -> 'AnyCallable':
+        vd = ValidatedFunction(_func, config)
+
+        @wraps(_func)
+        def wrapper_function(*args: Any, **kwargs: Any) -> Any:
+            return vd.call(*args, **kwargs)
+
+        wrapper_function.vd = vd  # type: ignore
+        wrapper_function.validate = vd.init_model_instance  # type: ignore
+        wrapper_function.raw_function = vd.raw_function  # type: ignore
+        wrapper_function.model = vd.model  # type: ignore
+        return wrapper_function
+
+    if func:
+        return validate(func)
+    else:
+        return validate


 ALT_V_ARGS = 'v__args'
@@ -28,20 +58,23 @@ V_DUPLICATE_KWARGS = 'v__duplicate_kwargs'


 class ValidatedFunction:
-
-    def __init__(self, function: 'AnyCallableT', config: 'ConfigType'):
+    def __init__(self, function: 'AnyCallableT', config: 'ConfigType'):  # noqa C901
         from inspect import Parameter, signature
+
         parameters: Mapping[str, Parameter] = signature(function).parameters
-        if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS,
-            V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}:
+
+        if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}:
             raise ConfigError(
-                f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" are not permitted as argument names when using the "{validate_arguments.__name__}" decorator'
-                )
+                f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" '
+                f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator'
+            )
+
         self.raw_function = function
         self.arg_mapping: Dict[int, str] = {}
         self.positional_only_args = set()
         self.v_args_name = 'args'
         self.v_kwargs_name = 'kwargs'
+
         type_hints = get_all_type_hints(function)
         takes_args = False
         takes_kwargs = False
@@ -51,6 +84,7 @@ class ValidatedFunction:
                 annotation = Any
             else:
                 annotation = type_hints[name]
+
             default = ... if p.default is p.empty else p.default
             if p.kind == Parameter.POSITIONAL_ONLY:
                 self.arg_mapping[i] = name
@@ -70,14 +104,161 @@ class ValidatedFunction:
             else:
                 assert p.kind == Parameter.VAR_KEYWORD, p.kind
                 self.v_kwargs_name = name
-                fields[name] = Dict[str, annotation], None
+                fields[name] = Dict[str, annotation], None  # type: ignore
                 takes_kwargs = True
+
+        # these checks avoid a clash between "args" and a field with that name
         if not takes_args and self.v_args_name in fields:
             self.v_args_name = ALT_V_ARGS
+
+        # same with "kwargs"
         if not takes_kwargs and self.v_kwargs_name in fields:
             self.v_kwargs_name = ALT_V_KWARGS
+
         if not takes_args:
+            # we add the field so validation below can raise the correct exception
             fields[self.v_args_name] = List[Any], None
+
         if not takes_kwargs:
+            # same with kwargs
             fields[self.v_kwargs_name] = Dict[Any, Any], None
+
         self.create_model(fields, takes_args, takes_kwargs, config)
+
+    def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel:
+        values = self.build_values(args, kwargs)
+        return self.model(**values)
+
+    def call(self, *args: Any, **kwargs: Any) -> Any:
+        m = self.init_model_instance(*args, **kwargs)
+        return self.execute(m)
+
+    def build_values(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]:
+        values: Dict[str, Any] = {}
+        if args:
+            arg_iter = enumerate(args)
+            while True:
+                try:
+                    i, a = next(arg_iter)
+                except StopIteration:
+                    break
+                arg_name = self.arg_mapping.get(i)
+                if arg_name is not None:
+                    values[arg_name] = a
+                else:
+                    values[self.v_args_name] = [a] + [a for _, a in arg_iter]
+                    break
+
+        var_kwargs: Dict[str, Any] = {}
+        wrong_positional_args = []
+        duplicate_kwargs = []
+        fields_alias = [
+            field.alias
+            for name, field in self.model.__fields__.items()
+            if name not in (self.v_args_name, self.v_kwargs_name)
+        ]
+        non_var_fields = set(self.model.__fields__) - {self.v_args_name, self.v_kwargs_name}
+        for k, v in kwargs.items():
+            if k in non_var_fields or k in fields_alias:
+                if k in self.positional_only_args:
+                    wrong_positional_args.append(k)
+                if k in values:
+                    duplicate_kwargs.append(k)
+                values[k] = v
+            else:
+                var_kwargs[k] = v
+
+        if var_kwargs:
+            values[self.v_kwargs_name] = var_kwargs
+        if wrong_positional_args:
+            values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args
+        if duplicate_kwargs:
+            values[V_DUPLICATE_KWARGS] = duplicate_kwargs
+        return values
+
+    def execute(self, m: BaseModel) -> Any:
+        d = {k: v for k, v in m._iter() if k in m.__fields_set__ or m.__fields__[k].default_factory}
+        var_kwargs = d.pop(self.v_kwargs_name, {})
+
+        if self.v_args_name in d:
+            args_: List[Any] = []
+            in_kwargs = False
+            kwargs = {}
+            for name, value in d.items():
+                if in_kwargs:
+                    kwargs[name] = value
+                elif name == self.v_args_name:
+                    args_ += value
+                    in_kwargs = True
+                else:
+                    args_.append(value)
+            return self.raw_function(*args_, **kwargs, **var_kwargs)
+        elif self.positional_only_args:
+            args_ = []
+            kwargs = {}
+            for name, value in d.items():
+                if name in self.positional_only_args:
+                    args_.append(value)
+                else:
+                    kwargs[name] = value
+            return self.raw_function(*args_, **kwargs, **var_kwargs)
+        else:
+            return self.raw_function(**d, **var_kwargs)
+
+    def create_model(self, fields: Dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None:
+        pos_args = len(self.arg_mapping)
+
+        class CustomConfig:
+            pass
+
+        if not TYPE_CHECKING:  # pragma: no branch
+            if isinstance(config, dict):
+                CustomConfig = type('Config', (), config)  # noqa: F811
+            elif config is not None:
+                CustomConfig = config  # noqa: F811
+
+        if hasattr(CustomConfig, 'fields') or hasattr(CustomConfig, 'alias_generator'):
+            raise ConfigError(
+                'Setting the "fields" and "alias_generator" property on custom Config for '
+                '@validate_arguments is not yet supported, please remove.'
+            )
+
+        class DecoratorBaseModel(BaseModel):
+            @validator(self.v_args_name, check_fields=False, allow_reuse=True)
+            def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]:
+                if takes_args or v is None:
+                    return v
+
+                raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given')
+
+            @validator(self.v_kwargs_name, check_fields=False, allow_reuse=True)
+            def check_kwargs(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
+                if takes_kwargs or v is None:
+                    return v
+
+                plural = '' if len(v) == 1 else 's'
+                keys = ', '.join(map(repr, v.keys()))
+                raise TypeError(f'unexpected keyword argument{plural}: {keys}')
+
+            @validator(V_POSITIONAL_ONLY_NAME, check_fields=False, allow_reuse=True)
+            def check_positional_only(cls, v: Optional[List[str]]) -> None:
+                if v is None:
+                    return
+
+                plural = '' if len(v) == 1 else 's'
+                keys = ', '.join(map(repr, v))
+                raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}')
+
+            @validator(V_DUPLICATE_KWARGS, check_fields=False, allow_reuse=True)
+            def check_duplicate_kwargs(cls, v: Optional[List[str]]) -> None:
+                if v is None:
+                    return
+
+                plural = '' if len(v) == 1 else 's'
+                keys = ', '.join(map(repr, v))
+                raise TypeError(f'multiple values for argument{plural}: {keys}')
+
+            class Config(CustomConfig):
+                extra = getattr(CustomConfig, 'extra', Extra.forbid)
+
+        self.model = create_model(to_camel(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields)
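
For context, the decorator implemented above is used as in the sketch below, assuming pydantic 1.x is installed (or pydantic 2.x, which ships this code under the pydantic.v1 namespace used in this diff). Arguments are validated and coerced by the model that ValidatedFunction.create_model builds.

from pydantic.v1 import ValidationError, validate_arguments

@validate_arguments
def repeat(word: str, times: int) -> str:
    return word * times

print(repeat('ab', 3))    # 'ababab'
print(repeat('ab', '2'))  # '2' is coerced to int by the generated model -> 'abab'

try:
    repeat('ab', 'not a number')
except ValidationError as exc:
    print(exc.errors()[0]['loc'])  # ('times',)
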
diff --git a/pydantic/v1/env_settings.py b/pydantic/v1/env_settings.py
index 8ef7409b0..5f6f21750 100644
--- a/pydantic/v1/env_settings.py
+++ b/pydantic/v1/env_settings.py
@@ -2,13 +2,16 @@ import os
 import warnings
 from pathlib import Path
 from typing import AbstractSet, Any, Callable, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, Union
+
 from pydantic.v1.config import BaseConfig, Extra
 from pydantic.v1.fields import ModelField
 from pydantic.v1.main import BaseModel
 from pydantic.v1.types import JsonWrapper
 from pydantic.v1.typing import StrPath, display_as_type, get_origin, is_union
 from pydantic.v1.utils import deep_update, lenient_issubclass, path_type, sequence_like
+
 env_file_sentinel = str(object())
+
 SettingsSourceCallable = Callable[['BaseSettings'], Dict[str, Any]]
 DotenvType = Union[StrPath, List[StrPath], Tuple[StrPath, ...]]

@@ -25,15 +28,56 @@ class BaseSettings(BaseModel):
     Heroku and any 12 factor app design.
     """

-    def __init__(__pydantic_self__, _env_file: Optional[DotenvType]=
-        env_file_sentinel, _env_file_encoding: Optional[str]=None,
-        _env_nested_delimiter: Optional[str]=None, _secrets_dir: Optional[
-        StrPath]=None, **values: Any) ->None:
-        super().__init__(**__pydantic_self__._build_values(values,
-            _env_file=_env_file, _env_file_encoding=_env_file_encoding,
-            _env_nested_delimiter=_env_nested_delimiter, _secrets_dir=
-            _secrets_dir))
+    def __init__(
+        __pydantic_self__,
+        _env_file: Optional[DotenvType] = env_file_sentinel,
+        _env_file_encoding: Optional[str] = None,
+        _env_nested_delimiter: Optional[str] = None,
+        _secrets_dir: Optional[StrPath] = None,
+        **values: Any,
+    ) -> None:
+        # Uses something other than `self` for the first arg to allow "self" as a settable attribute
+        super().__init__(
+            **__pydantic_self__._build_values(
+                values,
+                _env_file=_env_file,
+                _env_file_encoding=_env_file_encoding,
+                _env_nested_delimiter=_env_nested_delimiter,
+                _secrets_dir=_secrets_dir,
+            )
+        )

+    def _build_values(
+        self,
+        init_kwargs: Dict[str, Any],
+        _env_file: Optional[DotenvType] = None,
+        _env_file_encoding: Optional[str] = None,
+        _env_nested_delimiter: Optional[str] = None,
+        _secrets_dir: Optional[StrPath] = None,
+    ) -> Dict[str, Any]:
+        # Configure built-in sources
+        init_settings = InitSettingsSource(init_kwargs=init_kwargs)
+        env_settings = EnvSettingsSource(
+            env_file=(_env_file if _env_file != env_file_sentinel else self.__config__.env_file),
+            env_file_encoding=(
+                _env_file_encoding if _env_file_encoding is not None else self.__config__.env_file_encoding
+            ),
+            env_nested_delimiter=(
+                _env_nested_delimiter if _env_nested_delimiter is not None else self.__config__.env_nested_delimiter
+            ),
+            env_prefix_len=len(self.__config__.env_prefix),
+        )
+        file_secret_settings = SecretsSettingsSource(secrets_dir=_secrets_dir or self.__config__.secrets_dir)
+        # Provide a hook to set built-in sources priority and add / remove sources
+        sources = self.__config__.customise_sources(
+            init_settings=init_settings, env_settings=env_settings, file_secret_settings=file_secret_settings
+        )
+        if sources:
+            return deep_update(*reversed([source(self) for source in sources]))
+        else:
+            # no one should mean to do this, but I think returning an empty dict is marginally preferable
+            # to an informative error and much better than a confusing error
+            return {}

     class Config(BaseConfig):
         env_prefix: str = ''
@@ -45,146 +89,262 @@ class BaseSettings(BaseModel):
         extra: Extra = Extra.forbid
         arbitrary_types_allowed: bool = True
         case_sensitive: bool = False
+
+        @classmethod
+        def prepare_field(cls, field: ModelField) -> None:
+            env_names: Union[List[str], AbstractSet[str]]
+            field_info_from_config = cls.get_field_info(field.name)
+
+            env = field_info_from_config.get('env') or field.field_info.extra.get('env')
+            if env is None:
+                if field.has_alias:
+                    warnings.warn(
+                        'aliases are no longer used by BaseSettings to define which environment variables to read. '
+                        'Instead use the "env" field setting. '
+                        'See https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names',
+                        FutureWarning,
+                    )
+                env_names = {cls.env_prefix + field.name}
+            elif isinstance(env, str):
+                env_names = {env}
+            elif isinstance(env, (set, frozenset)):
+                env_names = env
+            elif sequence_like(env):
+                env_names = list(env)
+            else:
+                raise TypeError(f'invalid field env: {env!r} ({display_as_type(env)}); should be string, list or set')
+
+            if not cls.case_sensitive:
+                env_names = env_names.__class__(n.lower() for n in env_names)
+            field.field_info.extra['env_names'] = env_names
+
+        @classmethod
+        def customise_sources(
+            cls,
+            init_settings: SettingsSourceCallable,
+            env_settings: SettingsSourceCallable,
+            file_secret_settings: SettingsSourceCallable,
+        ) -> Tuple[SettingsSourceCallable, ...]:
+            return init_settings, env_settings, file_secret_settings
+
+        @classmethod
+        def parse_env_var(cls, field_name: str, raw_val: str) -> Any:
+            return cls.json_loads(raw_val)
+
+    # populated by the metaclass using the Config class defined above, annotated here to help IDEs only
     __config__: ClassVar[Type[Config]]


 class InitSettingsSource:
-    __slots__ = 'init_kwargs',
+    __slots__ = ('init_kwargs',)

     def __init__(self, init_kwargs: Dict[str, Any]):
         self.init_kwargs = init_kwargs

-    def __call__(self, settings: BaseSettings) ->Dict[str, Any]:
+    def __call__(self, settings: BaseSettings) -> Dict[str, Any]:
         return self.init_kwargs

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return f'InitSettingsSource(init_kwargs={self.init_kwargs!r})'


 class EnvSettingsSource:
-    __slots__ = ('env_file', 'env_file_encoding', 'env_nested_delimiter',
-        'env_prefix_len')
+    __slots__ = ('env_file', 'env_file_encoding', 'env_nested_delimiter', 'env_prefix_len')

-    def __init__(self, env_file: Optional[DotenvType], env_file_encoding:
-        Optional[str], env_nested_delimiter: Optional[str]=None,
-        env_prefix_len: int=0):
+    def __init__(
+        self,
+        env_file: Optional[DotenvType],
+        env_file_encoding: Optional[str],
+        env_nested_delimiter: Optional[str] = None,
+        env_prefix_len: int = 0,
+    ):
         self.env_file: Optional[DotenvType] = env_file
         self.env_file_encoding: Optional[str] = env_file_encoding
         self.env_nested_delimiter: Optional[str] = env_nested_delimiter
         self.env_prefix_len: int = env_prefix_len

-    def __call__(self, settings: BaseSettings) ->Dict[str, Any]:
+    def __call__(self, settings: BaseSettings) -> Dict[str, Any]:  # noqa C901
         """
         Build environment variables suitable for passing to the Model.
         """
         d: Dict[str, Any] = {}
+
         if settings.__config__.case_sensitive:
             env_vars: Mapping[str, Optional[str]] = os.environ
         else:
             env_vars = {k.lower(): v for k, v in os.environ.items()}
+
         dotenv_vars = self._read_env_files(settings.__config__.case_sensitive)
         if dotenv_vars:
             env_vars = {**dotenv_vars, **env_vars}
+
         for field in settings.__fields__.values():
             env_val: Optional[str] = None
             for env_name in field.field_info.extra['env_names']:
                 env_val = env_vars.get(env_name)
                 if env_val is not None:
                     break
+
             is_complex, allow_parse_failure = self.field_is_complex(field)
             if is_complex:
                 if env_val is None:
+                    # field is complex but no value found so far, try explode_env_vars
                     env_val_built = self.explode_env_vars(field, env_vars)
                     if env_val_built:
                         d[field.alias] = env_val_built
                 else:
+                    # field is complex and there's a value, decode that as JSON, then add explode_env_vars
                     try:
-                        env_val = settings.__config__.parse_env_var(field.
-                            name, env_val)
+                        env_val = settings.__config__.parse_env_var(field.name, env_val)
                     except ValueError as e:
                         if not allow_parse_failure:
-                            raise SettingsError(
-                                f'error parsing env var "{env_name}"') from e
+                            raise SettingsError(f'error parsing env var "{env_name}"') from e
+
                     if isinstance(env_val, dict):
-                        d[field.alias] = deep_update(env_val, self.
-                            explode_env_vars(field, env_vars))
+                        d[field.alias] = deep_update(env_val, self.explode_env_vars(field, env_vars))
                     else:
                         d[field.alias] = env_val
             elif env_val is not None:
+                # simplest case, field is not complex, we only need to add the value if it was found
                 d[field.alias] = env_val
+
         return d

-    def field_is_complex(self, field: ModelField) ->Tuple[bool, bool]:
+    def _read_env_files(self, case_sensitive: bool) -> Dict[str, Optional[str]]:
+        env_files = self.env_file
+        if env_files is None:
+            return {}
+
+        if isinstance(env_files, (str, os.PathLike)):
+            env_files = [env_files]
+
+        dotenv_vars = {}
+        for env_file in env_files:
+            env_path = Path(env_file).expanduser()
+            if env_path.is_file():
+                dotenv_vars.update(
+                    read_env_file(env_path, encoding=self.env_file_encoding, case_sensitive=case_sensitive)
+                )
+
+        return dotenv_vars
+
+    def field_is_complex(self, field: ModelField) -> Tuple[bool, bool]:
         """
         Find out if a field is complex, and if so whether JSON errors should be ignored
         """
-        pass
+        if lenient_issubclass(field.annotation, JsonWrapper):
+            return False, False
+
+        if field.is_complex():
+            allow_parse_failure = False
+        elif is_union(get_origin(field.type_)) and field.sub_fields and any(f.is_complex() for f in field.sub_fields):
+            allow_parse_failure = True
+        else:
+            return False, False

-    def explode_env_vars(self, field: ModelField, env_vars: Mapping[str,
-        Optional[str]]) ->Dict[str, Any]:
+        return True, allow_parse_failure
+
+    def explode_env_vars(self, field: ModelField, env_vars: Mapping[str, Optional[str]]) -> Dict[str, Any]:
         """
         Process env_vars and extract the values of keys containing env_nested_delimiter into nested dictionaries.

         This is applied to a single field, hence filtering by env_var prefix.
         """
-        pass
+        prefixes = [f'{env_name}{self.env_nested_delimiter}' for env_name in field.field_info.extra['env_names']]
+        result: Dict[str, Any] = {}
+        for env_name, env_val in env_vars.items():
+            if not any(env_name.startswith(prefix) for prefix in prefixes):
+                continue
+            # we remove the prefix before splitting in case the prefix has characters in common with the delimiter
+            env_name_without_prefix = env_name[self.env_prefix_len :]
+            _, *keys, last_key = env_name_without_prefix.split(self.env_nested_delimiter)
+            env_var = result
+            for key in keys:
+                env_var = env_var.setdefault(key, {})
+            env_var[last_key] = env_val
+
+        return result

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return (
-            f'EnvSettingsSource(env_file={self.env_file!r}, env_file_encoding={self.env_file_encoding!r}, env_nested_delimiter={self.env_nested_delimiter!r})'
-            )
+            f'EnvSettingsSource(env_file={self.env_file!r}, env_file_encoding={self.env_file_encoding!r}, '
+            f'env_nested_delimiter={self.env_nested_delimiter!r})'
+        )


 class SecretsSettingsSource:
-    __slots__ = 'secrets_dir',
+    __slots__ = ('secrets_dir',)

     def __init__(self, secrets_dir: Optional[StrPath]):
         self.secrets_dir: Optional[StrPath] = secrets_dir

-    def __call__(self, settings: BaseSettings) ->Dict[str, Any]:
+    def __call__(self, settings: BaseSettings) -> Dict[str, Any]:
         """
         Build fields from "secrets" files.
         """
         secrets: Dict[str, Optional[str]] = {}
+
         if self.secrets_dir is None:
             return secrets
+
         secrets_path = Path(self.secrets_dir).expanduser()
+
         if not secrets_path.exists():
             warnings.warn(f'directory "{secrets_path}" does not exist')
             return secrets
+
         if not secrets_path.is_dir():
-            raise SettingsError(
-                f'secrets_dir must reference a directory, not a {path_type(secrets_path)}'
-                )
+            raise SettingsError(f'secrets_dir must reference a directory, not a {path_type(secrets_path)}')
+
         for field in settings.__fields__.values():
             for env_name in field.field_info.extra['env_names']:
-                path = find_case_path(secrets_path, env_name, settings.
-                    __config__.case_sensitive)
+                path = find_case_path(secrets_path, env_name, settings.__config__.case_sensitive)
                 if not path:
+                    # path does not exist, we currently don't return a warning for this
                     continue
+
                 if path.is_file():
                     secret_value = path.read_text().strip()
                     if field.is_complex():
                         try:
-                            secret_value = settings.__config__.parse_env_var(
-                                field.name, secret_value)
+                            secret_value = settings.__config__.parse_env_var(field.name, secret_value)
                         except ValueError as e:
-                            raise SettingsError(
-                                f'error parsing env var "{env_name}"') from e
+                            raise SettingsError(f'error parsing env var "{env_name}"') from e
+
                     secrets[field.alias] = secret_value
                 else:
                     warnings.warn(
-                        f'attempted to load secret file "{path}" but found a {path_type(path)} instead.'
-                        , stacklevel=4)
+                        f'attempted to load secret file "{path}" but found a {path_type(path)} instead.',
+                        stacklevel=4,
+                    )
         return secrets

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return f'SecretsSettingsSource(secrets_dir={self.secrets_dir!r})'


-def find_case_path(dir_path: Path, file_name: str, case_sensitive: bool
-    ) ->Optional[Path]:
+def read_env_file(
+    file_path: StrPath, *, encoding: str = None, case_sensitive: bool = False
+) -> Dict[str, Optional[str]]:
+    try:
+        from dotenv import dotenv_values
+    except ImportError as e:
+        raise ImportError('python-dotenv is not installed, run `pip install pydantic[dotenv]`') from e
+
+    file_vars: Dict[str, Optional[str]] = dotenv_values(file_path, encoding=encoding or 'utf8')
+    if not case_sensitive:
+        return {k.lower(): v for k, v in file_vars.items()}
+    else:
+        return file_vars
+
+
+def find_case_path(dir_path: Path, file_name: str, case_sensitive: bool) -> Optional[Path]:
     """
     Find a file within path's directory matching filename, optionally ignoring case.
     """
-    pass
+    for f in dir_path.iterdir():
+        if f.name == file_name:
+            return f
+        elif not case_sensitive and f.name.lower() == file_name.lower():
+            return f
+    return None
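
A minimal usage sketch for the settings machinery above, under the same installation assumption: values are read from environment variables named with the configured env_prefix, and the init/env/secrets sources are merged in the priority returned by Config.customise_sources.

import os

from pydantic.v1 import BaseSettings

class AppSettings(BaseSettings):
    debug: bool = False
    api_key: str = ''

    class Config:
        env_prefix = 'APP_'  # fields are looked up as APP_DEBUG and APP_API_KEY

os.environ['APP_DEBUG'] = 'true'
os.environ['APP_API_KEY'] = 'dummy-key'

print(AppSettings())  # debug=True api_key='dummy-key'
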
diff --git a/pydantic/v1/error_wrappers.py b/pydantic/v1/error_wrappers.py
index 5f65f32c2..bc7f26314 100644
--- a/pydantic/v1/error_wrappers.py
+++ b/pydantic/v1/error_wrappers.py
@@ -1,56 +1,161 @@
 import json
 from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union
+
 from pydantic.v1.json import pydantic_encoder
 from pydantic.v1.utils import Representation
+
 if TYPE_CHECKING:
     from typing_extensions import TypedDict
+
     from pydantic.v1.config import BaseConfig
     from pydantic.v1.types import ModelOrDc
     from pydantic.v1.typing import ReprArgs
-    Loc = Tuple[Union[int, str], ...]

+    Loc = Tuple[Union[int, str], ...]

     class _ErrorDictRequired(TypedDict):
         loc: Loc
         msg: str
         type: str

-
-    class ErrorDict(_ErrorDictRequired, total=(False)):
+    class ErrorDict(_ErrorDictRequired, total=False):
         ctx: Dict[str, Any]
+
+
 __all__ = 'ErrorWrapper', 'ValidationError'


 class ErrorWrapper(Representation):
     __slots__ = 'exc', '_loc'

-    def __init__(self, exc: Exception, loc: Union[str, 'Loc']) ->None:
+    def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None:
         self.exc = exc
         self._loc = loc

-    def __repr_args__(self) ->'ReprArgs':
+    def loc_tuple(self) -> 'Loc':
+        if isinstance(self._loc, tuple):
+            return self._loc
+        else:
+            return (self._loc,)
+
+    def __repr_args__(self) -> 'ReprArgs':
         return [('exc', self.exc), ('loc', self.loc_tuple())]


+# ErrorList is something like Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper]
+# but recursive, therefore just use:
 ErrorList = Union[Sequence[Any], ErrorWrapper]


 class ValidationError(Representation, ValueError):
     __slots__ = 'raw_errors', 'model', '_error_cache'

-    def __init__(self, errors: Sequence[ErrorList], model: 'ModelOrDc') ->None:
+    def __init__(self, errors: Sequence[ErrorList], model: 'ModelOrDc') -> None:
         self.raw_errors = errors
         self.model = model
         self._error_cache: Optional[List['ErrorDict']] = None

-    def __str__(self) ->str:
+    def errors(self) -> List['ErrorDict']:
+        if self._error_cache is None:
+            try:
+                config = self.model.__config__  # type: ignore
+            except AttributeError:
+                config = self.model.__pydantic_model__.__config__  # type: ignore
+            self._error_cache = list(flatten_errors(self.raw_errors, config))
+        return self._error_cache
+
+    def json(self, *, indent: Union[None, int, str] = 2) -> str:
+        return json.dumps(self.errors(), indent=indent, default=pydantic_encoder)
+
+    def __str__(self) -> str:
         errors = self.errors()
         no_errors = len(errors)
-        return f"""{no_errors} validation error{'' if no_errors == 1 else 's'} for {self.model.__name__}
-{display_errors(errors)}"""
+        return (
+            f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n'
+            f'{display_errors(errors)}'
+        )

-    def __repr_args__(self) ->'ReprArgs':
+    def __repr_args__(self) -> 'ReprArgs':
         return [('model', self.model.__name__), ('errors', self.errors())]


+def display_errors(errors: List['ErrorDict']) -> str:
+    return '\n'.join(f'{_display_error_loc(e)}\n  {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors)
+
+
+def _display_error_loc(error: 'ErrorDict') -> str:
+    return ' -> '.join(str(e) for e in error['loc'])
+
+
+def _display_error_type_and_ctx(error: 'ErrorDict') -> str:
+    t = 'type=' + error['type']
+    ctx = error.get('ctx')
+    if ctx:
+        return t + ''.join(f'; {k}={v}' for k, v in ctx.items())
+    else:
+        return t
+
+
+def flatten_errors(
+    errors: Sequence[Any], config: Type['BaseConfig'], loc: Optional['Loc'] = None
+) -> Generator['ErrorDict', None, None]:
+    for error in errors:
+        if isinstance(error, ErrorWrapper):
+            if loc:
+                error_loc = loc + error.loc_tuple()
+            else:
+                error_loc = error.loc_tuple()
+
+            if isinstance(error.exc, ValidationError):
+                yield from flatten_errors(error.exc.raw_errors, config, error_loc)
+            else:
+                yield error_dict(error.exc, config, error_loc)
+        elif isinstance(error, list):
+            yield from flatten_errors(error, config, loc=loc)
+        else:
+            raise RuntimeError(f'Unknown error object: {error}')
+
+
+def error_dict(exc: Exception, config: Type['BaseConfig'], loc: 'Loc') -> 'ErrorDict':
+    type_ = get_exc_type(exc.__class__)
+    msg_template = config.error_msg_templates.get(type_) or getattr(exc, 'msg_template', None)
+    ctx = exc.__dict__
+    if msg_template:
+        msg = msg_template.format(**ctx)
+    else:
+        msg = str(exc)
+
+    d: 'ErrorDict' = {'loc': loc, 'msg': msg, 'type': type_}
+
+    if ctx:
+        d['ctx'] = ctx
+
+    return d
+
+
 _EXC_TYPE_CACHE: Dict[Type[Exception], str] = {}
+
+
+def get_exc_type(cls: Type[Exception]) -> str:
+    # slightly more efficient than using lru_cache since we don't need to worry about the cache filling up
+    try:
+        return _EXC_TYPE_CACHE[cls]
+    except KeyError:
+        r = _get_exc_type(cls)
+        _EXC_TYPE_CACHE[cls] = r
+        return r
+
+
+def _get_exc_type(cls: Type[Exception]) -> str:
+    if issubclass(cls, AssertionError):
+        return 'assertion_error'
+
+    base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error'
+    if cls in (TypeError, ValueError):
+        # just TypeError or ValueError, no extra code
+        return base_name
+
+    # if it's not a TypeError or ValueError, we just take the lowercase of the exception name
+    # no chaining or snake case logic, use "code" for more complex error types.
+    code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower()
+    return base_name + '.' + code
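
The code derivation in get_exc_type / _get_exc_type above maps an exception class to the "type" string reported by ValidationError.errors(). The sketch below restates that rule outside pydantic (exc_type_code is our name for it, not a library function).

def exc_type_code(cls):
    # Same rule as _get_exc_type above.
    if issubclass(cls, AssertionError):
        return 'assertion_error'
    base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error'
    if cls in (TypeError, ValueError):
        return base_name  # bare TypeError / ValueError get no suffix
    # otherwise use the exception's `code` attribute, falling back to its lowered class name
    code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower()
    return base_name + '.' + code

class PasswordTooShortError(ValueError):
    code = 'password.too_short'

print(exc_type_code(ValueError))             # value_error
print(exc_type_code(AssertionError))         # assertion_error
print(exc_type_code(PasswordTooShortError))  # value_error.password.too_short
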
diff --git a/pydantic/v1/errors.py b/pydantic/v1/errors.py
index 20d3509bc..6e8644258 100644
--- a/pydantic/v1/errors.py
+++ b/pydantic/v1/errors.py
@@ -1,41 +1,111 @@
 from decimal import Decimal
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, Tuple, Type, Union
+
 from pydantic.v1.typing import display_as_type
+
 if TYPE_CHECKING:
     from pydantic.v1.typing import DictStrAny
-__all__ = ('PydanticTypeError', 'PydanticValueError', 'ConfigError',
-    'MissingError', 'ExtraError', 'NoneIsNotAllowedError',
-    'NoneIsAllowedError', 'WrongConstantError', 'NotNoneError', 'BoolError',
-    'BytesError', 'DictError', 'EmailError', 'UrlError', 'UrlSchemeError',
-    'UrlSchemePermittedError', 'UrlUserInfoError', 'UrlHostError',
-    'UrlHostTldError', 'UrlPortError', 'UrlExtraError', 'EnumError',
-    'IntEnumError', 'EnumMemberError', 'IntegerError', 'FloatError',
-    'PathError', 'PathNotExistsError', 'PathNotAFileError',
-    'PathNotADirectoryError', 'PyObjectError', 'SequenceError', 'ListError',
-    'SetError', 'FrozenSetError', 'TupleError', 'TupleLengthError',
-    'ListMinLengthError', 'ListMaxLengthError', 'ListUniqueItemsError',
-    'SetMinLengthError', 'SetMaxLengthError', 'FrozenSetMinLengthError',
-    'FrozenSetMaxLengthError', 'AnyStrMinLengthError',
-    'AnyStrMaxLengthError', 'StrError', 'StrRegexError', 'NumberNotGtError',
-    'NumberNotGeError', 'NumberNotLtError', 'NumberNotLeError',
-    'NumberNotMultipleError', 'DecimalError', 'DecimalIsNotFiniteError',
-    'DecimalMaxDigitsError', 'DecimalMaxPlacesError',
-    'DecimalWholeDigitsError', 'DateTimeError', 'DateError',
-    'DateNotInThePastError', 'DateNotInTheFutureError', 'TimeError',
-    'DurationError', 'HashableError', 'UUIDError', 'UUIDVersionError',
-    'ArbitraryTypeError', 'ClassError', 'SubclassError', 'JsonError',
-    'JsonTypeError', 'PatternError', 'DataclassTypeError', 'CallableError',
-    'IPvAnyAddressError', 'IPvAnyInterfaceError', 'IPvAnyNetworkError',
-    'IPv4AddressError', 'IPv6AddressError', 'IPv4NetworkError',
-    'IPv6NetworkError', 'IPv4InterfaceError', 'IPv6InterfaceError',
-    'ColorError', 'StrictBoolError', 'NotDigitError', 'LuhnValidationError',
-    'InvalidLengthForBrand', 'InvalidByteSize', 'InvalidByteSizeUnit',
-    'MissingDiscriminator', 'InvalidDiscriminator')
-
-
-def cls_kwargs(cls: Type['PydanticErrorMixin'], ctx: 'DictStrAny'
-    ) ->'PydanticErrorMixin':
+
+# explicitly state exports to avoid "from pydantic.v1.errors import *" also importing Decimal, Path etc.
+__all__ = (
+    'PydanticTypeError',
+    'PydanticValueError',
+    'ConfigError',
+    'MissingError',
+    'ExtraError',
+    'NoneIsNotAllowedError',
+    'NoneIsAllowedError',
+    'WrongConstantError',
+    'NotNoneError',
+    'BoolError',
+    'BytesError',
+    'DictError',
+    'EmailError',
+    'UrlError',
+    'UrlSchemeError',
+    'UrlSchemePermittedError',
+    'UrlUserInfoError',
+    'UrlHostError',
+    'UrlHostTldError',
+    'UrlPortError',
+    'UrlExtraError',
+    'EnumError',
+    'IntEnumError',
+    'EnumMemberError',
+    'IntegerError',
+    'FloatError',
+    'PathError',
+    'PathNotExistsError',
+    'PathNotAFileError',
+    'PathNotADirectoryError',
+    'PyObjectError',
+    'SequenceError',
+    'ListError',
+    'SetError',
+    'FrozenSetError',
+    'TupleError',
+    'TupleLengthError',
+    'ListMinLengthError',
+    'ListMaxLengthError',
+    'ListUniqueItemsError',
+    'SetMinLengthError',
+    'SetMaxLengthError',
+    'FrozenSetMinLengthError',
+    'FrozenSetMaxLengthError',
+    'AnyStrMinLengthError',
+    'AnyStrMaxLengthError',
+    'StrError',
+    'StrRegexError',
+    'NumberNotGtError',
+    'NumberNotGeError',
+    'NumberNotLtError',
+    'NumberNotLeError',
+    'NumberNotMultipleError',
+    'DecimalError',
+    'DecimalIsNotFiniteError',
+    'DecimalMaxDigitsError',
+    'DecimalMaxPlacesError',
+    'DecimalWholeDigitsError',
+    'DateTimeError',
+    'DateError',
+    'DateNotInThePastError',
+    'DateNotInTheFutureError',
+    'TimeError',
+    'DurationError',
+    'HashableError',
+    'UUIDError',
+    'UUIDVersionError',
+    'ArbitraryTypeError',
+    'ClassError',
+    'SubclassError',
+    'JsonError',
+    'JsonTypeError',
+    'PatternError',
+    'DataclassTypeError',
+    'CallableError',
+    'IPvAnyAddressError',
+    'IPvAnyInterfaceError',
+    'IPvAnyNetworkError',
+    'IPv4AddressError',
+    'IPv6AddressError',
+    'IPv4NetworkError',
+    'IPv6NetworkError',
+    'IPv4InterfaceError',
+    'IPv6InterfaceError',
+    'ColorError',
+    'StrictBoolError',
+    'NotDigitError',
+    'LuhnValidationError',
+    'InvalidLengthForBrand',
+    'InvalidByteSize',
+    'InvalidByteSizeUnit',
+    'MissingDiscriminator',
+    'InvalidDiscriminator',
+)
+
+
+def cls_kwargs(cls: Type['PydanticErrorMixin'], ctx: 'DictStrAny') -> 'PydanticErrorMixin':
     """
     For built-in exceptions like ValueError or TypeError, we need to implement
     __reduce__ to override the default behaviour (instead of __getstate__/__setstate__)
@@ -43,21 +113,20 @@ def cls_kwargs(cls: Type['PydanticErrorMixin'], ctx: 'DictStrAny'
     Since we only use kwargs, we need a little constructor to change that.
     Note: the callable can't be a lambda as pickle looks in the namespace to find it
     """
-    pass
+    return cls(**ctx)


 class PydanticErrorMixin:
     code: str
     msg_template: str

-    def __init__(self, **ctx: Any) ->None:
+    def __init__(self, **ctx: Any) -> None:
         self.__dict__ = ctx

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return self.msg_template.format(**self.__dict__)

-    def __reduce__(self) ->Tuple[Callable[..., 'PydanticErrorMixin'], Tuple
-        [Type['PydanticErrorMixin'], 'DictStrAny']]:
+    def __reduce__(self) -> Tuple[Callable[..., 'PydanticErrorMixin'], Tuple[Type['PydanticErrorMixin'], 'DictStrAny']]:
         return cls_kwargs, (self.__class__, self.__dict__)


@@ -94,8 +163,8 @@ class NoneIsAllowedError(PydanticTypeError):
 class WrongConstantError(PydanticValueError):
     code = 'const'

-    def __str__(self) ->str:
-        permitted = ', '.join(repr(v) for v in self.permitted)
+    def __str__(self) -> str:
+        permitted = ', '.join(repr(v) for v in self.permitted)  # type: ignore
         return f'unexpected value; permitted: {permitted}'


@@ -159,17 +228,15 @@ class UrlPortError(UrlError):

 class UrlExtraError(UrlError):
     code = 'url.extra'
-    msg_template = (
-        'URL invalid, extra characters found after valid URL: {extra!r}')
+    msg_template = 'URL invalid, extra characters found after valid URL: {extra!r}'


 class EnumMemberError(PydanticTypeError):
     code = 'enum'

-    def __str__(self) ->str:
-        permitted = ', '.join(repr(v.value) for v in self.enum_values)
-        return (
-            f'value is not a valid enumeration member; permitted: {permitted}')
+    def __str__(self) -> str:
+        permitted = ', '.join(repr(v.value) for v in self.enum_values)  # type: ignore
+        return f'value is not a valid enumeration member; permitted: {permitted}'


 class IntegerError(PydanticTypeError):
@@ -185,8 +252,7 @@ class PathError(PydanticTypeError):


 class _PathValueError(PydanticValueError):
-
-    def __init__(self, *, path: Path) ->None:
+    def __init__(self, *, path: Path) -> None:
         super().__init__(path=str(path))


@@ -206,9 +272,7 @@ class PathNotADirectoryError(_PathValueError):


 class PyObjectError(PydanticTypeError):
-    msg_template = (
-        'ensure this value contains valid import path or valid callable: {error_message}'
-        )
+    msg_template = 'ensure this value contains valid import path or valid callable: {error_message}'


 class SequenceError(PydanticTypeError):
@@ -241,19 +305,17 @@ class TupleError(PydanticTypeError):

 class TupleLengthError(PydanticValueError):
     code = 'tuple.length'
-    msg_template = (
-        'wrong tuple length {actual_length}, expected {expected_length}')
+    msg_template = 'wrong tuple length {actual_length}, expected {expected_length}'

-    def __init__(self, *, actual_length: int, expected_length: int) ->None:
-        super().__init__(actual_length=actual_length, expected_length=
-            expected_length)
+    def __init__(self, *, actual_length: int, expected_length: int) -> None:
+        super().__init__(actual_length=actual_length, expected_length=expected_length)


 class ListMinLengthError(PydanticValueError):
     code = 'list.min_items'
     msg_template = 'ensure this value has at least {limit_value} items'

-    def __init__(self, *, limit_value: int) ->None:
+    def __init__(self, *, limit_value: int) -> None:
         super().__init__(limit_value=limit_value)


@@ -261,7 +323,7 @@ class ListMaxLengthError(PydanticValueError):
     code = 'list.max_items'
     msg_template = 'ensure this value has at most {limit_value} items'

-    def __init__(self, *, limit_value: int) ->None:
+    def __init__(self, *, limit_value: int) -> None:
         super().__init__(limit_value=limit_value)


@@ -274,7 +336,7 @@ class SetMinLengthError(PydanticValueError):
     code = 'set.min_items'
     msg_template = 'ensure this value has at least {limit_value} items'

-    def __init__(self, *, limit_value: int) ->None:
+    def __init__(self, *, limit_value: int) -> None:
         super().__init__(limit_value=limit_value)


@@ -282,7 +344,7 @@ class SetMaxLengthError(PydanticValueError):
     code = 'set.max_items'
     msg_template = 'ensure this value has at most {limit_value} items'

-    def __init__(self, *, limit_value: int) ->None:
+    def __init__(self, *, limit_value: int) -> None:
         super().__init__(limit_value=limit_value)


@@ -290,7 +352,7 @@ class FrozenSetMinLengthError(PydanticValueError):
     code = 'frozenset.min_items'
     msg_template = 'ensure this value has at least {limit_value} items'

-    def __init__(self, *, limit_value: int) ->None:
+    def __init__(self, *, limit_value: int) -> None:
         super().__init__(limit_value=limit_value)


@@ -298,7 +360,7 @@ class FrozenSetMaxLengthError(PydanticValueError):
     code = 'frozenset.max_items'
     msg_template = 'ensure this value has at most {limit_value} items'

-    def __init__(self, *, limit_value: int) ->None:
+    def __init__(self, *, limit_value: int) -> None:
         super().__init__(limit_value=limit_value)


@@ -306,7 +368,7 @@ class AnyStrMinLengthError(PydanticValueError):
     code = 'any_str.min_length'
     msg_template = 'ensure this value has at least {limit_value} characters'

-    def __init__(self, *, limit_value: int) ->None:
+    def __init__(self, *, limit_value: int) -> None:
         super().__init__(limit_value=limit_value)


@@ -314,7 +376,7 @@ class AnyStrMaxLengthError(PydanticValueError):
     code = 'any_str.max_length'
     msg_template = 'ensure this value has at most {limit_value} characters'

-    def __init__(self, *, limit_value: int) ->None:
+    def __init__(self, *, limit_value: int) -> None:
         super().__init__(limit_value=limit_value)


@@ -326,13 +388,12 @@ class StrRegexError(PydanticValueError):
     code = 'str.regex'
     msg_template = 'string does not match regex "{pattern}"'

-    def __init__(self, *, pattern: str) ->None:
+    def __init__(self, *, pattern: str) -> None:
         super().__init__(pattern=pattern)


 class _NumberBoundError(PydanticValueError):
-
-    def __init__(self, *, limit_value: Union[int, float, Decimal]) ->None:
+    def __init__(self, *, limit_value: Union[int, float, Decimal]) -> None:
         super().__init__(limit_value=limit_value)


@@ -343,8 +404,7 @@ class NumberNotGtError(_NumberBoundError):

 class NumberNotGeError(_NumberBoundError):
     code = 'number.not_ge'
-    msg_template = (
-        'ensure this value is greater than or equal to {limit_value}')
+    msg_template = 'ensure this value is greater than or equal to {limit_value}'


 class NumberNotLtError(_NumberBoundError):
@@ -366,7 +426,7 @@ class NumberNotMultipleError(PydanticValueError):
     code = 'number.not_multiple'
     msg_template = 'ensure this value is a multiple of {multiple_of}'

-    def __init__(self, *, multiple_of: Union[int, float, Decimal]) ->None:
+    def __init__(self, *, multiple_of: Union[int, float, Decimal]) -> None:
         super().__init__(multiple_of=multiple_of)


@@ -381,29 +441,25 @@ class DecimalIsNotFiniteError(PydanticValueError):

 class DecimalMaxDigitsError(PydanticValueError):
     code = 'decimal.max_digits'
-    msg_template = (
-        'ensure that there are no more than {max_digits} digits in total')
+    msg_template = 'ensure that there are no more than {max_digits} digits in total'

-    def __init__(self, *, max_digits: int) ->None:
+    def __init__(self, *, max_digits: int) -> None:
         super().__init__(max_digits=max_digits)


 class DecimalMaxPlacesError(PydanticValueError):
     code = 'decimal.max_places'
-    msg_template = (
-        'ensure that there are no more than {decimal_places} decimal places')
+    msg_template = 'ensure that there are no more than {decimal_places} decimal places'

-    def __init__(self, *, decimal_places: int) ->None:
+    def __init__(self, *, decimal_places: int) -> None:
         super().__init__(decimal_places=decimal_places)


 class DecimalWholeDigitsError(PydanticValueError):
     code = 'decimal.whole_digits'
-    msg_template = (
-        'ensure that there are no more than {whole_digits} digits before the decimal point'
-        )
+    msg_template = 'ensure that there are no more than {whole_digits} digits before the decimal point'

-    def __init__(self, *, whole_digits: int) ->None:
+    def __init__(self, *, whole_digits: int) -> None:
         super().__init__(whole_digits=whole_digits)


@@ -445,7 +501,7 @@ class UUIDVersionError(PydanticValueError):
     code = 'uuid.version'
     msg_template = 'uuid version {required_version} expected'

-    def __init__(self, *, required_version: int) ->None:
+    def __init__(self, *, required_version: int) -> None:
         super().__init__(required_version=required_version)


@@ -453,9 +509,8 @@ class ArbitraryTypeError(PydanticTypeError):
     code = 'arbitrary_type'
     msg_template = 'instance of {expected_arbitrary_type} expected'

-    def __init__(self, *, expected_arbitrary_type: Type[Any]) ->None:
-        super().__init__(expected_arbitrary_type=display_as_type(
-            expected_arbitrary_type))
+    def __init__(self, *, expected_arbitrary_type: Type[Any]) -> None:
+        super().__init__(expected_arbitrary_type=display_as_type(expected_arbitrary_type))


 class ClassError(PydanticTypeError):
@@ -467,7 +522,7 @@ class SubclassError(PydanticTypeError):
     code = 'subclass'
     msg_template = 'subclass of {expected_class} expected'

-    def __init__(self, *, expected_class: Type[Any]) ->None:
+    def __init__(self, *, expected_class: Type[Any]) -> None:
         super().__init__(expected_class=display_as_type(expected_class))


@@ -579,11 +634,13 @@ class MissingDiscriminator(PydanticValueError):
 class InvalidDiscriminator(PydanticValueError):
     code = 'discriminated_union.invalid_discriminator'
     msg_template = (
-        'No match for discriminator {discriminator_key!r} and value {discriminator_value!r} (allowed values: {allowed_values})'
+        'No match for discriminator {discriminator_key!r} and value {discriminator_value!r} '
+        '(allowed values: {allowed_values})'
+    )
+
+    def __init__(self, *, discriminator_key: str, discriminator_value: Any, allowed_values: Sequence[Any]) -> None:
+        super().__init__(
+            discriminator_key=discriminator_key,
+            discriminator_value=discriminator_value,
+            allowed_values=', '.join(map(repr, allowed_values)),
         )
-
-    def __init__(self, *, discriminator_key: str, discriminator_value: Any,
-        allowed_values: Sequence[Any]) ->None:
-        super().__init__(discriminator_key=discriminator_key,
-            discriminator_value=discriminator_value, allowed_values=', '.
-            join(map(repr, allowed_values)))
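
A quick illustration of how these error classes reach end users: each one only defines `code` and `msg_template`, and the template is formatted with the keyword arguments passed to `__init__` when validation fails. A minimal sketch, not part of the patch, assuming pydantic v1 behaviour (importable as `pydantic.v1` under pydantic 2.x):

    from pydantic.v1 import BaseModel, ValidationError, conint

    class Limits(BaseModel):
        count: conint(ge=1)  # violations raise NumberNotGeError internally

    try:
        Limits(count=0)
    except ValidationError as exc:
        # msg_template 'ensure this value is greater than or equal to {limit_value}'
        # rendered with limit_value=1
        print(exc.errors()[0]['msg'])
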
diff --git a/pydantic/v1/fields.py b/pydantic/v1/fields.py
index 5364c3533..002b60cde 100644
--- a/pydantic/v1/fields.py
+++ b/pydantic/v1/fields.py
@@ -2,42 +2,93 @@ import copy
 import re
 from collections import Counter as CollectionCounter, defaultdict, deque
 from collections.abc import Callable, Hashable as CollectionsHashable, Iterable as CollectionsIterable
-from typing import TYPE_CHECKING, Any, Counter, DefaultDict, Deque, Dict, ForwardRef, FrozenSet, Generator, Iterable, Iterator, List, Mapping, Optional, Pattern, Sequence, Set, Tuple, Type, TypeVar, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Counter,
+    DefaultDict,
+    Deque,
+    Dict,
+    ForwardRef,
+    FrozenSet,
+    Generator,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Pattern,
+    Sequence,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)
+
 from typing_extensions import Annotated, Final
+
 from pydantic.v1 import errors as errors_
 from pydantic.v1.class_validators import Validator, make_generic_validator, prep_validators
 from pydantic.v1.error_wrappers import ErrorWrapper
 from pydantic.v1.errors import ConfigError, InvalidDiscriminator, MissingDiscriminator, NoneIsNotAllowedError
 from pydantic.v1.types import Json, JsonWrapper
-from pydantic.v1.typing import NoArgAnyCallable, convert_generics, display_as_type, get_args, get_origin, is_finalvar, is_literal_type, is_new_type, is_none_type, is_typeddict, is_typeddict_special, is_union, new_type_supertype
-from pydantic.v1.utils import PyObjectStr, Representation, ValueItems, get_discriminator_alias_and_values, get_unique_discriminator_alias, lenient_isinstance, lenient_issubclass, sequence_like, smart_deepcopy
+from pydantic.v1.typing import (
+    NoArgAnyCallable,
+    convert_generics,
+    display_as_type,
+    get_args,
+    get_origin,
+    is_finalvar,
+    is_literal_type,
+    is_new_type,
+    is_none_type,
+    is_typeddict,
+    is_typeddict_special,
+    is_union,
+    new_type_supertype,
+)
+from pydantic.v1.utils import (
+    PyObjectStr,
+    Representation,
+    ValueItems,
+    get_discriminator_alias_and_values,
+    get_unique_discriminator_alias,
+    lenient_isinstance,
+    lenient_issubclass,
+    sequence_like,
+    smart_deepcopy,
+)
 from pydantic.v1.validators import constant_validator, dict_validator, find_validators, validate_json
+
 Required: Any = Ellipsis
+
 T = TypeVar('T')


 class UndefinedType:
-
-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return 'PydanticUndefined'

-    def __copy__(self: T) ->T:
+    def __copy__(self: T) -> T:
         return self

-    def __reduce__(self) ->str:
+    def __reduce__(self) -> str:
         return 'Undefined'

-    def __deepcopy__(self: T, _: Any) ->T:
+    def __deepcopy__(self: T, _: Any) -> T:
         return self


 Undefined = UndefinedType()
+
 if TYPE_CHECKING:
     from pydantic.v1.class_validators import ValidatorsList
     from pydantic.v1.config import BaseConfig
     from pydantic.v1.error_wrappers import ErrorList
     from pydantic.v1.types import ModelOrDc
     from pydantic.v1.typing import AbstractSetIntStr, MappingIntStrAny, ReprArgs
+
     ValidateReturn = Tuple[Optional[Any], Optional[ErrorList]]
     LocStr = Union[Tuple[Union[int, str], ...], str]
     BoolUndefined = Union[bool, UndefinedType]
@@ -47,24 +98,61 @@ class FieldInfo(Representation):
     """
     Captures extra information about a field.
     """
-    __slots__ = ('default', 'default_factory', 'alias', 'alias_priority',
-        'title', 'description', 'exclude', 'include', 'const', 'gt', 'ge',
-        'lt', 'le', 'multiple_of', 'allow_inf_nan', 'max_digits',
-        'decimal_places', 'min_items', 'max_items', 'unique_items',
-        'min_length', 'max_length', 'allow_mutation', 'repr', 'regex',
-        'discriminator', 'extra')
-    __field_constraints__ = {'min_length': None, 'max_length': None,
-        'regex': None, 'gt': None, 'lt': None, 'ge': None, 'le': None,
-        'multiple_of': None, 'allow_inf_nan': None, 'max_digits': None,
-        'decimal_places': None, 'min_items': None, 'max_items': None,
-        'unique_items': None, 'allow_mutation': True}
-
-    def __init__(self, default: Any=Undefined, **kwargs: Any) ->None:
+
+    __slots__ = (
+        'default',
+        'default_factory',
+        'alias',
+        'alias_priority',
+        'title',
+        'description',
+        'exclude',
+        'include',
+        'const',
+        'gt',
+        'ge',
+        'lt',
+        'le',
+        'multiple_of',
+        'allow_inf_nan',
+        'max_digits',
+        'decimal_places',
+        'min_items',
+        'max_items',
+        'unique_items',
+        'min_length',
+        'max_length',
+        'allow_mutation',
+        'repr',
+        'regex',
+        'discriminator',
+        'extra',
+    )
+
+    # field constraints with the default value, it's also used in update_from_config below
+    __field_constraints__ = {
+        'min_length': None,
+        'max_length': None,
+        'regex': None,
+        'gt': None,
+        'lt': None,
+        'ge': None,
+        'le': None,
+        'multiple_of': None,
+        'allow_inf_nan': None,
+        'max_digits': None,
+        'decimal_places': None,
+        'min_items': None,
+        'max_items': None,
+        'unique_items': None,
+        'allow_mutation': True,
+    }
+
+    def __init__(self, default: Any = Undefined, **kwargs: Any) -> None:
         self.default = default
         self.default_factory = kwargs.pop('default_factory', None)
         self.alias = kwargs.pop('alias', None)
-        self.alias_priority = kwargs.pop('alias_priority', 2 if self.alias
-             is not None else None)
+        self.alias_priority = kwargs.pop('alias_priority', 2 if self.alias is not None else None)
         self.title = kwargs.pop('title', None)
         self.description = kwargs.pop('description', None)
         self.exclude = kwargs.pop('exclude', None)
@@ -89,42 +177,76 @@ class FieldInfo(Representation):
         self.repr = kwargs.pop('repr', True)
         self.extra = kwargs

-    def __repr_args__(self) ->'ReprArgs':
-        field_defaults_to_hide: Dict[str, Any] = {'repr': True, **self.
-            __field_constraints__}
+    def __repr_args__(self) -> 'ReprArgs':
+        field_defaults_to_hide: Dict[str, Any] = {
+            'repr': True,
+            **self.__field_constraints__,
+        }
+
         attrs = ((s, getattr(self, s)) for s in self.__slots__)
-        return [(a, v) for a, v in attrs if v != field_defaults_to_hide.get
-            (a, None)]
+        return [(a, v) for a, v in attrs if v != field_defaults_to_hide.get(a, None)]

-    def get_constraints(self) ->Set[str]:
+    def get_constraints(self) -> Set[str]:
         """
         Gets the constraints set on the field by comparing the constraint value with its default value

         :return: the constraints set on field_info
         """
-        pass
+        return {attr for attr, default in self.__field_constraints__.items() if getattr(self, attr) != default}

-    def update_from_config(self, from_config: Dict[str, Any]) ->None:
+    def update_from_config(self, from_config: Dict[str, Any]) -> None:
         """
         Update this FieldInfo based on a dict from get_field_info, only fields which have not been set are updated.
         """
-        pass
-
-
-def Field(default: Any=Undefined, *, default_factory: Optional[
-    NoArgAnyCallable]=None, alias: Optional[str]=None, title: Optional[str]
-    =None, description: Optional[str]=None, exclude: Optional[Union[
-    'AbstractSetIntStr', 'MappingIntStrAny', Any]]=None, include: Optional[
-    Union['AbstractSetIntStr', 'MappingIntStrAny', Any]]=None, const:
-    Optional[bool]=None, gt: Optional[float]=None, ge: Optional[float]=None,
-    lt: Optional[float]=None, le: Optional[float]=None, multiple_of:
-    Optional[float]=None, allow_inf_nan: Optional[bool]=None, max_digits:
-    Optional[int]=None, decimal_places: Optional[int]=None, min_items:
-    Optional[int]=None, max_items: Optional[int]=None, unique_items:
-    Optional[bool]=None, min_length: Optional[int]=None, max_length:
-    Optional[int]=None, allow_mutation: bool=True, regex: Optional[str]=
-    None, discriminator: Optional[str]=None, repr: bool=True, **extra: Any
-    ) ->Any:
+        for attr_name, value in from_config.items():
+            try:
+                current_value = getattr(self, attr_name)
+            except AttributeError:
+                # attr_name is not an attribute of FieldInfo, it should therefore be added to extra
+                # (except if extra already has this value!)
+                self.extra.setdefault(attr_name, value)
+            else:
+                if current_value is self.__field_constraints__.get(attr_name, None):
+                    setattr(self, attr_name, value)
+                elif attr_name == 'exclude':
+                    self.exclude = ValueItems.merge(value, current_value)
+                elif attr_name == 'include':
+                    self.include = ValueItems.merge(value, current_value, intersect=True)
+
+    def _validate(self) -> None:
+        if self.default is not Undefined and self.default_factory is not None:
+            raise ValueError('cannot specify both default and default_factory')
+
+
+def Field(
+    default: Any = Undefined,
+    *,
+    default_factory: Optional[NoArgAnyCallable] = None,
+    alias: Optional[str] = None,
+    title: Optional[str] = None,
+    description: Optional[str] = None,
+    exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None,
+    include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None,
+    const: Optional[bool] = None,
+    gt: Optional[float] = None,
+    ge: Optional[float] = None,
+    lt: Optional[float] = None,
+    le: Optional[float] = None,
+    multiple_of: Optional[float] = None,
+    allow_inf_nan: Optional[bool] = None,
+    max_digits: Optional[int] = None,
+    decimal_places: Optional[int] = None,
+    min_items: Optional[int] = None,
+    max_items: Optional[int] = None,
+    unique_items: Optional[bool] = None,
+    min_length: Optional[int] = None,
+    max_length: Optional[int] = None,
+    allow_mutation: bool = True,
+    regex: Optional[str] = None,
+    discriminator: Optional[str] = None,
+    repr: bool = True,
+    **extra: Any,
+) -> Any:
     """
     Used to provide extra information about a field, either for the model schema or complex validation. Some arguments
     apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``.
@@ -176,9 +298,39 @@ def Field(default: Any=Undefined, *, default_factory: Optional[
     :param repr: show this field in the representation
     :param **extra: any additional keyword arguments will be added as is to the schema
     """
-    pass
+    field_info = FieldInfo(
+        default,
+        default_factory=default_factory,
+        alias=alias,
+        title=title,
+        description=description,
+        exclude=exclude,
+        include=include,
+        const=const,
+        gt=gt,
+        ge=ge,
+        lt=lt,
+        le=le,
+        multiple_of=multiple_of,
+        allow_inf_nan=allow_inf_nan,
+        max_digits=max_digits,
+        decimal_places=decimal_places,
+        min_items=min_items,
+        max_items=max_items,
+        unique_items=unique_items,
+        min_length=min_length,
+        max_length=max_length,
+        allow_mutation=allow_mutation,
+        regex=regex,
+        discriminator=discriminator,
+        repr=repr,
+        **extra,
+    )
+    field_info._validate()
+    return field_info


+# used to be an enum but changed to int's for small performance improvement as less access overhead
 SHAPE_SINGLETON = 1
 SHAPE_LIST = 2
 SHAPE_SET = 3
@@ -193,28 +345,65 @@ SHAPE_DEQUE = 11
 SHAPE_DICT = 12
 SHAPE_DEFAULTDICT = 13
 SHAPE_COUNTER = 14
-SHAPE_NAME_LOOKUP = {SHAPE_LIST: 'List[{}]', SHAPE_SET: 'Set[{}]',
-    SHAPE_TUPLE_ELLIPSIS: 'Tuple[{}, ...]', SHAPE_SEQUENCE: 'Sequence[{}]',
-    SHAPE_FROZENSET: 'FrozenSet[{}]', SHAPE_ITERABLE: 'Iterable[{}]',
-    SHAPE_DEQUE: 'Deque[{}]', SHAPE_DICT: 'Dict[{}]', SHAPE_DEFAULTDICT:
-    'DefaultDict[{}]', SHAPE_COUNTER: 'Counter[{}]'}
-MAPPING_LIKE_SHAPES: Set[int] = {SHAPE_DEFAULTDICT, SHAPE_DICT,
-    SHAPE_MAPPING, SHAPE_COUNTER}
+SHAPE_NAME_LOOKUP = {
+    SHAPE_LIST: 'List[{}]',
+    SHAPE_SET: 'Set[{}]',
+    SHAPE_TUPLE_ELLIPSIS: 'Tuple[{}, ...]',
+    SHAPE_SEQUENCE: 'Sequence[{}]',
+    SHAPE_FROZENSET: 'FrozenSet[{}]',
+    SHAPE_ITERABLE: 'Iterable[{}]',
+    SHAPE_DEQUE: 'Deque[{}]',
+    SHAPE_DICT: 'Dict[{}]',
+    SHAPE_DEFAULTDICT: 'DefaultDict[{}]',
+    SHAPE_COUNTER: 'Counter[{}]',
+}
+
+MAPPING_LIKE_SHAPES: Set[int] = {SHAPE_DEFAULTDICT, SHAPE_DICT, SHAPE_MAPPING, SHAPE_COUNTER}


 class ModelField(Representation):
-    __slots__ = ('type_', 'outer_type_', 'annotation', 'sub_fields',
-        'sub_fields_mapping', 'key_field', 'validators', 'pre_validators',
-        'post_validators', 'default', 'default_factory', 'required',
-        'final', 'model_config', 'name', 'alias', 'has_alias', 'field_info',
-        'discriminator_key', 'discriminator_alias', 'validate_always',
-        'allow_none', 'shape', 'class_validators', 'parse_json')
-
-    def __init__(self, *, name: str, type_: Type[Any], class_validators:
-        Optional[Dict[str, Validator]], model_config: Type['BaseConfig'],
-        default: Any=None, default_factory: Optional[NoArgAnyCallable]=None,
-        required: 'BoolUndefined'=Undefined, final: bool=False, alias:
-        Optional[str]=None, field_info: Optional[FieldInfo]=None) ->None:
+    __slots__ = (
+        'type_',
+        'outer_type_',
+        'annotation',
+        'sub_fields',
+        'sub_fields_mapping',
+        'key_field',
+        'validators',
+        'pre_validators',
+        'post_validators',
+        'default',
+        'default_factory',
+        'required',
+        'final',
+        'model_config',
+        'name',
+        'alias',
+        'has_alias',
+        'field_info',
+        'discriminator_key',
+        'discriminator_alias',
+        'validate_always',
+        'allow_none',
+        'shape',
+        'class_validators',
+        'parse_json',
+    )
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        type_: Type[Any],
+        class_validators: Optional[Dict[str, Validator]],
+        model_config: Type['BaseConfig'],
+        default: Any = None,
+        default_factory: Optional[NoArgAnyCallable] = None,
+        required: 'BoolUndefined' = Undefined,
+        final: bool = False,
+        alias: Optional[str] = None,
+        field_info: Optional[FieldInfo] = None,
+    ) -> None:
         self.name: str = name
         self.has_alias: bool = alias is not None
         self.alias: str = alias if alias is not None else name
@@ -230,10 +419,11 @@ class ModelField(Representation):
         self.field_info: FieldInfo = field_info or FieldInfo(default)
         self.discriminator_key: Optional[str] = self.field_info.discriminator
         self.discriminator_alias: Optional[str] = self.discriminator_key
+
         self.allow_none: bool = False
         self.validate_always: bool = False
         self.sub_fields: Optional[List[ModelField]] = None
-        self.sub_fields_mapping: Optional[Dict[str, 'ModelField']] = None
+        self.sub_fields_mapping: Optional[Dict[str, 'ModelField']] = None  # used for discriminated union
         self.key_field: Optional[ModelField] = None
         self.validators: 'ValidatorsList' = []
         self.pre_validators: Optional['ValidatorsList'] = None
@@ -243,9 +433,13 @@ class ModelField(Representation):
         self.model_config.prepare_field(self)
         self.prepare()

+    def get_default(self) -> Any:
+        return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory()
+
     @staticmethod
-    def _get_field_info(field_name: str, annotation: Any, value: Any,
-        config: Type['BaseConfig']) ->Tuple[FieldInfo, Any]:
+    def _get_field_info(
+        field_name: str, annotation: Any, value: Any, config: Type['BaseConfig']
+    ) -> Tuple[FieldInfo, Any]:
         """
         Get a FieldInfo from a root typing.Annotated annotation, value, or config default.

@@ -258,99 +452,777 @@ class ModelField(Representation):
         :param config: the model's config object
         :return: the FieldInfo contained in the `annotation`, the value, or a new one from the config.
         """
-        pass
+        field_info_from_config = config.get_field_info(field_name)
+
+        field_info = None
+        if get_origin(annotation) is Annotated:
+            field_infos = [arg for arg in get_args(annotation)[1:] if isinstance(arg, FieldInfo)]
+            if len(field_infos) > 1:
+                raise ValueError(f'cannot specify multiple `Annotated` `Field`s for {field_name!r}')
+            field_info = next(iter(field_infos), None)
+            if field_info is not None:
+                field_info = copy.copy(field_info)
+                field_info.update_from_config(field_info_from_config)
+                if field_info.default not in (Undefined, Required):
+                    raise ValueError(f'`Field` default cannot be set in `Annotated` for {field_name!r}')
+                if value is not Undefined and value is not Required:
+                    # check also `Required` because of `validate_arguments` that sets `...` as default value
+                    field_info.default = value
+
+        if isinstance(value, FieldInfo):
+            if field_info is not None:
+                raise ValueError(f'cannot specify `Annotated` and value `Field`s together for {field_name!r}')
+            field_info = value
+            field_info.update_from_config(field_info_from_config)
+        elif field_info is None:
+            field_info = FieldInfo(value, **field_info_from_config)
+        value = None if field_info.default_factory is not None else field_info.default
+        field_info._validate()
+        return field_info, value

-    def prepare(self) ->None:
+    @classmethod
+    def infer(
+        cls,
+        *,
+        name: str,
+        value: Any,
+        annotation: Any,
+        class_validators: Optional[Dict[str, Validator]],
+        config: Type['BaseConfig'],
+    ) -> 'ModelField':
+        from pydantic.v1.schema import get_annotation_from_field_info
+
+        field_info, value = cls._get_field_info(name, annotation, value, config)
+        required: 'BoolUndefined' = Undefined
+        if value is Required:
+            required = True
+            value = None
+        elif value is not Undefined:
+            required = False
+        annotation = get_annotation_from_field_info(annotation, field_info, name, config.validate_assignment)
+
+        return cls(
+            name=name,
+            type_=annotation,
+            alias=field_info.alias,
+            class_validators=class_validators,
+            default=value,
+            default_factory=field_info.default_factory,
+            required=required,
+            model_config=config,
+            field_info=field_info,
+        )
+
+    def set_config(self, config: Type['BaseConfig']) -> None:
+        self.model_config = config
+        info_from_config = config.get_field_info(self.name)
+        config.prepare_field(self)
+        new_alias = info_from_config.get('alias')
+        new_alias_priority = info_from_config.get('alias_priority') or 0
+        if new_alias and new_alias_priority >= (self.field_info.alias_priority or 0):
+            self.field_info.alias = new_alias
+            self.field_info.alias_priority = new_alias_priority
+            self.alias = new_alias
+        new_exclude = info_from_config.get('exclude')
+        if new_exclude is not None:
+            self.field_info.exclude = ValueItems.merge(self.field_info.exclude, new_exclude)
+        new_include = info_from_config.get('include')
+        if new_include is not None:
+            self.field_info.include = ValueItems.merge(self.field_info.include, new_include, intersect=True)
+
+    @property
+    def alt_alias(self) -> bool:
+        return self.name != self.alias
+
+    def prepare(self) -> None:
         """
         Prepare the field by inspecting self.default, self.type_ etc.

         Note: this method is **not** idempotent (because _type_analysis is not idempotent),
         e.g. calling it multiple times may modify the field and configure it incorrectly.
         """
-        pass
+        self._set_default_and_type()
+        if self.type_.__class__ is ForwardRef or self.type_.__class__ is DeferredType:
+            # self.type_ is currently a ForwardRef and there's nothing we can do now,
+            # user will need to call model.update_forward_refs()
+            return
+
+        self._type_analysis()
+        if self.required is Undefined:
+            self.required = True
+        if self.default is Undefined and self.default_factory is None:
+            self.default = None
+        self.populate_validators()

-    def _set_default_and_type(self) ->None:
+    def _set_default_and_type(self) -> None:
         """
         Set the default value, infer the type if needed and check if `None` value is valid.
         """
-        pass
+        if self.default_factory is not None:
+            if self.type_ is Undefined:
+                raise errors_.ConfigError(
+                    f'you need to set the type of field {self.name!r} when using `default_factory`'
+                )
+            return

-    def prepare_discriminated_union_sub_fields(self) ->None:
+        default_value = self.get_default()
+
+        if default_value is not None and self.type_ is Undefined:
+            self.type_ = default_value.__class__
+            self.outer_type_ = self.type_
+            self.annotation = self.type_
+
+        if self.type_ is Undefined:
+            raise errors_.ConfigError(f'unable to infer type for attribute "{self.name}"')
+
+        if self.required is False and default_value is None:
+            self.allow_none = True
+
+    def _type_analysis(self) -> None:  # noqa: C901 (ignore complexity)
+        # typing interface is horrible, we have to do some ugly checks
+        if lenient_issubclass(self.type_, JsonWrapper):
+            self.type_ = self.type_.inner_type
+            self.parse_json = True
+        elif lenient_issubclass(self.type_, Json):
+            self.type_ = Any
+            self.parse_json = True
+        elif isinstance(self.type_, TypeVar):
+            if self.type_.__bound__:
+                self.type_ = self.type_.__bound__
+            elif self.type_.__constraints__:
+                self.type_ = Union[self.type_.__constraints__]
+            else:
+                self.type_ = Any
+        elif is_new_type(self.type_):
+            self.type_ = new_type_supertype(self.type_)
+
+        if self.type_ is Any or self.type_ is object:
+            if self.required is Undefined:
+                self.required = False
+            self.allow_none = True
+            return
+        elif self.type_ is Pattern or self.type_ is re.Pattern:
+            # python 3.7 only, Pattern is a typing object but without sub fields
+            return
+        elif is_literal_type(self.type_):
+            return
+        elif is_typeddict(self.type_):
+            return
+
+        if is_finalvar(self.type_):
+            self.final = True
+
+            if self.type_ is Final:
+                self.type_ = Any
+            else:
+                self.type_ = get_args(self.type_)[0]
+
+            self._type_analysis()
+            return
+
+        origin = get_origin(self.type_)
+
+        if origin is Annotated or is_typeddict_special(origin):
+            self.type_ = get_args(self.type_)[0]
+            self._type_analysis()
+            return
+
+        if self.discriminator_key is not None and not is_union(origin):
+            raise TypeError('`discriminator` can only be used with `Union` type with more than one variant')
+
+        # add extra check for `collections.abc.Hashable` for python 3.10+ where origin is not `None`
+        if origin is None or origin is CollectionsHashable:
+            # field is not "typing" object eg. Union, Dict, List etc.
+            # allow None for virtual superclasses of NoneType, e.g. Hashable
+            if isinstance(self.type_, type) and isinstance(None, self.type_):
+                self.allow_none = True
+            return
+        elif origin is Callable:
+            return
+        elif is_union(origin):
+            types_ = []
+            for type_ in get_args(self.type_):
+                if is_none_type(type_) or type_ is Any or type_ is object:
+                    if self.required is Undefined:
+                        self.required = False
+                    self.allow_none = True
+                if is_none_type(type_):
+                    continue
+                types_.append(type_)
+
+            if len(types_) == 1:
+                # Optional[]
+                self.type_ = types_[0]
+                # this is the one case where the "outer type" isn't just the original type
+                self.outer_type_ = self.type_
+                # re-run to correctly interpret the new self.type_
+                self._type_analysis()
+            else:
+                self.sub_fields = [self._create_sub_type(t, f'{self.name}_{display_as_type(t)}') for t in types_]
+
+                if self.discriminator_key is not None:
+                    self.prepare_discriminated_union_sub_fields()
+            return
+        elif issubclass(origin, Tuple):  # type: ignore
+            # origin == Tuple without item type
+            args = get_args(self.type_)
+            if not args:  # plain tuple
+                self.type_ = Any
+                self.shape = SHAPE_TUPLE_ELLIPSIS
+            elif len(args) == 2 and args[1] is Ellipsis:  # e.g. Tuple[int, ...]
+                self.type_ = args[0]
+                self.shape = SHAPE_TUPLE_ELLIPSIS
+                self.sub_fields = [self._create_sub_type(args[0], f'{self.name}_0')]
+            elif args == ((),):  # Tuple[()] means empty tuple
+                self.shape = SHAPE_TUPLE
+                self.type_ = Any
+                self.sub_fields = []
+            else:
+                self.shape = SHAPE_TUPLE
+                self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(args)]
+            return
+        elif issubclass(origin, List):
+            # Create self validators
+            get_validators = getattr(self.type_, '__get_validators__', None)
+            if get_validators:
+                self.class_validators.update(
+                    {f'list_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())}
+                )
+
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_LIST
+        elif issubclass(origin, Set):
+            # Create self validators
+            get_validators = getattr(self.type_, '__get_validators__', None)
+            if get_validators:
+                self.class_validators.update(
+                    {f'set_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())}
+                )
+
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_SET
+        elif issubclass(origin, FrozenSet):
+            # Create self validators
+            get_validators = getattr(self.type_, '__get_validators__', None)
+            if get_validators:
+                self.class_validators.update(
+                    {f'frozenset_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())}
+                )
+
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_FROZENSET
+        elif issubclass(origin, Deque):
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_DEQUE
+        elif issubclass(origin, Sequence):
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_SEQUENCE
+        # priority to most common mapping: dict
+        elif origin is dict or origin is Dict:
+            self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
+            self.type_ = get_args(self.type_)[1]
+            self.shape = SHAPE_DICT
+        elif issubclass(origin, DefaultDict):
+            self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
+            self.type_ = get_args(self.type_)[1]
+            self.shape = SHAPE_DEFAULTDICT
+        elif issubclass(origin, Counter):
+            self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
+            self.type_ = int
+            self.shape = SHAPE_COUNTER
+        elif issubclass(origin, Mapping):
+            self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
+            self.type_ = get_args(self.type_)[1]
+            self.shape = SHAPE_MAPPING
+        # Equality check as almost everything inherits from Iterable, including str
+        # check for Iterable and CollectionsIterable, as it could receive one even when declared with the other
+        elif origin in {Iterable, CollectionsIterable}:
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_ITERABLE
+            self.sub_fields = [self._create_sub_type(self.type_, f'{self.name}_type')]
+        elif issubclass(origin, Type):  # type: ignore
+            return
+        elif hasattr(origin, '__get_validators__') or self.model_config.arbitrary_types_allowed:
+            # Is a Pydantic-compatible generic that handles itself
+            # or we have arbitrary_types_allowed = True
+            self.shape = SHAPE_GENERIC
+            self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(get_args(self.type_))]
+            self.type_ = origin
+            return
+        else:
+            raise TypeError(f'Fields of type "{origin}" are not supported.')
+
+        # type_ has been refined eg. as the type of a List and sub_fields needs to be populated
+        self.sub_fields = [self._create_sub_type(self.type_, '_' + self.name)]
+
+    def prepare_discriminated_union_sub_fields(self) -> None:
         """
         Prepare the mapping <discriminator key> -> <ModelField> and update `sub_fields`
         Note that this process can be aborted if a `ForwardRef` is encountered
         """
-        pass
+        assert self.discriminator_key is not None
+
+        if self.type_.__class__ is DeferredType:
+            return
+
+        assert self.sub_fields is not None
+        sub_fields_mapping: Dict[str, 'ModelField'] = {}
+        all_aliases: Set[str] = set()
+
+        for sub_field in self.sub_fields:
+            t = sub_field.type_
+            if t.__class__ is ForwardRef:
+                # Stopping everything...will need to call `update_forward_refs`
+                return
+
+            alias, discriminator_values = get_discriminator_alias_and_values(t, self.discriminator_key)
+            all_aliases.add(alias)
+            for discriminator_value in discriminator_values:
+                sub_fields_mapping[discriminator_value] = sub_field

-    def populate_validators(self) ->None:
+        self.sub_fields_mapping = sub_fields_mapping
+        self.discriminator_alias = get_unique_discriminator_alias(all_aliases, self.discriminator_key)
+
+    def _create_sub_type(self, type_: Type[Any], name: str, *, for_keys: bool = False) -> 'ModelField':
+        if for_keys:
+            class_validators = None
+        else:
+            # validators for sub items should not have `each_item` as we want to check only the first sublevel
+            class_validators = {
+                k: Validator(
+                    func=v.func,
+                    pre=v.pre,
+                    each_item=False,
+                    always=v.always,
+                    check_fields=v.check_fields,
+                    skip_on_failure=v.skip_on_failure,
+                )
+                for k, v in self.class_validators.items()
+                if v.each_item
+            }
+
+        field_info, _ = self._get_field_info(name, type_, None, self.model_config)
+
+        return self.__class__(
+            type_=type_,
+            name=name,
+            class_validators=class_validators,
+            model_config=self.model_config,
+            field_info=field_info,
+        )
+
+    def populate_validators(self) -> None:
         """
         Prepare self.pre_validators, self.validators, and self.post_validators based on self.type_'s  __get_validators__
         and class validators. This method should be idempotent, e.g. it should be safe to call multiple times
         without mis-configuring the field.
         """
-        pass
+        self.validate_always = getattr(self.type_, 'validate_always', False) or any(
+            v.always for v in self.class_validators.values()
+        )
+
+        class_validators_ = self.class_validators.values()
+        if not self.sub_fields or self.shape == SHAPE_GENERIC:
+            get_validators = getattr(self.type_, '__get_validators__', None)
+            v_funcs = (
+                *[v.func for v in class_validators_ if v.each_item and v.pre],
+                *(get_validators() if get_validators else list(find_validators(self.type_, self.model_config))),
+                *[v.func for v in class_validators_ if v.each_item and not v.pre],
+            )
+            self.validators = prep_validators(v_funcs)
+
+        self.pre_validators = []
+        self.post_validators = []
+
+        if self.field_info and self.field_info.const:
+            self.post_validators.append(make_generic_validator(constant_validator))

-    def _validate_sequence_like(self, v: Any, values: Dict[str, Any], loc:
-        'LocStr', cls: Optional['ModelOrDc']) ->'ValidateReturn':
+        if class_validators_:
+            self.pre_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and v.pre)
+            self.post_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre)
+
+        if self.parse_json:
+            self.pre_validators.append(make_generic_validator(validate_json))
+
+        self.pre_validators = self.pre_validators or None
+        self.post_validators = self.post_validators or None
+
+    def validate(
+        self, v: Any, values: Dict[str, Any], *, loc: 'LocStr', cls: Optional['ModelOrDc'] = None
+    ) -> 'ValidateReturn':
+        assert self.type_.__class__ is not DeferredType
+
+        if self.type_.__class__ is ForwardRef:
+            assert cls is not None
+            raise ConfigError(
+                f'field "{self.name}" not yet prepared so type is still a ForwardRef, '
+                f'you might need to call {cls.__name__}.update_forward_refs().'
+            )
+
+        errors: Optional['ErrorList']
+        if self.pre_validators:
+            v, errors = self._apply_validators(v, values, loc, cls, self.pre_validators)
+            if errors:
+                return v, errors
+
+        if v is None:
+            if is_none_type(self.type_):
+                # keep validating
+                pass
+            elif self.allow_none:
+                if self.post_validators:
+                    return self._apply_validators(v, values, loc, cls, self.post_validators)
+                else:
+                    return None, None
+            else:
+                return v, ErrorWrapper(NoneIsNotAllowedError(), loc)
+
+        if self.shape == SHAPE_SINGLETON:
+            v, errors = self._validate_singleton(v, values, loc, cls)
+        elif self.shape in MAPPING_LIKE_SHAPES:
+            v, errors = self._validate_mapping_like(v, values, loc, cls)
+        elif self.shape == SHAPE_TUPLE:
+            v, errors = self._validate_tuple(v, values, loc, cls)
+        elif self.shape == SHAPE_ITERABLE:
+            v, errors = self._validate_iterable(v, values, loc, cls)
+        elif self.shape == SHAPE_GENERIC:
+            v, errors = self._apply_validators(v, values, loc, cls, self.validators)
+        else:
+            #  sequence, list, set, generator, tuple with ellipsis, frozen set
+            v, errors = self._validate_sequence_like(v, values, loc, cls)
+
+        if not errors and self.post_validators:
+            v, errors = self._apply_validators(v, values, loc, cls, self.post_validators)
+        return v, errors
+
+    def _validate_sequence_like(  # noqa: C901 (ignore complexity)
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
         """
         Validate sequence-like containers: lists, tuples, sets and generators
         Note that large if-else blocks are necessary to enable Cython
         optimization, which is why we disable the complexity check above.
         """
-        pass
+        if not sequence_like(v):
+            e: errors_.PydanticTypeError
+            if self.shape == SHAPE_LIST:
+                e = errors_.ListError()
+            elif self.shape in (SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS):
+                e = errors_.TupleError()
+            elif self.shape == SHAPE_SET:
+                e = errors_.SetError()
+            elif self.shape == SHAPE_FROZENSET:
+                e = errors_.FrozenSetError()
+            else:
+                e = errors_.SequenceError()
+            return v, ErrorWrapper(e, loc)
+
+        loc = loc if isinstance(loc, tuple) else (loc,)
+        result = []
+        errors: List[ErrorList] = []
+        for i, v_ in enumerate(v):
+            v_loc = *loc, i
+            r, ee = self._validate_singleton(v_, values, v_loc, cls)
+            if ee:
+                errors.append(ee)
+            else:
+                result.append(r)
+
+        if errors:
+            return v, errors

-    def _validate_iterable(self, v: Any, values: Dict[str, Any], loc:
-        'LocStr', cls: Optional['ModelOrDc']) ->'ValidateReturn':
+        converted: Union[List[Any], Set[Any], FrozenSet[Any], Tuple[Any, ...], Iterator[Any], Deque[Any]] = result
+
+        if self.shape == SHAPE_SET:
+            converted = set(result)
+        elif self.shape == SHAPE_FROZENSET:
+            converted = frozenset(result)
+        elif self.shape == SHAPE_TUPLE_ELLIPSIS:
+            converted = tuple(result)
+        elif self.shape == SHAPE_DEQUE:
+            converted = deque(result, maxlen=getattr(v, 'maxlen', None))
+        elif self.shape == SHAPE_SEQUENCE:
+            if isinstance(v, tuple):
+                converted = tuple(result)
+            elif isinstance(v, set):
+                converted = set(result)
+            elif isinstance(v, Generator):
+                converted = iter(result)
+            elif isinstance(v, deque):
+                converted = deque(result, maxlen=getattr(v, 'maxlen', None))
+        return converted, None
+
+    def _validate_iterable(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
         """
         Validate Iterables.

         This intentionally doesn't validate values to allow infinite generators.
         """
-        pass

-    def _get_mapping_value(self, original: T, converted: Dict[Any, Any]
-        ) ->Union[T, Dict[Any, Any]]:
+        try:
+            iterable = iter(v)
+        except TypeError:
+            return v, ErrorWrapper(errors_.IterableError(), loc)
+        return iterable, None
+
+    def _validate_tuple(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        e: Optional[Exception] = None
+        if not sequence_like(v):
+            e = errors_.TupleError()
+        else:
+            actual_length, expected_length = len(v), len(self.sub_fields)  # type: ignore
+            if actual_length != expected_length:
+                e = errors_.TupleLengthError(actual_length=actual_length, expected_length=expected_length)
+
+        if e:
+            return v, ErrorWrapper(e, loc)
+
+        loc = loc if isinstance(loc, tuple) else (loc,)
+        result = []
+        errors: List[ErrorList] = []
+        for i, (v_, field) in enumerate(zip(v, self.sub_fields)):  # type: ignore
+            v_loc = *loc, i
+            r, ee = field.validate(v_, values, loc=v_loc, cls=cls)
+            if ee:
+                errors.append(ee)
+            else:
+                result.append(r)
+
+        if errors:
+            return v, errors
+        else:
+            return tuple(result), None
+
+    def _validate_mapping_like(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        try:
+            v_iter = dict_validator(v)
+        except TypeError as exc:
+            return v, ErrorWrapper(exc, loc)
+
+        loc = loc if isinstance(loc, tuple) else (loc,)
+        result, errors = {}, []
+        for k, v_ in v_iter.items():
+            v_loc = *loc, '__key__'
+            key_result, key_errors = self.key_field.validate(k, values, loc=v_loc, cls=cls)  # type: ignore
+            if key_errors:
+                errors.append(key_errors)
+                continue
+
+            v_loc = *loc, k
+            value_result, value_errors = self._validate_singleton(v_, values, v_loc, cls)
+            if value_errors:
+                errors.append(value_errors)
+                continue
+
+            result[key_result] = value_result
+        if errors:
+            return v, errors
+        elif self.shape == SHAPE_DICT:
+            return result, None
+        elif self.shape == SHAPE_DEFAULTDICT:
+            return defaultdict(self.type_, result), None
+        elif self.shape == SHAPE_COUNTER:
+            return CollectionCounter(result), None
+        else:
+            return self._get_mapping_value(v, result), None
+
+    def _get_mapping_value(self, original: T, converted: Dict[Any, Any]) -> Union[T, Dict[Any, Any]]:
         """
         When type is `Mapping[KT, KV]` (or another unsupported mapping), we try to avoid
         coercing to `dict` unwillingly.
         """
-        pass
+        original_cls = original.__class__
+
+        if original_cls == dict or original_cls == Dict:
+            return converted
+        elif original_cls in {defaultdict, DefaultDict}:
+            return defaultdict(self.type_, converted)
+        else:
+            try:
+                # Counter, OrderedDict, UserDict, ...
+                return original_cls(converted)  # type: ignore
+            except TypeError:
+                raise RuntimeError(f'Could not convert dictionary to {original_cls.__name__!r}') from None
+
+    def _validate_singleton(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        if self.sub_fields:
+            if self.discriminator_key is not None:
+                return self._validate_discriminated_union(v, values, loc, cls)
+
+            errors = []
+
+            if self.model_config.smart_union and is_union(get_origin(self.type_)):
+                # 1st pass: check if the value is an exact instance of one of the Union types
+                # (e.g. to avoid coercing a bool into an int)
+                for field in self.sub_fields:
+                    if v.__class__ is field.outer_type_:
+                        return v, None
+
+                # 2nd pass: check if the value is an instance of any subclass of the Union types
+                for field in self.sub_fields:
+                    # This whole logic will be improved later on to support more complex `isinstance` checks
+                    # It will probably be done once a strict mode is added and be something like:
+                    # ```
+                    #     value, error = field.validate(v, values, strict=True)
+                    #     if error is None:
+                    #         return value, None
+                    # ```
+                    try:
+                        if isinstance(v, field.outer_type_):
+                            return v, None
+                    except TypeError:
+                        # compound type
+                        if lenient_isinstance(v, get_origin(field.outer_type_)):
+                            value, error = field.validate(v, values, loc=loc, cls=cls)
+                            if not error:
+                                return value, None
+
+            # 1st pass by default or 3rd pass with `smart_union` enabled:
+            # check if the value can be coerced into one of the Union types
+            for field in self.sub_fields:
+                value, error = field.validate(v, values, loc=loc, cls=cls)
+                if error:
+                    errors.append(error)
+                else:
+                    return value, None
+            return v, errors
+        else:
+            return self._apply_validators(v, values, loc, cls, self.validators)
+
+    def _validate_discriminated_union(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        assert self.discriminator_key is not None
+        assert self.discriminator_alias is not None
+
+        try:
+            try:
+                discriminator_value = v[self.discriminator_alias]
+            except KeyError:
+                if self.model_config.allow_population_by_field_name:
+                    discriminator_value = v[self.discriminator_key]
+                else:
+                    raise
+        except KeyError:
+            return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc)
+        except TypeError:
+            try:
+                # BaseModel or dataclass
+                discriminator_value = getattr(v, self.discriminator_key)
+            except (AttributeError, TypeError):
+                return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc)

-    def is_complex(self) ->bool:
+        if self.sub_fields_mapping is None:
+            assert cls is not None
+            raise ConfigError(
+                f'field "{self.name}" not yet prepared so type is still a ForwardRef, '
+                f'you might need to call {cls.__name__}.update_forward_refs().'
+            )
+
+        try:
+            sub_field = self.sub_fields_mapping[discriminator_value]
+        except (KeyError, TypeError):
+            # KeyError: `discriminator_value` is not in the dictionary.
+            # TypeError: `discriminator_value` is unhashable.
+            assert self.sub_fields_mapping is not None
+            return v, ErrorWrapper(
+                InvalidDiscriminator(
+                    discriminator_key=self.discriminator_key,
+                    discriminator_value=discriminator_value,
+                    allowed_values=list(self.sub_fields_mapping),
+                ),
+                loc,
+            )
+        else:
+            if not isinstance(loc, tuple):
+                loc = (loc,)
+            return sub_field.validate(v, values, loc=(*loc, display_as_type(sub_field.type_)), cls=cls)
+
+    def _apply_validators(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'], validators: 'ValidatorsList'
+    ) -> 'ValidateReturn':
+        for validator in validators:
+            try:
+                v = validator(cls, v, values, self, self.model_config)
+            except (ValueError, TypeError, AssertionError) as exc:
+                return v, ErrorWrapper(exc, loc)
+        return v, None
+
+    def is_complex(self) -> bool:
         """
         Whether the field is "complex", e.g. env variables should be parsed as JSON.
         """
-        pass
+        from pydantic.v1.main import BaseModel
+
+        return (
+            self.shape != SHAPE_SINGLETON
+            or hasattr(self.type_, '__pydantic_model__')
+            or lenient_issubclass(self.type_, (BaseModel, list, set, frozenset, dict))
+        )
+
+    def _type_display(self) -> PyObjectStr:
+        t = display_as_type(self.type_)
+
+        if self.shape in MAPPING_LIKE_SHAPES:
+            t = f'Mapping[{display_as_type(self.key_field.type_)}, {t}]'  # type: ignore
+        elif self.shape == SHAPE_TUPLE:
+            t = 'Tuple[{}]'.format(', '.join(display_as_type(f.type_) for f in self.sub_fields))  # type: ignore
+        elif self.shape == SHAPE_GENERIC:
+            assert self.sub_fields
+            t = '{}[{}]'.format(
+                display_as_type(self.type_), ', '.join(display_as_type(f.type_) for f in self.sub_fields)
+            )
+        elif self.shape != SHAPE_SINGLETON:
+            t = SHAPE_NAME_LOOKUP[self.shape].format(t)
+
+        if self.allow_none and (self.shape != SHAPE_SINGLETON or not self.sub_fields):
+            t = f'Optional[{t}]'
+        return PyObjectStr(t)
+
+    def __repr_args__(self) -> 'ReprArgs':
+        args = [('name', self.name), ('type', self._type_display()), ('required', self.required)]

-    def __repr_args__(self) ->'ReprArgs':
-        args = [('name', self.name), ('type', self._type_display()), (
-            'required', self.required)]
         if not self.required:
             if self.default_factory is not None:
-                args.append(('default_factory',
-                    f'<function {self.default_factory.__name__}>'))
+                args.append(('default_factory', f'<function {self.default_factory.__name__}>'))
             else:
                 args.append(('default', self.default))
+
         if self.alt_alias:
             args.append(('alias', self.alias))
         return args


 class ModelPrivateAttr(Representation):
-    __slots__ = 'default', 'default_factory'
+    __slots__ = ('default', 'default_factory')

-    def __init__(self, default: Any=Undefined, *, default_factory: Optional
-        [NoArgAnyCallable]=None) ->None:
+    def __init__(self, default: Any = Undefined, *, default_factory: Optional[NoArgAnyCallable] = None) -> None:
         self.default = default
         self.default_factory = default_factory

-    def __eq__(self, other: Any) ->bool:
-        return isinstance(other, self.__class__) and (self.default, self.
-            default_factory) == (other.default, other.default_factory)
+    def get_default(self) -> Any:
+        return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory()
+
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, self.__class__) and (self.default, self.default_factory) == (
+            other.default,
+            other.default_factory,
+        )


-def PrivateAttr(default: Any=Undefined, *, default_factory: Optional[
-    NoArgAnyCallable]=None) ->Any:
+def PrivateAttr(
+    default: Any = Undefined,
+    *,
+    default_factory: Optional[NoArgAnyCallable] = None,
+) -> Any:
     """
     Indicates that the attribute is only used internally and never mixed with regular fields.

@@ -362,10 +1234,20 @@ def PrivateAttr(default: Any=Undefined, *, default_factory: Optional[
     :param default_factory: callable that will be called when a default value is needed for this attribute
       If both `default` and `default_factory` are set, an error is raised.
     """
-    pass
+    if default is not Undefined and default_factory is not None:
+        raise ValueError('cannot specify both default and default_factory')
+
+    return ModelPrivateAttr(
+        default,
+        default_factory=default_factory,
+    )


 class DeferredType:
     """
     Used to postpone field preparation, while creating recursive generic models.
     """
+
+
+def is_finalvar_with_default_val(type_: Type[Any], val: Any) -> bool:
+    return is_finalvar(type_) and val is not Undefined and not isinstance(val, FieldInfo)
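
Most of the fields.py additions above fill in ModelField's discriminated-union support (`prepare_discriminated_union_sub_fields`, `_validate_discriminated_union`) and the public `Field()` constructor. A small usage sketch of how the pieces fit together; the Cat/Dog/Owner models are illustrative only, not taken from the patch:

    from typing import Union
    from typing_extensions import Literal

    from pydantic.v1 import BaseModel, Field, ValidationError

    class Cat(BaseModel):
        pet_type: Literal['cat']

    class Dog(BaseModel):
        pet_type: Literal['dog']

    class Owner(BaseModel):
        # Field(discriminator=...) sets FieldInfo.discriminator; ModelField then
        # builds sub_fields_mapping {'cat': <Cat field>, 'dog': <Dog field>}
        pet: Union[Cat, Dog] = Field(..., discriminator='pet_type')

    print(Owner(pet={'pet_type': 'dog'}).pet)  # validated against the Dog sub-field

    try:
        Owner(pet={'pet_type': 'fish'})
    except ValidationError:
        pass  # InvalidDiscriminator: allowed values are 'cat', 'dog'
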
diff --git a/pydantic/v1/generics.py b/pydantic/v1/generics.py
index 2211c7798..9a69f2b39 100644
--- a/pydantic/v1/generics.py
+++ b/pydantic/v1/generics.py
@@ -1,41 +1,78 @@
 import sys
 import types
 import typing
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, ForwardRef, Generic, Iterator, List, Mapping, Optional, Tuple, Type, TypeVar, Union, cast
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    ClassVar,
+    Dict,
+    ForwardRef,
+    Generic,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+)
 from weakref import WeakKeyDictionary, WeakValueDictionary
+
 from typing_extensions import Annotated, Literal as ExtLiteral
+
 from pydantic.v1.class_validators import gather_all_validators
 from pydantic.v1.fields import DeferredType
 from pydantic.v1.main import BaseModel, create_model
 from pydantic.v1.types import JsonWrapper
 from pydantic.v1.typing import display_as_type, get_all_type_hints, get_args, get_origin, typing_base
 from pydantic.v1.utils import all_identical, lenient_issubclass
+
 if sys.version_info >= (3, 10):
     from typing import _UnionGenericAlias
 if sys.version_info >= (3, 8):
     from typing import Literal
+
 GenericModelT = TypeVar('GenericModelT', bound='GenericModel')
-TypeVarType = Any
+TypeVarType = Any  # since mypy doesn't allow the use of TypeVar as a type
+
 CacheKey = Tuple[Type[Any], Any, Tuple[Any, ...]]
 Parametrization = Mapping[TypeVarType, Type[Any]]
-if sys.version_info >= (3, 9):
+
+# weak dictionaries allow the dynamically created parametrized versions of generic models to get collected
+# once they are no longer referenced by the caller.
+if sys.version_info >= (3, 9):  # Typing for weak dictionaries available at 3.9
     GenericTypesCache = WeakValueDictionary[CacheKey, Type[BaseModel]]
     AssignedParameters = WeakKeyDictionary[Type[BaseModel], Parametrization]
 else:
     GenericTypesCache = WeakValueDictionary
     AssignedParameters = WeakKeyDictionary
+
+# _generic_types_cache is a Mapping from __class_getitem__ arguments to the parametrized version of generic models.
+# This ensures multiple calls of e.g. A[B] return always the same class.
 _generic_types_cache = GenericTypesCache()
+
+# _assigned_parameters is a Mapping from parametrized version of generic models to assigned types of parametrizations
+# as captured during construction of the class (not instances).
+# E.g., for generic model `Model[A, B]`, when parametrized model `Model[int, str]` is created,
+# `Model[int, str]`: {A: int, B: str}` will be stored in `_assigned_parameters`.
+# (This information is only otherwise available after creation from the class name string).
 _assigned_parameters = AssignedParameters()


 class GenericModel(BaseModel):
     __slots__ = ()
     __concrete__: ClassVar[bool] = False
+
     if TYPE_CHECKING:
+        # Putting this in a TYPE_CHECKING block allows us to replace `if Generic not in cls.__bases__` with
+        # `not hasattr(cls, "__parameters__")`. This means we don't need to force non-concrete subclasses of
+        # `GenericModel` to also inherit from `Generic`, which would require changes to the use of `create_model` below.
         __parameters__: ClassVar[Tuple[TypeVarType, ...]]

-    def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any],
-        Tuple[Type[Any], ...]]) ->Type[Any]:
+    # Setting the return type as Type[Any] instead of Type[BaseModel] prevents PyCharm warnings
+    def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any], Tuple[Type[Any], ...]]) -> Type[Any]:
         """Instantiates a new class from a generic class `cls` and type variables `params`.

         :param params: Tuple of types the class is parameterized with. Given a generic class
@@ -47,72 +84,91 @@ class GenericModel(BaseModel):

         """

-        def _cache_key(_params: Any) ->CacheKey:
+        def _cache_key(_params: Any) -> CacheKey:
             args = get_args(_params)
+            # python returns a list for Callables, which is not hashable
             if len(args) == 2 and isinstance(args[0], list):
-                args = tuple(args[0]), args[1]
+                args = (tuple(args[0]), args[1])
             return cls, _params, args
+
         cached = _generic_types_cache.get(_cache_key(params))
         if cached is not None:
             return cached
         if cls.__concrete__ and Generic not in cls.__bases__:
-            raise TypeError(
-                'Cannot parameterize a concrete instantiation of a generic model'
-                )
+            raise TypeError('Cannot parameterize a concrete instantiation of a generic model')
         if not isinstance(params, tuple):
-            params = params,
-        if cls is GenericModel and any(isinstance(param, TypeVar) for param in
-            params):
-            raise TypeError(
-                'Type parameters should be placed on typing.Generic, not GenericModel'
-                )
+            params = (params,)
+        if cls is GenericModel and any(isinstance(param, TypeVar) for param in params):
+            raise TypeError('Type parameters should be placed on typing.Generic, not GenericModel')
         if not hasattr(cls, '__parameters__'):
-            raise TypeError(
-                f'Type {cls.__name__} must inherit from typing.Generic before being parameterized'
-                )
+            raise TypeError(f'Type {cls.__name__} must inherit from typing.Generic before being parameterized')
+
         check_parameters_count(cls, params)
-        typevars_map: Dict[TypeVarType, Type[Any]] = dict(zip(cls.
-            __parameters__, params))
-        if all_identical(typevars_map.keys(), typevars_map.values()
-            ) and typevars_map:
-            return cls
+        # Build map from generic typevars to passed params
+        typevars_map: Dict[TypeVarType, Type[Any]] = dict(zip(cls.__parameters__, params))
+        if all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map:
+            return cls  # if arguments are equal to parameters it's the same object
+
+        # Create new model with original model as parent inserting fields with DeferredType.
         model_name = cls.__concrete_name__(params)
         validators = gather_all_validators(cls)
+
         type_hints = get_all_type_hints(cls).items()
-        instance_type_hints = {k: v for k, v in type_hints if get_origin(v)
-             is not ClassVar}
-        fields = {k: (DeferredType(), cls.__fields__[k].field_info) for k in
-            instance_type_hints if k in cls.__fields__}
+        instance_type_hints = {k: v for k, v in type_hints if get_origin(v) is not ClassVar}
+
+        fields = {k: (DeferredType(), cls.__fields__[k].field_info) for k in instance_type_hints if k in cls.__fields__}
+
         model_module, called_globally = get_caller_frame_info()
-        created_model = cast(Type[GenericModel], create_model(model_name,
-            __module__=model_module or cls.__module__, __base__=(cls,) +
-            tuple(cls.__parameterized_bases__(typevars_map)), __config__=
-            None, __validators__=validators, __cls_kwargs__=None, **fields))
+        created_model = cast(
+            Type[GenericModel],  # casting ensures mypy is aware of the __concrete__ and __parameters__ attributes
+            create_model(
+                model_name,
+                __module__=model_module or cls.__module__,
+                __base__=(cls,) + tuple(cls.__parameterized_bases__(typevars_map)),
+                __config__=None,
+                __validators__=validators,
+                __cls_kwargs__=None,
+                **fields,
+            ),
+        )
+
         _assigned_parameters[created_model] = typevars_map
-        if called_globally:
+
+        if called_globally:  # create global reference and therefore allow pickling
             object_by_reference = None
             reference_name = model_name
-            reference_module_globals = sys.modules[created_model.__module__
-                ].__dict__
+            reference_module_globals = sys.modules[created_model.__module__].__dict__
             while object_by_reference is not created_model:
-                object_by_reference = reference_module_globals.setdefault(
-                    reference_name, created_model)
+                object_by_reference = reference_module_globals.setdefault(reference_name, created_model)
                 reference_name += '_'
+
         created_model.Config = cls.Config
-        new_params = tuple({param: None for param in
-            iter_contained_typevars(typevars_map.values())})
+
+        # Find any typevars that are still present in the model.
+        # If none are left, the model is fully "concrete", otherwise the new
+        # class is a generic class as well taking the found typevars as
+        # parameters.
+        new_params = tuple(
+            {param: None for param in iter_contained_typevars(typevars_map.values())}
+        )  # use dict as ordered set
         created_model.__concrete__ = not new_params
         if new_params:
             created_model.__parameters__ = new_params
+
+        # Save created model in cache so we don't end up creating duplicate
+        # models that should be identical.
         _generic_types_cache[_cache_key(params)] = created_model
         if len(params) == 1:
             _generic_types_cache[_cache_key(params[0])] = created_model
-        _prepare_model_fields(created_model, fields, instance_type_hints,
-            typevars_map)
+
+        # Recursively walk class type hints and replace generic typevars
+        # with concrete types that were passed.
+        _prepare_model_fields(created_model, fields, instance_type_hints, typevars_map)
+
         return created_model

     @classmethod
-    def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) ->str:
+    def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str:
         """Compute class name for child classes.

         :param params: Tuple of types the class is parameterized with. Given a generic class
@@ -128,8 +184,7 @@ class GenericModel(BaseModel):
         return f'{cls.__name__}[{params_component}]'

     @classmethod
-    def __parameterized_bases__(cls, typevars_map: Parametrization) ->Iterator[
-        Type[Any]]:
+    def __parameterized_bases__(cls, typevars_map: Parametrization) -> Iterator[Type[Any]]:
         """
         Returns unbound bases of cls parameterised to given type variables

@@ -152,32 +207,48 @@ class GenericModel(BaseModel):
         ```
         """

-        def build_base_model(base_model: Type[GenericModel], mapped_types:
-            Parametrization) ->Iterator[Type[GenericModel]]:
-            base_parameters = tuple(mapped_types[param] for param in
-                base_model.__parameters__)
+        def build_base_model(
+            base_model: Type[GenericModel], mapped_types: Parametrization
+        ) -> Iterator[Type[GenericModel]]:
+            base_parameters = tuple(mapped_types[param] for param in base_model.__parameters__)
             parameterized_base = base_model.__class_getitem__(base_parameters)
             if parameterized_base is base_model or parameterized_base is cls:
+                # Avoid duplication in MRO
                 return
             yield parameterized_base
+
         for base_model in cls.__bases__:
             if not issubclass(base_model, GenericModel):
+                # not a class that can be meaningfully parameterized
                 continue
             elif not getattr(base_model, '__parameters__', None):
+                # base_model is "GenericModel"  (and has no __parameters__)
+                # or
+                # base_model is already concrete, and will be included transitively via cls.
                 continue
             elif cls in _assigned_parameters:
                 if base_model in _assigned_parameters:
+                    # cls is partially parameterised but not from base_model
+                    # e.g. cls = B[S], base_model = A[S]
+                    # B[S][int] should subclass A[int],  (and will be transitively via B[int])
+                    # but it's not viable to consistently subclass types with arbitrary construction
+                    # So don't attempt to include A[S][int]
                     continue
-                else:
-                    mapped_types: Parametrization = {key: typevars_map.get(
-                        value, value) for key, value in
-                        _assigned_parameters[cls].items()}
+                else:  # base_model not in _assigned_parameters:
+                    # cls is partially parameterized, base_model is original generic
+                    # e.g.  cls = B[str, T], base_model = B[S, T]
+                    # Need to determine the mapping for the base_model parameters
+                    mapped_types: Parametrization = {
+                        key: typevars_map.get(value, value) for key, value in _assigned_parameters[cls].items()
+                    }
                     yield from build_base_model(base_model, mapped_types)
             else:
+                # cls is base generic, so base_class has a distinct base
+                # can construct the Parameterised base model using typevars_map directly
                 yield from build_base_model(base_model, typevars_map)


-def replace_types(type_: Any, type_map: Mapping[Any, Any]) ->Any:
+def replace_types(type_: Any, type_map: Mapping[Any, Any]) -> Any:
     """Return type with all occurrences of `type_map` keys recursively replaced with their values.

     :param type_: Any type, class or generic alias
@@ -189,18 +260,103 @@ def replace_types(type_: Any, type_map: Mapping[Any, Any]) ->Any:
     Tuple[int, Union[List[int], float]]

     """
-    pass
+    if not type_map:
+        return type_
+
+    type_args = get_args(type_)
+    origin_type = get_origin(type_)
+
+    if origin_type is Annotated:
+        annotated_type, *annotations = type_args
+        return Annotated[replace_types(annotated_type, type_map), tuple(annotations)]
+
+    if (origin_type is ExtLiteral) or (sys.version_info >= (3, 8) and origin_type is Literal):
+        return type_map.get(type_, type_)
+    # Having type args is a good indicator that this is a typing module
+    # class instantiation or a generic alias of some sort.
+    if type_args:
+        resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args)
+        if all_identical(type_args, resolved_type_args):
+            # If all arguments are the same, there is no need to modify the
+            # type or create a new object at all
+            return type_
+        if (
+            origin_type is not None
+            and isinstance(type_, typing_base)
+            and not isinstance(origin_type, typing_base)
+            and getattr(type_, '_name', None) is not None
+        ):
+            # In python < 3.9 generic aliases don't exist so any of these like `list`,
+            # `type` or `collections.abc.Callable` need to be translated.
+            # See: https://www.python.org/dev/peps/pep-0585
+            origin_type = getattr(typing, type_._name)
+        assert origin_type is not None
+        # PEP-604 syntax (Ex.: list | str) is represented with a types.UnionType object that does not have __getitem__.
+        # We also cannot use isinstance() since we have to compare types.
+        if sys.version_info >= (3, 10) and origin_type is types.UnionType:  # noqa: E721
+            return _UnionGenericAlias(origin_type, resolved_type_args)
+        return origin_type[resolved_type_args]
+
+    # We handle pydantic generic models separately as they don't have the same
+    # semantics as "typing" classes or generic aliases
+    if not origin_type and lenient_issubclass(type_, GenericModel) and not type_.__concrete__:
+        type_args = type_.__parameters__
+        resolved_type_args = tuple(replace_types(t, type_map) for t in type_args)
+        if all_identical(type_args, resolved_type_args):
+            return type_
+        return type_[resolved_type_args]
+
+    # Handle special case for typehints that can have lists as arguments.
+    # `typing.Callable[[int, str], int]` is an example for this.
+    if isinstance(type_, (List, list)):
+        resolved_list = list(replace_types(element, type_map) for element in type_)
+        if all_identical(type_, resolved_list):
+            return type_
+        return resolved_list
+
+    # For JsonWrapper, we need to handle its inner type to allow correct parsing
+    # of generic Json arguments like Json[T]
+    if not origin_type and lenient_issubclass(type_, JsonWrapper):
+        type_.inner_type = replace_types(type_.inner_type, type_map)
+        return type_
+
+    # If all else fails, we try to resolve the type directly and otherwise just
+    # return the input with no modifications.
+    new_type = type_map.get(type_, type_)
+    # Convert string to ForwardRef
+    if isinstance(new_type, str):
+        return ForwardRef(new_type)
+    else:
+        return new_type
+
+
+def check_parameters_count(cls: Type[GenericModel], parameters: Tuple[Any, ...]) -> None:
+    actual = len(parameters)
+    expected = len(cls.__parameters__)
+    if actual != expected:
+        description = 'many' if actual > expected else 'few'
+        raise TypeError(f'Too {description} parameters for {cls.__name__}; actual {actual}, expected {expected}')


 DictValues: Type[Any] = {}.values().__class__


-def iter_contained_typevars(v: Any) ->Iterator[TypeVarType]:
+def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]:
     """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found."""
-    pass
+    if isinstance(v, TypeVar):
+        yield v
+    elif hasattr(v, '__parameters__') and not get_origin(v) and lenient_issubclass(v, GenericModel):
+        yield from v.__parameters__
+    elif isinstance(v, (DictValues, list)):
+        for var in v:
+            yield from iter_contained_typevars(var)
+    else:
+        args = get_args(v)
+        for arg in args:
+            yield from iter_contained_typevars(arg)


-def get_caller_frame_info() ->Tuple[Optional[str], bool]:
+def get_caller_frame_info() -> Tuple[Optional[str], bool]:
     """
     Used inside a function to check whether it was called globally

@@ -208,13 +364,37 @@ def get_caller_frame_info() ->Tuple[Optional[str], bool]:

     :returns Tuple[module_name, called_globally]
     """
-    pass
+    try:
+        previous_caller_frame = sys._getframe(2)
+    except ValueError as e:
+        raise RuntimeError('This function must be used inside another function') from e
+    except AttributeError:  # sys module does not have _getframe function, so there's nothing we can do about it
+        return None, False
+    frame_globals = previous_caller_frame.f_globals
+    return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals


-def _prepare_model_fields(created_model: Type[GenericModel], fields:
-    Mapping[str, Any], instance_type_hints: Mapping[str, type],
-    typevars_map: Mapping[Any, type]) ->None:
+def _prepare_model_fields(
+    created_model: Type[GenericModel],
+    fields: Mapping[str, Any],
+    instance_type_hints: Mapping[str, type],
+    typevars_map: Mapping[Any, type],
+) -> None:
     """
     Replace DeferredType fields with concrete type hints and prepare them.
     """
-    pass
+
+    for key, field in created_model.__fields__.items():
+        if key not in fields:
+            assert field.type_.__class__ is not DeferredType
+            # https://github.com/nedbat/coveragepy/issues/198
+            continue  # pragma: no cover
+
+        assert field.type_.__class__ is DeferredType, field.type_.__class__
+
+        field_type_hint = instance_type_hints[key]
+        concrete_type = replace_types(field_type_hint, typevars_map)
+        field.type_ = concrete_type
+        field.outer_type_ = concrete_type
+        field.prepare()
+        created_model.__annotations__[key] = concrete_type
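
Taken together, the `__class_getitem__`, `replace_types` and `_prepare_model_fields` implementations above are what make `Model[int]` work for v1 generic models: a concrete subclass is created once, cached in `_generic_types_cache`, and its `DeferredType` fields are resolved by substituting the typevar map. A rough sketch of the expected behaviour using the public `pydantic.v1` API (the `Wrapper` model is illustrative):

```python
from typing import Generic, List, Tuple, TypeVar, Union

from pydantic.v1.generics import GenericModel, replace_types

T = TypeVar('T')


class Wrapper(GenericModel, Generic[T]):
    items: List[T]


IntWrapper = Wrapper[int]           # builds and caches a concrete subclass
assert Wrapper[int] is IntWrapper   # second lookup is served from _generic_types_cache
print(IntWrapper(items=['1', 2]).items)  # [1, 2] - values validated/coerced to int

# replace_types recursively swaps any type found in the mapping:
print(replace_types(Tuple[str, Union[List[str], float]], {str: int}))
# Tuple[int, Union[List[int], float]]  (per the doctest above; exact repr varies by Python version)
```
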
diff --git a/pydantic/v1/json.py b/pydantic/v1/json.py
index 9de1baa27..41d0d5fca 100644
--- a/pydantic/v1/json.py
+++ b/pydantic/v1/json.py
@@ -8,13 +8,19 @@ from re import Pattern
 from types import GeneratorType
 from typing import Any, Callable, Dict, Type, Union
 from uuid import UUID
+
 from pydantic.v1.color import Color
 from pydantic.v1.networks import NameEmail
 from pydantic.v1.types import SecretBytes, SecretStr
+
 __all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat'


-def decimal_encoder(dec_value: Decimal) ->Union[int, float]:
+def isoformat(o: Union[datetime.date, datetime.time]) -> str:
+    return o.isoformat()
+
+
+def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
     """
     Encodes a Decimal as int if there's no exponent, otherwise float

@@ -29,21 +35,78 @@ def decimal_encoder(dec_value: Decimal) ->Union[int, float]:
     >>> decimal_encoder(Decimal("1"))
     1
     """
-    pass
+    if dec_value.as_tuple().exponent >= 0:
+        return int(dec_value)
+    else:
+        return float(dec_value)
+
+
+ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = {
+    bytes: lambda o: o.decode(),
+    Color: str,
+    datetime.date: isoformat,
+    datetime.datetime: isoformat,
+    datetime.time: isoformat,
+    datetime.timedelta: lambda td: td.total_seconds(),
+    Decimal: decimal_encoder,
+    Enum: lambda o: o.value,
+    frozenset: list,
+    deque: list,
+    GeneratorType: list,
+    IPv4Address: str,
+    IPv4Interface: str,
+    IPv4Network: str,
+    IPv6Address: str,
+    IPv6Interface: str,
+    IPv6Network: str,
+    NameEmail: str,
+    Path: str,
+    Pattern: lambda o: o.pattern,
+    SecretBytes: str,
+    SecretStr: str,
+    set: list,
+    UUID: str,
+}
+
+
+def pydantic_encoder(obj: Any) -> Any:
+    from dataclasses import asdict, is_dataclass
+
+    from pydantic.v1.main import BaseModel
+
+    if isinstance(obj, BaseModel):
+        return obj.dict()
+    elif is_dataclass(obj):
+        return asdict(obj)
+
+    # Check the class type and its superclasses for a matching encoder
+    for base in obj.__class__.__mro__[:-1]:
+        try:
+            encoder = ENCODERS_BY_TYPE[base]
+        except KeyError:
+            continue
+        return encoder(obj)
+    else:  # We have exited the for loop without finding a suitable encoder
+        raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable")
+

+def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any:
+    # Check the class type and its superclasses for a matching encoder
+    for base in obj.__class__.__mro__[:-1]:
+        try:
+            encoder = type_encoders[base]
+        except KeyError:
+            continue

-ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = {bytes: lambda o:
-    o.decode(), Color: str, datetime.date: isoformat, datetime.datetime:
-    isoformat, datetime.time: isoformat, datetime.timedelta: lambda td: td.
-    total_seconds(), Decimal: decimal_encoder, Enum: lambda o: o.value,
-    frozenset: list, deque: list, GeneratorType: list, IPv4Address: str,
-    IPv4Interface: str, IPv4Network: str, IPv6Address: str, IPv6Interface:
-    str, IPv6Network: str, NameEmail: str, Path: str, Pattern: lambda o: o.
-    pattern, SecretBytes: str, SecretStr: str, set: list, UUID: str}
+        return encoder(obj)
+    else:  # We have exited the for loop without finding a suitable encoder
+        return pydantic_encoder(obj)


-def timedelta_isoformat(td: datetime.timedelta) ->str:
+def timedelta_isoformat(td: datetime.timedelta) -> str:
     """
     ISO 8601 encoding for Python timedelta object.
     """
-    pass
+    minutes, seconds = divmod(td.seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S'
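
The encoder table and `pydantic_encoder` restored above are the fallback used by `BaseModel.json()`, and they can also be handed directly to `json.dumps` as `default`. A small sketch of the intended behaviour (the sample values are illustrative; the commented outputs follow from the code above):

```python
import datetime
import json
import uuid

from pydantic.v1.json import pydantic_encoder, timedelta_isoformat

data = {
    'when': datetime.datetime(2024, 1, 1, 12, 0),
    'id': uuid.UUID(int=1),
}
# ENCODERS_BY_TYPE drives the conversion: datetimes via isoformat(), UUIDs via str()
print(json.dumps(data, default=pydantic_encoder))
# {"when": "2024-01-01T12:00:00", "id": "00000000-0000-0000-0000-000000000001"}

# timedeltas are encoded as total seconds by default; timedelta_isoformat
# produces an ISO 8601 duration string instead
print(timedelta_isoformat(datetime.timedelta(hours=1, minutes=30)))
# P0DT1H30M0.000000S
```
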
diff --git a/pydantic/v1/main.py b/pydantic/v1/main.py
index 48a9e5a2a..cd376037c 100644
--- a/pydantic/v1/main.py
+++ b/pydantic/v1/main.py
@@ -5,42 +5,126 @@ from enum import Enum
 from functools import partial
 from pathlib import Path
 from types import FunctionType, prepare_class, resolve_bases
-from typing import TYPE_CHECKING, AbstractSet, Any, Callable, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, cast, no_type_check, overload
+from typing import (
+    TYPE_CHECKING,
+    AbstractSet,
+    Any,
+    Callable,
+    ClassVar,
+    Dict,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+    no_type_check,
+    overload,
+)
+
 from typing_extensions import dataclass_transform
+
 from pydantic.v1.class_validators import ValidatorGroup, extract_root_validators, extract_validators, inherit_validators
 from pydantic.v1.config import BaseConfig, Extra, inherit_config, prepare_config
 from pydantic.v1.error_wrappers import ErrorWrapper, ValidationError
 from pydantic.v1.errors import ConfigError, DictError, ExtraError, MissingError
-from pydantic.v1.fields import MAPPING_LIKE_SHAPES, Field, ModelField, ModelPrivateAttr, PrivateAttr, Undefined, is_finalvar_with_default_val
+from pydantic.v1.fields import (
+    MAPPING_LIKE_SHAPES,
+    Field,
+    ModelField,
+    ModelPrivateAttr,
+    PrivateAttr,
+    Undefined,
+    is_finalvar_with_default_val,
+)
 from pydantic.v1.json import custom_pydantic_encoder, pydantic_encoder
 from pydantic.v1.parse import Protocol, load_file, load_str_bytes
 from pydantic.v1.schema import default_ref_template, model_schema
 from pydantic.v1.types import PyObject, StrBytes
-from pydantic.v1.typing import AnyCallable, get_args, get_origin, is_classvar, is_namedtuple, is_union, resolve_annotations, update_model_forward_refs
-from pydantic.v1.utils import DUNDER_ATTRIBUTES, ROOT_KEY, ClassAttribute, GetterDict, Representation, ValueItems, generate_model_signature, is_valid_field, is_valid_private_name, lenient_issubclass, sequence_like, smart_deepcopy, unique_list, validate_field_name
+from pydantic.v1.typing import (
+    AnyCallable,
+    get_args,
+    get_origin,
+    is_classvar,
+    is_namedtuple,
+    is_union,
+    resolve_annotations,
+    update_model_forward_refs,
+)
+from pydantic.v1.utils import (
+    DUNDER_ATTRIBUTES,
+    ROOT_KEY,
+    ClassAttribute,
+    GetterDict,
+    Representation,
+    ValueItems,
+    generate_model_signature,
+    is_valid_field,
+    is_valid_private_name,
+    lenient_issubclass,
+    sequence_like,
+    smart_deepcopy,
+    unique_list,
+    validate_field_name,
+)
+
 if TYPE_CHECKING:
     from inspect import Signature
+
     from pydantic.v1.class_validators import ValidatorListDict
     from pydantic.v1.types import ModelOrDc
-    from pydantic.v1.typing import AbstractSetIntStr, AnyClassMethod, CallableGenerator, DictAny, DictStrAny, MappingIntStrAny, ReprArgs, SetStr, TupleGenerator
+    from pydantic.v1.typing import (
+        AbstractSetIntStr,
+        AnyClassMethod,
+        CallableGenerator,
+        DictAny,
+        DictStrAny,
+        MappingIntStrAny,
+        ReprArgs,
+        SetStr,
+        TupleGenerator,
+    )
+
     Model = TypeVar('Model', bound='BaseModel')
+
 __all__ = 'BaseModel', 'create_model', 'validate_model'
+
 _T = TypeVar('_T')
-ANNOTATED_FIELD_UNTOUCHED_TYPES: Tuple[Any, ...] = (property, type,
-    classmethod, staticmethod)
-UNTOUCHED_TYPES: Tuple[Any, ...] = (FunctionType,
-    ) + ANNOTATED_FIELD_UNTOUCHED_TYPES
+
+
+def validate_custom_root_type(fields: Dict[str, ModelField]) -> None:
+    if len(fields) > 1:
+        raise ValueError(f'{ROOT_KEY} cannot be mixed with other fields')
+
+
+def generate_hash_function(frozen: bool) -> Optional[Callable[[Any], int]]:
+    def hash_function(self_: Any) -> int:
+        return hash(self_.__class__) + hash(tuple(self_.__dict__.values()))
+
+    return hash_function if frozen else None
+
+
+# If a field is of type `Callable`, its default value should be a function and cannot be ignored.
+ANNOTATED_FIELD_UNTOUCHED_TYPES: Tuple[Any, ...] = (property, type, classmethod, staticmethod)
+# When creating a `BaseModel` instance, we bypass all the methods, properties... added to the model
+UNTOUCHED_TYPES: Tuple[Any, ...] = (FunctionType,) + ANNOTATED_FIELD_UNTOUCHED_TYPES
+# Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we need to add this extra
+# (somewhat hacky) boolean to keep track of whether we've created the `BaseModel` class yet, and therefore whether it's
+# safe to refer to it. If it *hasn't* been created, we assume that the `__new__` call we're in the middle of is for
+# the `BaseModel` class, since that's defined immediately after the metaclass.
 _is_base_model_class_defined = False


 @dataclass_transform(kw_only_default=True, field_specifiers=(Field,))
 class ModelMetaclass(ABCMeta):
-
-    @no_type_check
-    def __new__(mcs, name, bases, namespace, **kwargs):
+    @no_type_check  # noqa C901
+    def __new__(mcs, name, bases, namespace, **kwargs):  # noqa C901
         fields: Dict[str, ModelField] = {}
         config = BaseConfig
         validators: 'ValidatorListDict' = {}
+
         pre_root_validators, post_root_validators = [], []
         private_attributes: Dict[str, ModelPrivateAttr] = {}
         base_private_attributes: Dict[str, ModelPrivateAttr] = {}
@@ -48,150 +132,176 @@ class ModelMetaclass(ABCMeta):
         slots = {slots} if isinstance(slots, str) else set(slots)
         class_vars: SetStr = set()
         hash_func: Optional[Callable[[Any], int]] = None
+
         for base in reversed(bases):
-            if _is_base_model_class_defined and issubclass(base, BaseModel
-                ) and base != BaseModel:
+            if _is_base_model_class_defined and issubclass(base, BaseModel) and base != BaseModel:
                 fields.update(smart_deepcopy(base.__fields__))
                 config = inherit_config(base.__config__, config)
-                validators = inherit_validators(base.__validators__, validators
-                    )
+                validators = inherit_validators(base.__validators__, validators)
                 pre_root_validators += base.__pre_root_validators__
                 post_root_validators += base.__post_root_validators__
                 base_private_attributes.update(base.__private_attributes__)
                 class_vars.update(base.__class_vars__)
                 hash_func = base.__hash__
+
         resolve_forward_refs = kwargs.pop('__resolve_forward_refs__', True)
-        allowed_config_kwargs: SetStr = {key for key in dir(config) if not
-            (key.startswith('__') and key.endswith('__'))}
-        config_kwargs = {key: kwargs.pop(key) for key in kwargs.keys() &
-            allowed_config_kwargs}
+        allowed_config_kwargs: SetStr = {
+            key
+            for key in dir(config)
+            if not (key.startswith('__') and key.endswith('__'))  # skip dunder methods and attributes
+        }
+        config_kwargs = {key: kwargs.pop(key) for key in kwargs.keys() & allowed_config_kwargs}
         config_from_namespace = namespace.get('Config')
         if config_kwargs and config_from_namespace:
-            raise TypeError(
-                'Specifying config in two places is ambiguous, use either Config attribute or class kwargs'
-                )
+            raise TypeError('Specifying config in two places is ambiguous, use either Config attribute or class kwargs')
         config = inherit_config(config_from_namespace, config, **config_kwargs)
-        validators = inherit_validators(extract_validators(namespace),
-            validators)
+
+        validators = inherit_validators(extract_validators(namespace), validators)
         vg = ValidatorGroup(validators)
+
         for f in fields.values():
             f.set_config(config)
             extra_validators = vg.get_validators(f.name)
             if extra_validators:
                 f.class_validators.update(extra_validators)
+                # re-run prepare to add extra validators
                 f.populate_validators()
+
         prepare_config(config, name)
+
         untouched_types = ANNOTATED_FIELD_UNTOUCHED_TYPES

-        def is_untouched(v: Any) ->bool:
-            return isinstance(v, untouched_types
-                ) or v.__class__.__name__ == 'cython_function_or_method'
-        if (namespace.get('__module__'), namespace.get('__qualname__')) != (
-            'pydantic.main', 'BaseModel'):
-            annotations = resolve_annotations(namespace.get(
-                '__annotations__', {}), namespace.get('__module__', None))
+        def is_untouched(v: Any) -> bool:
+            return isinstance(v, untouched_types) or v.__class__.__name__ == 'cython_function_or_method'
+
+        if (namespace.get('__module__'), namespace.get('__qualname__')) != ('pydantic.main', 'BaseModel'):
+            annotations = resolve_annotations(namespace.get('__annotations__', {}), namespace.get('__module__', None))
+            # annotation only fields need to come first in fields
             for ann_name, ann_type in annotations.items():
                 if is_classvar(ann_type):
                     class_vars.add(ann_name)
-                elif is_finalvar_with_default_val(ann_type, namespace.get(
-                    ann_name, Undefined)):
+                elif is_finalvar_with_default_val(ann_type, namespace.get(ann_name, Undefined)):
                     class_vars.add(ann_name)
                 elif is_valid_field(ann_name):
                     validate_field_name(bases, ann_name)
                     value = namespace.get(ann_name, Undefined)
-                    allowed_types = get_args(ann_type) if is_union(get_origin
-                        (ann_type)) else (ann_type,)
-                    if is_untouched(value
-                        ) and ann_type != PyObject and not any(
-                        lenient_issubclass(get_origin(allowed_type), Type) for
-                        allowed_type in allowed_types):
+                    allowed_types = get_args(ann_type) if is_union(get_origin(ann_type)) else (ann_type,)
+                    if (
+                        is_untouched(value)
+                        and ann_type != PyObject
+                        and not any(
+                            lenient_issubclass(get_origin(allowed_type), Type) for allowed_type in allowed_types
+                        )
+                    ):
                         continue
-                    fields[ann_name] = ModelField.infer(name=ann_name,
-                        value=value, annotation=ann_type, class_validators=
-                        vg.get_validators(ann_name), config=config)
+                    fields[ann_name] = ModelField.infer(
+                        name=ann_name,
+                        value=value,
+                        annotation=ann_type,
+                        class_validators=vg.get_validators(ann_name),
+                        config=config,
+                    )
                 elif ann_name not in namespace and config.underscore_attrs_are_private:
                     private_attributes[ann_name] = PrivateAttr()
+
             untouched_types = UNTOUCHED_TYPES + config.keep_untouched
             for var_name, value in namespace.items():
-                can_be_changed = (var_name not in class_vars and not
-                    is_untouched(value))
+                can_be_changed = var_name not in class_vars and not is_untouched(value)
                 if isinstance(value, ModelPrivateAttr):
                     if not is_valid_private_name(var_name):
                         raise NameError(
-                            f'Private attributes "{var_name}" must not be a valid field name; Use sunder or dunder names, e. g. "_{var_name}" or "__{var_name}__"'
-                            )
+                            f'Private attributes "{var_name}" must not be a valid field name; '
+                            f'Use sunder or dunder names, e. g. "_{var_name}" or "__{var_name}__"'
+                        )
                     private_attributes[var_name] = value
-                elif config.underscore_attrs_are_private and is_valid_private_name(
-                    var_name) and can_be_changed:
+                elif config.underscore_attrs_are_private and is_valid_private_name(var_name) and can_be_changed:
                     private_attributes[var_name] = PrivateAttr(default=value)
-                elif is_valid_field(var_name
-                    ) and var_name not in annotations and can_be_changed:
+                elif is_valid_field(var_name) and var_name not in annotations and can_be_changed:
                     validate_field_name(bases, var_name)
-                    inferred = ModelField.infer(name=var_name, value=value,
+                    inferred = ModelField.infer(
+                        name=var_name,
+                        value=value,
                         annotation=annotations.get(var_name, Undefined),
                         class_validators=vg.get_validators(var_name),
-                        config=config)
+                        config=config,
+                    )
                     if var_name in fields:
-                        if lenient_issubclass(inferred.type_, fields[
-                            var_name].type_):
+                        if lenient_issubclass(inferred.type_, fields[var_name].type_):
                             inferred.type_ = fields[var_name].type_
                         else:
                             raise TypeError(
-                                f'The type of {name}.{var_name} differs from the new default value; if you wish to change the type of this field, please use a type annotation'
-                                )
+                                f'The type of {name}.{var_name} differs from the new default value; '
+                                f'if you wish to change the type of this field, please use a type annotation'
+                            )
                     fields[var_name] = inferred
+
         _custom_root_type = ROOT_KEY in fields
         if _custom_root_type:
             validate_custom_root_type(fields)
         vg.check_for_unused()
         if config.json_encoders:
-            json_encoder = partial(custom_pydantic_encoder, config.
-                json_encoders)
+            json_encoder = partial(custom_pydantic_encoder, config.json_encoders)
         else:
             json_encoder = pydantic_encoder
         pre_rv_new, post_rv_new = extract_root_validators(namespace)
+
         if hash_func is None:
             hash_func = generate_hash_function(config.frozen)
-        exclude_from_namespace = fields | private_attributes.keys() | {
-            '__slots__'}
-        new_namespace = {'__config__': config, '__fields__': fields,
-            '__exclude_fields__': {name: field.field_info.exclude for name,
-            field in fields.items() if field.field_info.exclude is not None
-            } or None, '__include_fields__': {name: field.field_info.
-            include for name, field in fields.items() if field.field_info.
-            include is not None} or None, '__validators__': vg.validators,
-            '__pre_root_validators__': unique_list(pre_root_validators +
-            pre_rv_new, name_factory=lambda v: v.__name__),
-            '__post_root_validators__': unique_list(post_root_validators +
-            post_rv_new, name_factory=lambda skip_on_failure_and_v:
-            skip_on_failure_and_v[1].__name__), '__schema_cache__': {},
+
+        exclude_from_namespace = fields | private_attributes.keys() | {'__slots__'}
+        new_namespace = {
+            '__config__': config,
+            '__fields__': fields,
+            '__exclude_fields__': {
+                name: field.field_info.exclude for name, field in fields.items() if field.field_info.exclude is not None
+            }
+            or None,
+            '__include_fields__': {
+                name: field.field_info.include for name, field in fields.items() if field.field_info.include is not None
+            }
+            or None,
+            '__validators__': vg.validators,
+            '__pre_root_validators__': unique_list(
+                pre_root_validators + pre_rv_new,
+                name_factory=lambda v: v.__name__,
+            ),
+            '__post_root_validators__': unique_list(
+                post_root_validators + post_rv_new,
+                name_factory=lambda skip_on_failure_and_v: skip_on_failure_and_v[1].__name__,
+            ),
+            '__schema_cache__': {},
             '__json_encoder__': staticmethod(json_encoder),
             '__custom_root_type__': _custom_root_type,
-            '__private_attributes__': {**base_private_attributes, **
-            private_attributes}, '__slots__': slots | private_attributes.
-            keys(), '__hash__': hash_func, '__class_vars__': class_vars, **
-            {n: v for n, v in namespace.items() if n not in
-            exclude_from_namespace}}
+            '__private_attributes__': {**base_private_attributes, **private_attributes},
+            '__slots__': slots | private_attributes.keys(),
+            '__hash__': hash_func,
+            '__class_vars__': class_vars,
+            **{n: v for n, v in namespace.items() if n not in exclude_from_namespace},
+        }
+
         cls = super().__new__(mcs, name, bases, new_namespace, **kwargs)
-        cls.__signature__ = ClassAttribute('__signature__',
-            generate_model_signature(cls.__init__, fields, config))
+        # set __signature__ attr only for model class, but not for its instances
+        cls.__signature__ = ClassAttribute('__signature__', generate_model_signature(cls.__init__, fields, config))
         if resolve_forward_refs:
             cls.__try_update_forward_refs__()
+
+        # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487
+        # for attributes not in `new_namespace` (e.g. private attributes)
         for name, obj in namespace.items():
             if name not in new_namespace:
                 set_name = getattr(obj, '__set_name__', None)
                 if callable(set_name):
                     set_name(cls, name)
+
         return cls

-    def __instancecheck__(self, instance: Any) ->bool:
+    def __instancecheck__(self, instance: Any) -> bool:
         """
         Avoid calling ABC _abc_subclasscheck unless we're pretty sure.

         See #3829 and python/cpython#92810
         """
-        return hasattr(instance, '__fields__') and super().__instancecheck__(
-            instance)
+        return hasattr(instance, '__fields__') and super().__instancecheck__(instance)


 object_setattr = object.__setattr__
@@ -199,6 +309,7 @@ object_setattr = object.__setattr__

 class BaseModel(Representation, metaclass=ModelMetaclass):
     if TYPE_CHECKING:
+        # populated by the metaclass, defined here to help IDEs only
         __fields__: ClassVar[Dict[str, ModelField]] = {}
         __include_fields__: ClassVar[Optional[Mapping[str, Any]]] = None
         __exclude_fields__: ClassVar[Optional[Mapping[str, Any]]] = None
@@ -213,18 +324,19 @@ class BaseModel(Representation, metaclass=ModelMetaclass):
         __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]]
         __class_vars__: ClassVar[SetStr]
         __fields_set__: ClassVar[SetStr] = set()
+
     Config = BaseConfig
-    __slots__ = '__dict__', '__fields_set__'
-    __doc__ = ''
+    __slots__ = ('__dict__', '__fields_set__')
+    __doc__ = ''  # Null out the Representation docstring

-    def __init__(__pydantic_self__, **data: Any) ->None:
+    def __init__(__pydantic_self__, **data: Any) -> None:
         """
         Create a new model by parsing and validating input data from keyword arguments.

         Raises ValidationError if the input data cannot be parsed to form a valid model.
         """
-        values, fields_set, validation_error = validate_model(__pydantic_self__
-            .__class__, data)
+        # Uses something other than `self` as the first arg to allow "self" as a settable attribute
+        values, fields_set, validation_error = validate_model(__pydantic_self__.__class__, data)
         if validation_error:
             raise validation_error
         try:
@@ -232,48 +344,47 @@ class BaseModel(Representation, metaclass=ModelMetaclass):
         except TypeError as e:
             raise TypeError(
                 'Model values must be a dict; you may not have returned a dictionary from a root validator'
-                ) from e
+            ) from e
         object_setattr(__pydantic_self__, '__fields_set__', fields_set)
         __pydantic_self__._init_private_attributes()

     @no_type_check
-    def __setattr__(self, name, value):
+    def __setattr__(self, name, value):  # noqa: C901 (ignore complexity)
         if name in self.__private_attributes__ or name in DUNDER_ATTRIBUTES:
             return object_setattr(self, name, value)
-        if (self.__config__.extra is not Extra.allow and name not in self.
-            __fields__):
-            raise ValueError(
-                f'"{self.__class__.__name__}" object has no field "{name}"')
+
+        if self.__config__.extra is not Extra.allow and name not in self.__fields__:
+            raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"')
         elif not self.__config__.allow_mutation or self.__config__.frozen:
-            raise TypeError(
-                f'"{self.__class__.__name__}" is immutable and does not support item assignment'
-                )
+            raise TypeError(f'"{self.__class__.__name__}" is immutable and does not support item assignment')
         elif name in self.__fields__ and self.__fields__[name].final:
             raise TypeError(
                 f'"{self.__class__.__name__}" object "{name}" field is final and does not support reassignment'
-                )
+            )
         elif self.__config__.validate_assignment:
             new_values = {**self.__dict__, name: value}
+
             for validator in self.__pre_root_validators__:
                 try:
                     new_values = validator(self.__class__, new_values)
                 except (ValueError, TypeError, AssertionError) as exc:
-                    raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)],
-                        self.__class__)
+                    raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], self.__class__)
+
             known_field = self.__fields__.get(name, None)
             if known_field:
+                # We want to
+                # - make sure validators are called without the current value for this field inside `values`
+                # - keep other values (e.g. submodels) untouched (using `BaseModel.dict()` will change them into dicts)
+                # - keep the order of the fields
                 if not known_field.field_info.allow_mutation:
-                    raise TypeError(
-                        f'"{known_field.name}" has allow_mutation set to False and cannot be assigned'
-                        )
-                dict_without_original_value = {k: v for k, v in self.
-                    __dict__.items() if k != name}
-                value, error_ = known_field.validate(value,
-                    dict_without_original_value, loc=name, cls=self.__class__)
+                    raise TypeError(f'"{known_field.name}" has allow_mutation set to False and cannot be assigned')
+                dict_without_original_value = {k: v for k, v in self.__dict__.items() if k != name}
+                value, error_ = known_field.validate(value, dict_without_original_value, loc=name, cls=self.__class__)
                 if error_:
                     raise ValidationError([error_], self.__class__)
                 else:
                     new_values[name] = value
+
             errors = []
             for skip_on_failure, validator in self.__post_root_validators__:
                 if skip_on_failure and errors:
@@ -284,65 +395,243 @@ class BaseModel(Representation, metaclass=ModelMetaclass):
                     errors.append(ErrorWrapper(exc, loc=ROOT_KEY))
             if errors:
                 raise ValidationError(errors, self.__class__)
+
+            # update the whole __dict__ as other values than just `value`
+            # may be changed (e.g. with `root_validator`)
             object_setattr(self, '__dict__', new_values)
         else:
             self.__dict__[name] = value
+
         self.__fields_set__.add(name)

-    def __getstate__(self) ->'DictAny':
-        private_attrs = ((k, getattr(self, k, Undefined)) for k in self.
-            __private_attributes__)
-        return {'__dict__': self.__dict__, '__fields_set__': self.
-            __fields_set__, '__private_attribute_values__': {k: v for k, v in
-            private_attrs if v is not Undefined}}
+    def __getstate__(self) -> 'DictAny':
+        private_attrs = ((k, getattr(self, k, Undefined)) for k in self.__private_attributes__)
+        return {
+            '__dict__': self.__dict__,
+            '__fields_set__': self.__fields_set__,
+            '__private_attribute_values__': {k: v for k, v in private_attrs if v is not Undefined},
+        }

-    def __setstate__(self, state: 'DictAny') ->None:
+    def __setstate__(self, state: 'DictAny') -> None:
         object_setattr(self, '__dict__', state['__dict__'])
         object_setattr(self, '__fields_set__', state['__fields_set__'])
-        for name, value in state.get('__private_attribute_values__', {}).items(
-            ):
+        for name, value in state.get('__private_attribute_values__', {}).items():
             object_setattr(self, name, value)

-    def dict(self, *, include: Optional[Union['AbstractSetIntStr',
-        'MappingIntStrAny']]=None, exclude: Optional[Union[
-        'AbstractSetIntStr', 'MappingIntStrAny']]=None, by_alias: bool=
-        False, skip_defaults: Optional[bool]=None, exclude_unset: bool=
-        False, exclude_defaults: bool=False, exclude_none: bool=False
-        ) ->'DictStrAny':
+    def _init_private_attributes(self) -> None:
+        for name, private_attr in self.__private_attributes__.items():
+            default = private_attr.get_default()
+            if default is not Undefined:
+                object_setattr(self, name, default)
+
+    def dict(
+        self,
+        *,
+        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        by_alias: bool = False,
+        skip_defaults: Optional[bool] = None,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+    ) -> 'DictStrAny':
         """
         Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

         """
-        pass
-
-    def json(self, *, include: Optional[Union['AbstractSetIntStr',
-        'MappingIntStrAny']]=None, exclude: Optional[Union[
-        'AbstractSetIntStr', 'MappingIntStrAny']]=None, by_alias: bool=
-        False, skip_defaults: Optional[bool]=None, exclude_unset: bool=
-        False, exclude_defaults: bool=False, exclude_none: bool=False,
-        encoder: Optional[Callable[[Any], Any]]=None, models_as_dict: bool=
-        True, **dumps_kwargs: Any) ->str:
+        if skip_defaults is not None:
+            warnings.warn(
+                f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"',
+                DeprecationWarning,
+            )
+            exclude_unset = skip_defaults
+
+        return dict(
+            self._iter(
+                to_dict=True,
+                by_alias=by_alias,
+                include=include,
+                exclude=exclude,
+                exclude_unset=exclude_unset,
+                exclude_defaults=exclude_defaults,
+                exclude_none=exclude_none,
+            )
+        )
+
+    def json(
+        self,
+        *,
+        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        by_alias: bool = False,
+        skip_defaults: Optional[bool] = None,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+        encoder: Optional[Callable[[Any], Any]] = None,
+        models_as_dict: bool = True,
+        **dumps_kwargs: Any,
+    ) -> str:
         """
         Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`.

         `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`.
         """
-        pass
+        if skip_defaults is not None:
+            warnings.warn(
+                f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"',
+                DeprecationWarning,
+            )
+            exclude_unset = skip_defaults
+        encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__)
+
+        # We don't directly call `self.dict()`, which does exactly this with `to_dict=True`
+        # because we want to keep raw `BaseModel` instances rather than converting them to `dict`.
+        # This allows users to write custom JSON encoders for given `BaseModel` classes.
+        data = dict(
+            self._iter(
+                to_dict=models_as_dict,
+                by_alias=by_alias,
+                include=include,
+                exclude=exclude,
+                exclude_unset=exclude_unset,
+                exclude_defaults=exclude_defaults,
+                exclude_none=exclude_none,
+            )
+        )
+        if self.__custom_root_type__:
+            data = data[ROOT_KEY]
+        return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs)

     @classmethod
-    def construct(cls: Type['Model'], _fields_set: Optional['SetStr']=None,
-        **values: Any) ->'Model':
+    def _enforce_dict_if_root(cls, obj: Any) -> Any:
+        if cls.__custom_root_type__ and (
+            not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY})
+            and not (isinstance(obj, BaseModel) and obj.__fields__.keys() == {ROOT_KEY})
+            or cls.__fields__[ROOT_KEY].shape in MAPPING_LIKE_SHAPES
+        ):
+            return {ROOT_KEY: obj}
+        else:
+            return obj
+
+    @classmethod
+    def parse_obj(cls: Type['Model'], obj: Any) -> 'Model':
+        obj = cls._enforce_dict_if_root(obj)
+        if not isinstance(obj, dict):
+            try:
+                obj = dict(obj)
+            except (TypeError, ValueError) as e:
+                exc = TypeError(f'{cls.__name__} expected dict not {obj.__class__.__name__}')
+                raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e
+        return cls(**obj)
+
+    @classmethod
+    def parse_raw(
+        cls: Type['Model'],
+        b: StrBytes,
+        *,
+        content_type: str = None,
+        encoding: str = 'utf8',
+        proto: Protocol = None,
+        allow_pickle: bool = False,
+    ) -> 'Model':
+        try:
+            obj = load_str_bytes(
+                b,
+                proto=proto,
+                content_type=content_type,
+                encoding=encoding,
+                allow_pickle=allow_pickle,
+                json_loads=cls.__config__.json_loads,
+            )
+        except (ValueError, TypeError, UnicodeDecodeError) as e:
+            raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls)
+        return cls.parse_obj(obj)
+
+    @classmethod
+    def parse_file(
+        cls: Type['Model'],
+        path: Union[str, Path],
+        *,
+        content_type: str = None,
+        encoding: str = 'utf8',
+        proto: Protocol = None,
+        allow_pickle: bool = False,
+    ) -> 'Model':
+        obj = load_file(
+            path,
+            proto=proto,
+            content_type=content_type,
+            encoding=encoding,
+            allow_pickle=allow_pickle,
+            json_loads=cls.__config__.json_loads,
+        )
+        return cls.parse_obj(obj)
+
+    @classmethod
+    def from_orm(cls: Type['Model'], obj: Any) -> 'Model':
+        if not cls.__config__.orm_mode:
+            raise ConfigError('You must have the config attribute orm_mode=True to use from_orm')
+        obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj)
+        m = cls.__new__(cls)
+        values, fields_set, validation_error = validate_model(cls, obj)
+        if validation_error:
+            raise validation_error
+        object_setattr(m, '__dict__', values)
+        object_setattr(m, '__fields_set__', fields_set)
+        m._init_private_attributes()
+        return m
+
+    @classmethod
+    def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **values: Any) -> 'Model':
         """
         Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data.
         Default values are respected, but no other validation is performed.
         Behaves as if `Config.extra = 'allow'` was set since it adds all passed values
         """
-        pass
+        m = cls.__new__(cls)
+        fields_values: Dict[str, Any] = {}
+        for name, field in cls.__fields__.items():
+            if field.alt_alias and field.alias in values:
+                fields_values[name] = values[field.alias]
+            elif name in values:
+                fields_values[name] = values[name]
+            elif not field.required:
+                fields_values[name] = field.get_default()
+        fields_values.update(values)
+        object_setattr(m, '__dict__', fields_values)
+        if _fields_set is None:
+            _fields_set = set(values.keys())
+        object_setattr(m, '__fields_set__', _fields_set)
+        m._init_private_attributes()
+        return m

-    def copy(self: 'Model', *, include: Optional[Union['AbstractSetIntStr',
-        'MappingIntStrAny']]=None, exclude: Optional[Union[
-        'AbstractSetIntStr', 'MappingIntStrAny']]=None, update: Optional[
-        'DictStrAny']=None, deep: bool=False) ->'Model':
+    def _copy_and_set_values(self: 'Model', values: 'DictStrAny', fields_set: 'SetStr', *, deep: bool) -> 'Model':
+        if deep:
+            # an empty dict is unlikely here, so plain deepcopy is used instead of smart_deepcopy
+            values = deepcopy(values)
+
+        cls = self.__class__
+        m = cls.__new__(cls)
+        object_setattr(m, '__dict__', values)
+        object_setattr(m, '__fields_set__', fields_set)
+        for name in self.__private_attributes__:
+            value = getattr(self, name, Undefined)
+            if value is not Undefined:
+                if deep:
+                    value = deepcopy(value)
+                object_setattr(m, name, value)
+
+        return m
+
+    def copy(
+        self: 'Model',
+        *,
+        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        update: Optional['DictStrAny'] = None,
+        deep: bool = False,
+    ) -> 'Model':
         """
         Duplicate a model, optionally choose which fields to include, exclude and change.

@@ -353,54 +642,322 @@ class BaseModel(Representation, metaclass=ModelMetaclass):
         :param deep: set to `True` to make a deep copy of the model
         :return: new model instance
         """
-        pass
+
+        values = dict(
+            self._iter(to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False),
+            **(update or {}),
+        )
+
+        # the new `__fields_set__` can contain unset optional fields that receive a value via the `update` kwarg
+        if update:
+            fields_set = self.__fields_set__ | update.keys()
+        else:
+            fields_set = set(self.__fields_set__)
+
+        return self._copy_and_set_values(values, fields_set, deep=deep)
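
Sketch of `copy` with the `update` and `deep` parameters (hypothetical `User` model):

    from pydantic.v1 import BaseModel

    class User(BaseModel):
        id: int
        name: str = 'alice'

    original = User(id=1)
    clone = original.copy(update={'name': 'bob'}, deep=True)
    assert (clone.id, clone.name) == (1, 'bob')
    assert original.name == 'alice'   # the source model is unchanged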
+
+    @classmethod
+    def schema(cls, by_alias: bool = True, ref_template: str = default_ref_template) -> 'DictStrAny':
+        cached = cls.__schema_cache__.get((by_alias, ref_template))
+        if cached is not None:
+            return cached
+        s = model_schema(cls, by_alias=by_alias, ref_template=ref_template)
+        cls.__schema_cache__[(by_alias, ref_template)] = s
+        return s

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def schema_json(
+        cls, *, by_alias: bool = True, ref_template: str = default_ref_template, **dumps_kwargs: Any
+    ) -> str:
+        from pydantic.v1.json import pydantic_encoder
+
+        return cls.__config__.json_dumps(
+            cls.schema(by_alias=by_alias, ref_template=ref_template), default=pydantic_encoder, **dumps_kwargs
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls.validate

     @classmethod
-    def __try_update_forward_refs__(cls, **localns: Any) ->None:
+    def validate(cls: Type['Model'], value: Any) -> 'Model':
+        if isinstance(value, cls):
+            copy_on_model_validation = cls.__config__.copy_on_model_validation
+            # whether to deep or shallow copy the model on validation, None means do not copy
+            deep_copy: Optional[bool] = None
+            if copy_on_model_validation not in {'deep', 'shallow', 'none'}:
+                # Warn about deprecated behavior
+                warnings.warn(
+                    "`copy_on_model_validation` should be a string: 'deep', 'shallow' or 'none'", DeprecationWarning
+                )
+                if copy_on_model_validation:
+                    deep_copy = False
+
+            if copy_on_model_validation == 'shallow':
+                # shallow copy
+                deep_copy = False
+            elif copy_on_model_validation == 'deep':
+                # deep copy
+                deep_copy = True
+
+            if deep_copy is None:
+                return value
+            else:
+                return value._copy_and_set_values(value.__dict__, value.__fields_set__, deep=deep_copy)
+
+        value = cls._enforce_dict_if_root(value)
+
+        if isinstance(value, dict):
+            return cls(**value)
+        elif cls.__config__.orm_mode:
+            return cls.from_orm(value)
+        else:
+            try:
+                value_as_dict = dict(value)
+            except (TypeError, ValueError) as e:
+                raise DictError() from e
+            return cls(**value_as_dict)
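
Illustration of how `copy_on_model_validation` drives the branch above when an existing instance is validated as a nested field (a minimal sketch; with the default `'shallow'` setting a copy would be made instead):

    from pydantic.v1 import BaseModel

    class Inner(BaseModel):
        x: int = 0

        class Config:
            copy_on_model_validation = 'none'   # reuse the instance, no copy

    class Outer(BaseModel):
        inner: Inner

    inner = Inner()
    assert Outer(inner=inner).inner is inner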
+
+    @classmethod
+    def _decompose_class(cls: Type['Model'], obj: Any) -> GetterDict:
+        if isinstance(obj, GetterDict):
+            return obj
+        return cls.__config__.getter_dict(obj)
+
+    @classmethod
+    @no_type_check
+    def _get_value(
+        cls,
+        v: Any,
+        to_dict: bool,
+        by_alias: bool,
+        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']],
+        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']],
+        exclude_unset: bool,
+        exclude_defaults: bool,
+        exclude_none: bool,
+    ) -> Any:
+        if isinstance(v, BaseModel):
+            if to_dict:
+                v_dict = v.dict(
+                    by_alias=by_alias,
+                    exclude_unset=exclude_unset,
+                    exclude_defaults=exclude_defaults,
+                    include=include,
+                    exclude=exclude,
+                    exclude_none=exclude_none,
+                )
+                if ROOT_KEY in v_dict:
+                    return v_dict[ROOT_KEY]
+                return v_dict
+            else:
+                return v.copy(include=include, exclude=exclude)
+
+        value_exclude = ValueItems(v, exclude) if exclude else None
+        value_include = ValueItems(v, include) if include else None
+
+        if isinstance(v, dict):
+            return {
+                k_: cls._get_value(
+                    v_,
+                    to_dict=to_dict,
+                    by_alias=by_alias,
+                    exclude_unset=exclude_unset,
+                    exclude_defaults=exclude_defaults,
+                    include=value_include and value_include.for_element(k_),
+                    exclude=value_exclude and value_exclude.for_element(k_),
+                    exclude_none=exclude_none,
+                )
+                for k_, v_ in v.items()
+                if (not value_exclude or not value_exclude.is_excluded(k_))
+                and (not value_include or value_include.is_included(k_))
+            }
+
+        elif sequence_like(v):
+            seq_args = (
+                cls._get_value(
+                    v_,
+                    to_dict=to_dict,
+                    by_alias=by_alias,
+                    exclude_unset=exclude_unset,
+                    exclude_defaults=exclude_defaults,
+                    include=value_include and value_include.for_element(i),
+                    exclude=value_exclude and value_exclude.for_element(i),
+                    exclude_none=exclude_none,
+                )
+                for i, v_ in enumerate(v)
+                if (not value_exclude or not value_exclude.is_excluded(i))
+                and (not value_include or value_include.is_included(i))
+            )
+
+            return v.__class__(*seq_args) if is_namedtuple(v.__class__) else v.__class__(seq_args)
+
+        elif isinstance(v, Enum) and getattr(cls.Config, 'use_enum_values', False):
+            return v.value
+
+        else:
+            return v
+
+    @classmethod
+    def __try_update_forward_refs__(cls, **localns: Any) -> None:
         """
         Same as update_forward_refs but will not raise exception
         when forward references are not defined.
         """
-        update_model_forward_refs(cls, cls.__fields__.values(), cls.
-            __config__.json_encoders, localns, (NameError,))
+        update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns, (NameError,))

     @classmethod
-    def update_forward_refs(cls, **localns: Any) ->None:
+    def update_forward_refs(cls, **localns: Any) -> None:
         """
         Try to update ForwardRefs on fields based on this Model, globalns and localns.
         """
-        pass
+        update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns)

-    def __iter__(self) ->'TupleGenerator':
+    def __iter__(self) -> 'TupleGenerator':
         """
         so `dict(model)` works
         """
         yield from self.__dict__.items()

-    def __eq__(self, other: Any) ->bool:
+    def _iter(
+        self,
+        to_dict: bool = False,
+        by_alias: bool = False,
+        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
+        exclude_unset: bool = False,
+        exclude_defaults: bool = False,
+        exclude_none: bool = False,
+    ) -> 'TupleGenerator':
+        # Merge field-level excludes with the explicit `exclude` parameter; the explicit parameter overrides the field-level options.
+        # The extra "is not None" guards are not logically necessary but optimize performance for the simple case.
+        if exclude is not None or self.__exclude_fields__ is not None:
+            exclude = ValueItems.merge(self.__exclude_fields__, exclude)
+
+        if include is not None or self.__include_fields__ is not None:
+            include = ValueItems.merge(self.__include_fields__, include, intersect=True)
+
+        allowed_keys = self._calculate_keys(
+            include=include, exclude=exclude, exclude_unset=exclude_unset  # type: ignore
+        )
+        if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none):
+            # huge boost for plain _iter()
+            yield from self.__dict__.items()
+            return
+
+        value_exclude = ValueItems(self, exclude) if exclude is not None else None
+        value_include = ValueItems(self, include) if include is not None else None
+
+        for field_key, v in self.__dict__.items():
+            if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None):
+                continue
+
+            if exclude_defaults:
+                model_field = self.__fields__.get(field_key)
+                if not getattr(model_field, 'required', True) and getattr(model_field, 'default', _missing) == v:
+                    continue
+
+            if by_alias and field_key in self.__fields__:
+                dict_key = self.__fields__[field_key].alias
+            else:
+                dict_key = field_key
+
+            if to_dict or value_include or value_exclude:
+                v = self._get_value(
+                    v,
+                    to_dict=to_dict,
+                    by_alias=by_alias,
+                    include=value_include and value_include.for_element(field_key),
+                    exclude=value_exclude and value_exclude.for_element(field_key),
+                    exclude_unset=exclude_unset,
+                    exclude_defaults=exclude_defaults,
+                    exclude_none=exclude_none,
+                )
+            yield dict_key, v
+
+    def _calculate_keys(
+        self,
+        include: Optional['MappingIntStrAny'],
+        exclude: Optional['MappingIntStrAny'],
+        exclude_unset: bool,
+        update: Optional['DictStrAny'] = None,
+    ) -> Optional[AbstractSet[str]]:
+        if include is None and exclude is None and exclude_unset is False:
+            return None
+
+        keys: AbstractSet[str]
+        if exclude_unset:
+            keys = self.__fields_set__.copy()
+        else:
+            keys = self.__dict__.keys()
+
+        if include is not None:
+            keys &= include.keys()
+
+        if update:
+            keys -= update.keys()
+
+        if exclude:
+            keys -= {k for k, v in exclude.items() if ValueItems.is_true(v)}
+
+        return keys
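
The merging of field-level and call-level excludes performed by `_iter` can be seen at the model level; a sketch with a hypothetical `Account` model using `Field(exclude=True)`:

    from pydantic.v1 import BaseModel, Field

    class Account(BaseModel):
        user: str
        password: str = Field(exclude=True)   # field-level exclude, always applied

    acc = Account(user='alice', password='s3cret')
    assert acc.dict() == {'user': 'alice'}
    assert acc.dict(exclude={'user'}) == {}   # call-level exclude merged on top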
+
+    def __eq__(self, other: Any) -> bool:
         if isinstance(other, BaseModel):
             return self.dict() == other.dict()
         else:
             return self.dict() == other

-    def __repr_args__(self) ->'ReprArgs':
-        return [(k, v) for k, v in self.__dict__.items() if k not in
-            DUNDER_ATTRIBUTES and (k not in self.__fields__ or self.
-            __fields__[k].field_info.repr)]
+    def __repr_args__(self) -> 'ReprArgs':
+        return [
+            (k, v)
+            for k, v in self.__dict__.items()
+            if k not in DUNDER_ATTRIBUTES and (k not in self.__fields__ or self.__fields__[k].field_info.repr)
+        ]


 _is_base_model_class_defined = True


-def create_model(__model_name: str, *, __config__: Optional[Type[BaseConfig
-    ]]=None, __base__: Union[None, Type['Model'], Tuple[Type['Model'], ...]
-    ]=None, __module__: str=__name__, __validators__: Dict[str,
-    'AnyClassMethod']=None, __cls_kwargs__: Dict[str, Any]=None, __slots__:
-    Optional[Tuple[str, ...]]=None, **field_definitions: Any) ->Type['Model']:
+@overload
+def create_model(
+    __model_name: str,
+    *,
+    __config__: Optional[Type[BaseConfig]] = None,
+    __base__: None = None,
+    __module__: str = __name__,
+    __validators__: Dict[str, 'AnyClassMethod'] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    **field_definitions: Any,
+) -> Type['BaseModel']:
+    ...
+
+
+@overload
+def create_model(
+    __model_name: str,
+    *,
+    __config__: Optional[Type[BaseConfig]] = None,
+    __base__: Union[Type['Model'], Tuple[Type['Model'], ...]],
+    __module__: str = __name__,
+    __validators__: Dict[str, 'AnyClassMethod'] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    **field_definitions: Any,
+) -> Type['Model']:
+    ...
+
+
+def create_model(
+    __model_name: str,
+    *,
+    __config__: Optional[Type[BaseConfig]] = None,
+    __base__: Union[None, Type['Model'], Tuple[Type['Model'], ...]] = None,
+    __module__: str = __name__,
+    __validators__: Dict[str, 'AnyClassMethod'] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    __slots__: Optional[Tuple[str, ...]] = None,
+    **field_definitions: Any,
+) -> Type['Model']:
     """
     Dynamically create a model.
     :param __model_name: name of the created model
@@ -417,16 +974,134 @@ def create_model(__model_name: str, *, __config__: Optional[Type[BaseConfig
         `foo=Field(datetime, default_factory=datetime.utcnow, alias='bar')` or
         `foo=(str, FieldInfo(title='Foo'))`
     """
-    pass
+    if __slots__ is not None:
+        # __slots__ will be ignored from here on
+        warnings.warn('__slots__ should not be passed to create_model', RuntimeWarning)
+
+    if __base__ is not None:
+        if __config__ is not None:
+            raise ConfigError('to avoid confusion __config__ and __base__ cannot be used together')
+        if not isinstance(__base__, tuple):
+            __base__ = (__base__,)
+    else:
+        __base__ = (cast(Type['Model'], BaseModel),)
+
+    __cls_kwargs__ = __cls_kwargs__ or {}
+
+    fields = {}
+    annotations = {}
+
+    for f_name, f_def in field_definitions.items():
+        if not is_valid_field(f_name):
+            warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning)
+        if isinstance(f_def, tuple):
+            try:
+                f_annotation, f_value = f_def
+            except ValueError as e:
+                raise ConfigError(
+                    'field definitions should either be a tuple of (<type>, <default>) or just a '
+                    'default value, unfortunately this means tuples as '
+                    'default values are not allowed'
+                ) from e
+        else:
+            f_annotation, f_value = None, f_def
+
+        if f_annotation:
+            annotations[f_name] = f_annotation
+        fields[f_name] = f_value
+
+    namespace: 'DictStrAny' = {'__annotations__': annotations, '__module__': __module__}
+    if __validators__:
+        namespace.update(__validators__)
+    namespace.update(fields)
+    if __config__:
+        namespace['Config'] = inherit_config(__config__, BaseConfig)
+    resolved_bases = resolve_bases(__base__)
+    meta, ns, kwds = prepare_class(__model_name, resolved_bases, kwds=__cls_kwargs__)
+    if resolved_bases is not __base__:
+        ns['__orig_bases__'] = __base__
+    namespace.update(ns)
+    return meta(__model_name, resolved_bases, namespace, **kwds)
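
A short usage sketch of `create_model` with `(type, default)` field definitions (hypothetical `Foo` model):

    from pydantic.v1 import create_model

    # equivalent to: class Foo(BaseModel): bar: int; baz: str = 'hello'
    Foo = create_model('Foo', bar=(int, ...), baz=(str, 'hello'))
    assert Foo(bar=1).dict() == {'bar': 1, 'baz': 'hello'}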


 _missing = object()


-def validate_model(model: Type[BaseModel], input_data: 'DictStrAny', cls:
-    'ModelOrDc'=None) ->Tuple['DictStrAny', 'SetStr', Optional[ValidationError]
-    ]:
+def validate_model(  # noqa: C901 (ignore complexity)
+    model: Type[BaseModel], input_data: 'DictStrAny', cls: 'ModelOrDc' = None
+) -> Tuple['DictStrAny', 'SetStr', Optional[ValidationError]]:
     """
     Validate data against a model.
     """
-    pass
+    values = {}
+    errors = []
+    # input_data names, possibly alias
+    names_used = set()
+    # field names, never aliases
+    fields_set = set()
+    config = model.__config__
+    check_extra = config.extra is not Extra.ignore
+    cls_ = cls or model
+
+    for validator in model.__pre_root_validators__:
+        try:
+            input_data = validator(cls_, input_data)
+        except (ValueError, TypeError, AssertionError) as exc:
+            return {}, set(), ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls_)
+
+    for name, field in model.__fields__.items():
+        value = input_data.get(field.alias, _missing)
+        using_name = False
+        if value is _missing and config.allow_population_by_field_name and field.alt_alias:
+            value = input_data.get(field.name, _missing)
+            using_name = True
+
+        if value is _missing:
+            if field.required:
+                errors.append(ErrorWrapper(MissingError(), loc=field.alias))
+                continue
+
+            value = field.get_default()
+
+            if not config.validate_all and not field.validate_always:
+                values[name] = value
+                continue
+        else:
+            fields_set.add(name)
+            if check_extra:
+                names_used.add(field.name if using_name else field.alias)
+
+        v_, errors_ = field.validate(value, values, loc=field.alias, cls=cls_)
+        if isinstance(errors_, ErrorWrapper):
+            errors.append(errors_)
+        elif isinstance(errors_, list):
+            errors.extend(errors_)
+        else:
+            values[name] = v_
+
+    if check_extra:
+        if isinstance(input_data, GetterDict):
+            extra = input_data.extra_keys() - names_used
+        else:
+            extra = input_data.keys() - names_used
+        if extra:
+            fields_set |= extra
+            if config.extra is Extra.allow:
+                for f in extra:
+                    values[f] = input_data[f]
+            else:
+                for f in sorted(extra):
+                    errors.append(ErrorWrapper(ExtraError(), loc=f))
+
+    for skip_on_failure, validator in model.__post_root_validators__:
+        if skip_on_failure and errors:
+            continue
+        try:
+            values = validator(cls_, values)
+        except (ValueError, TypeError, AssertionError) as exc:
+            errors.append(ErrorWrapper(exc, loc=ROOT_KEY))
+
+    if errors:
+        return values, fields_set, ValidationError(errors, cls_)
+    else:
+        return values, fields_set, None
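
Sketch of calling `validate_model` directly (hypothetical `User` model): it returns the validated values, the set of explicitly provided field names, and either a `ValidationError` or `None`:

    from pydantic.v1 import BaseModel
    from pydantic.v1.main import validate_model

    class User(BaseModel):
        id: int
        name: str = 'anon'

    values, fields_set, error = validate_model(User, {'id': 'not an int'})
    assert values == {'name': 'anon'}   # the failing field is omitted
    assert fields_set == {'id'}
    assert error is not None            # ValidationError for the bad 'id'
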
diff --git a/pydantic/v1/mypy.py b/pydantic/v1/mypy.py
index 69913f634..20fc039d5 100644
--- a/pydantic/v1/mypy.py
+++ b/pydantic/v1/mypy.py
@@ -1,78 +1,174 @@
 import sys
 from configparser import ConfigParser
 from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type as TypingType, Union
+
 from mypy.errorcodes import ErrorCode
-from mypy.nodes import ARG_NAMED, ARG_NAMED_OPT, ARG_OPT, ARG_POS, ARG_STAR2, MDEF, Argument, AssignmentStmt, Block, CallExpr, ClassDef, Context, Decorator, EllipsisExpr, FuncBase, FuncDef, JsonDict, MemberExpr, NameExpr, PassStmt, PlaceholderNode, RefExpr, StrExpr, SymbolNode, SymbolTableNode, TempNode, TypeInfo, TypeVarExpr, Var
+from mypy.nodes import (
+    ARG_NAMED,
+    ARG_NAMED_OPT,
+    ARG_OPT,
+    ARG_POS,
+    ARG_STAR2,
+    MDEF,
+    Argument,
+    AssignmentStmt,
+    Block,
+    CallExpr,
+    ClassDef,
+    Context,
+    Decorator,
+    EllipsisExpr,
+    FuncBase,
+    FuncDef,
+    JsonDict,
+    MemberExpr,
+    NameExpr,
+    PassStmt,
+    PlaceholderNode,
+    RefExpr,
+    StrExpr,
+    SymbolNode,
+    SymbolTableNode,
+    TempNode,
+    TypeInfo,
+    TypeVarExpr,
+    Var,
+)
 from mypy.options import Options
-from mypy.plugin import CheckerPluginInterface, ClassDefContext, FunctionContext, MethodContext, Plugin, ReportConfigContext, SemanticAnalyzerPluginInterface
+from mypy.plugin import (
+    CheckerPluginInterface,
+    ClassDefContext,
+    FunctionContext,
+    MethodContext,
+    Plugin,
+    ReportConfigContext,
+    SemanticAnalyzerPluginInterface,
+)
 from mypy.plugins import dataclasses
-from mypy.semanal import set_callable_name
+from mypy.semanal import set_callable_name  # type: ignore
 from mypy.server.trigger import make_wildcard_trigger
-from mypy.types import AnyType, CallableType, Instance, NoneType, Overloaded, ProperType, Type, TypeOfAny, TypeType, TypeVarId, TypeVarType, UnionType, get_proper_type
+from mypy.types import (
+    AnyType,
+    CallableType,
+    Instance,
+    NoneType,
+    Overloaded,
+    ProperType,
+    Type,
+    TypeOfAny,
+    TypeType,
+    TypeVarId,
+    TypeVarType,
+    UnionType,
+    get_proper_type,
+)
 from mypy.typevars import fill_typevars
 from mypy.util import get_unique_redefinition_name
 from mypy.version import __version__ as mypy_version
+
 from pydantic.v1.utils import is_valid_field
+
 try:
-    from mypy.types import TypeVarDef
-except ImportError:
+    from mypy.types import TypeVarDef  # type: ignore[attr-defined]
+except ImportError:  # pragma: no cover
+    # Backward-compatible with TypeVarDef from Mypy 0.910.
     from mypy.types import TypeVarType as TypeVarDef
+
 CONFIGFILE_KEY = 'pydantic-mypy'
 METADATA_KEY = 'pydantic-mypy-metadata'
-_NAMESPACE = __name__[:-5]
+_NAMESPACE = __name__[:-5]  # 'pydantic' in 1.10.X, 'pydantic.v1' in v2.X
 BASEMODEL_FULLNAME = f'{_NAMESPACE}.main.BaseModel'
 BASESETTINGS_FULLNAME = f'{_NAMESPACE}.env_settings.BaseSettings'
 MODEL_METACLASS_FULLNAME = f'{_NAMESPACE}.main.ModelMetaclass'
 FIELD_FULLNAME = f'{_NAMESPACE}.fields.Field'
 DATACLASS_FULLNAME = f'{_NAMESPACE}.dataclasses.dataclass'
+
+
+def parse_mypy_version(version: str) -> Tuple[int, ...]:
+    return tuple(map(int, version.partition('+')[0].split('.')))
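
For example, `parse_mypy_version` drops any local-version suffix before splitting:

    assert parse_mypy_version('0.910') == (0, 910)
    assert parse_mypy_version('1.4.1+dev.abc123') == (1, 4, 1)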
+
+
 MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version)
-BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930
-    ) else '__builtins__'
+BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930) else '__builtins__'
+
+# Increment version if plugin changes and mypy caches should be invalidated
 __version__ = 2


-def plugin(version: str) ->'TypingType[Plugin]':
+def plugin(version: str) -> 'TypingType[Plugin]':
     """
     `version` is the mypy version string

     We might want to use this to print a warning if the mypy version being used is
     newer, or especially older, than we expect (or need).
     """
-    pass
+    return PydanticPlugin


 class PydanticPlugin(Plugin):
-
-    def __init__(self, options: Options) ->None:
+    def __init__(self, options: Options) -> None:
         self.plugin_config = PydanticPluginConfig(options)
         self._plugin_data = self.plugin_config.to_data()
         super().__init__(options)

-    def get_class_decorator_hook(self, fullname: str) ->Optional[Callable[[
-        ClassDefContext], None]]:
+    def get_base_class_hook(self, fullname: str) -> 'Optional[Callable[[ClassDefContext], None]]':
+        sym = self.lookup_fully_qualified(fullname)
+        if sym and isinstance(sym.node, TypeInfo):  # pragma: no branch
+            # No branching may occur if the mypy cache has not been cleared
+            if any(get_fullname(base) == BASEMODEL_FULLNAME for base in sym.node.mro):
+                return self._pydantic_model_class_maker_callback
+        return None
+
+    def get_metaclass_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]:
+        if fullname == MODEL_METACLASS_FULLNAME:
+            return self._pydantic_model_metaclass_marker_callback
+        return None
+
+    def get_function_hook(self, fullname: str) -> 'Optional[Callable[[FunctionContext], Type]]':
+        sym = self.lookup_fully_qualified(fullname)
+        if sym and sym.fullname == FIELD_FULLNAME:
+            return self._pydantic_field_callback
+        return None
+
+    def get_method_hook(self, fullname: str) -> Optional[Callable[[MethodContext], Type]]:
+        if fullname.endswith('.from_orm'):
+            return from_orm_callback
+        return None
+
+    def get_class_decorator_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]:
         """Mark pydantic.dataclasses as dataclass.

         Mypy version 1.1.1 added support for the `@dataclass_transform` decorator.
         """
-        pass
+        if fullname == DATACLASS_FULLNAME and MYPY_VERSION_TUPLE < (1, 1):
+            return dataclasses.dataclass_class_maker_callback  # type: ignore[return-value]
+        return None

-    def report_config_data(self, ctx: ReportConfigContext) ->Dict[str, Any]:
+    def report_config_data(self, ctx: ReportConfigContext) -> Dict[str, Any]:
         """Return all plugin config data.

         Used by mypy to determine if cache needs to be discarded.
         """
-        pass
+        return self._plugin_data

-    def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext
-        ) ->None:
+    def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None:
+        transformer = PydanticModelTransformer(ctx, self.plugin_config)
+        transformer.transform()
+
+    def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None:
         """Reset dataclass_transform_spec attribute of ModelMetaclass.

         Let the plugin handle it. This behavior can be disabled
         if 'debug_dataclass_transform' is set to `True`, for testing purposes.
         """
-        pass
+        if self.plugin_config.debug_dataclass_transform:
+            return
+        info_metaclass = ctx.cls.info.declared_metaclass
+        assert info_metaclass, "callback not passed from 'get_metaclass_hook'"
+        if getattr(info_metaclass.type, 'dataclass_transform_spec', None):
+            info_metaclass.type.dataclass_transform_spec = None  # type: ignore[attr-defined]

-    def _pydantic_field_callback(self, ctx: FunctionContext) ->'Type':
+    def _pydantic_field_callback(self, ctx: FunctionContext) -> 'Type':
         """
         Extract the type of the `default` argument from the Field function, and use it as the return type.

@@ -81,58 +177,128 @@ class PydanticPlugin(Plugin):
         * Output an error if both are specified.
         * Retrieve the type of the argument which is specified, and use it as return type for the function.
         """
-        pass
+        default_any_type = ctx.default_return_type
+
+        assert ctx.callee_arg_names[0] == 'default', '"default" is no longer first argument in Field()'
+        assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer second argument in Field()'
+        default_args = ctx.args[0]
+        default_factory_args = ctx.args[1]
+
+        if default_args and default_factory_args:
+            error_default_and_default_factory_specified(ctx.api, ctx.context)
+            return default_any_type
+
+        if default_args:
+            default_type = ctx.arg_types[0][0]
+            default_arg = default_args[0]
+
+            # Fallback to default Any type if the field is required
+            if not isinstance(default_arg, EllipsisExpr):
+                return default_type
+
+        elif default_factory_args:
+            default_factory_type = ctx.arg_types[1][0]
+
+            # Functions which use `ParamSpec` can be overloaded, exposing the callable's types as a parameter
+            # Pydantic calls the default factory without any argument, so we retrieve the first item
+            if isinstance(default_factory_type, Overloaded):
+                if MYPY_VERSION_TUPLE > (0, 910):
+                    default_factory_type = default_factory_type.items[0]
+                else:
+                    # Mypy 0.910 exposes the items of overloaded types through a method rather than an attribute
+                    default_factory_type = default_factory_type.items()[0]  # type: ignore[operator]
+
+            if isinstance(default_factory_type, CallableType):
+                ret_type = default_factory_type.ret_type
+                # mypy doesn't know that `ret_type` has `args` (you'd think it should);
+                # check for the attribute in case this varies by mypy version
+                args = getattr(ret_type, 'args', None)
+                if args:
+                    if all(isinstance(arg, TypeVarType) for arg in args):
+                        # Looks like the default factory is a type like `list` or `dict`, replace all args with `Any`
+                        ret_type.args = tuple(default_any_type for _ in args)  # type: ignore[attr-defined]
+                return ret_type
+
+        return default_any_type
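
The static error mirrors pydantic's runtime check on `Field`; a minimal illustration of the runtime side:

    from pydantic.v1 import Field

    try:
        Field(default=1, default_factory=lambda: 2)
    except ValueError as exc:
        assert 'default_factory' in str(exc)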


 class PydanticPluginConfig:
-    __slots__ = ('init_forbid_extra', 'init_typed',
-        'warn_required_dynamic_aliases', 'warn_untyped_fields',
-        'debug_dataclass_transform')
+    __slots__ = (
+        'init_forbid_extra',
+        'init_typed',
+        'warn_required_dynamic_aliases',
+        'warn_untyped_fields',
+        'debug_dataclass_transform',
+    )
     init_forbid_extra: bool
     init_typed: bool
     warn_required_dynamic_aliases: bool
     warn_untyped_fields: bool
-    debug_dataclass_transform: bool
+    debug_dataclass_transform: bool  # undocumented

-    def __init__(self, options: Options) ->None:
-        if options.config_file is None:
+    def __init__(self, options: Options) -> None:
+        if options.config_file is None:  # pragma: no cover
             return
+
         toml_config = parse_toml(options.config_file)
         if toml_config is not None:
             config = toml_config.get('tool', {}).get('pydantic-mypy', {})
             for key in self.__slots__:
                 setting = config.get(key, False)
                 if not isinstance(setting, bool):
-                    raise ValueError(
-                        f'Configuration value must be a boolean for key: {key}'
-                        )
+                    raise ValueError(f'Configuration value must be a boolean for key: {key}')
                 setattr(self, key, setting)
         else:
             plugin_config = ConfigParser()
             plugin_config.read(options.config_file)
             for key in self.__slots__:
-                setting = plugin_config.getboolean(CONFIGFILE_KEY, key,
-                    fallback=False)
+                setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False)
                 setattr(self, key, setting)

+    def to_data(self) -> Dict[str, Any]:
+        return {key: getattr(self, key) for key in self.__slots__}

-def from_orm_callback(ctx: MethodContext) ->Type:
+
+def from_orm_callback(ctx: MethodContext) -> Type:
     """
     Raise an error if orm_mode is not enabled
     """
-    pass
+    model_type: Instance
+    ctx_type = ctx.type
+    if isinstance(ctx_type, TypeType):
+        ctx_type = ctx_type.item
+    if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance):
+        model_type = ctx_type.ret_type  # called on the class
+    elif isinstance(ctx_type, Instance):
+        model_type = ctx_type  # called on an instance (unusual, but still valid)
+    else:  # pragma: no cover
+        detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})'
+        error_unexpected_behavior(detail, ctx.api, ctx.context)
+        return ctx.default_return_type
+    pydantic_metadata = model_type.type.metadata.get(METADATA_KEY)
+    if pydantic_metadata is None:
+        return ctx.default_return_type
+    orm_mode = pydantic_metadata.get('config', {}).get('orm_mode')
+    if orm_mode is not True:
+        error_from_orm(get_name(model_type.type), ctx.api, ctx.context)
+    return ctx.default_return_type


 class PydanticModelTransformer:
-    tracked_config_fields: Set[str] = {'extra', 'allow_mutation', 'frozen',
-        'orm_mode', 'allow_population_by_field_name', 'alias_generator'}
-
-    def __init__(self, ctx: ClassDefContext, plugin_config:
-        PydanticPluginConfig) ->None:
+    tracked_config_fields: Set[str] = {
+        'extra',
+        'allow_mutation',
+        'frozen',
+        'orm_mode',
+        'allow_population_by_field_name',
+        'alias_generator',
+    }
+
+    def __init__(self, ctx: ClassDefContext, plugin_config: PydanticPluginConfig) -> None:
         self._ctx = ctx
         self.plugin_config = plugin_config

-    def transform(self) ->None:
+    def transform(self) -> None:
         """
         Configures the BaseModel subclass according to the plugin settings.

@@ -142,9 +308,22 @@ class PydanticModelTransformer:
         * freezes the class if allow_mutation = False or frozen = True
         * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses
         """
-        pass
-
-    def adjust_validator_signatures(self) ->None:
+        ctx = self._ctx
+        info = ctx.cls.info
+
+        self.adjust_validator_signatures()
+        config = self.collect_config()
+        fields = self.collect_fields(config)
+        is_settings = any(get_fullname(base) == BASESETTINGS_FULLNAME for base in info.mro[:-1])
+        self.add_initializer(fields, config, is_settings)
+        self.add_construct_method(fields)
+        self.set_frozen(fields, frozen=config.allow_mutation is False or config.frozen is True)
+        info.metadata[METADATA_KEY] = {
+            'fields': {field.name: field.serialize() for field in fields},
+            'config': config.set_values_dict(),
+        }
+
+    def adjust_validator_signatures(self) -> None:
         """When we decorate a function `f` with `pydantic.validator(...), mypy sees
         `f` as a regular method taking a `self` instance, even though pydantic
         internally wraps `f` with `classmethod` if necessary.
@@ -152,117 +331,390 @@ class PydanticModelTransformer:
         Teach mypy this by marking any function whose outermost decorator is a
         `validator()` call as a classmethod.
         """
-        pass
+        for name, sym in self._ctx.cls.info.names.items():
+            if isinstance(sym.node, Decorator):
+                first_dec = sym.node.original_decorators[0]
+                if (
+                    isinstance(first_dec, CallExpr)
+                    and isinstance(first_dec.callee, NameExpr)
+                    and first_dec.callee.fullname == f'{_NAMESPACE}.class_validators.validator'
+                ):
+                    sym.node.func.is_class = True

-    def collect_config(self) ->'ModelConfigData':
+    def collect_config(self) -> 'ModelConfigData':
         """
         Collects the values of the config attributes that are used by the plugin, accounting for parent classes.
         """
-        pass
-
-    def collect_fields(self, model_config: 'ModelConfigData') ->List[
-        'PydanticModelField']:
+        ctx = self._ctx
+        cls = ctx.cls
+        config = ModelConfigData()
+        for stmt in cls.defs.body:
+            if not isinstance(stmt, ClassDef):
+                continue
+            if stmt.name == 'Config':
+                for substmt in stmt.defs.body:
+                    if not isinstance(substmt, AssignmentStmt):
+                        continue
+                    config.update(self.get_config_update(substmt))
+                if (
+                    config.has_alias_generator
+                    and not config.allow_population_by_field_name
+                    and self.plugin_config.warn_required_dynamic_aliases
+                ):
+                    error_required_dynamic_aliases(ctx.api, stmt)
+        for info in cls.info.mro[1:]:  # 0 is the current class
+            if METADATA_KEY not in info.metadata:
+                continue
+
+            # Each class depends on the set of fields in its ancestors
+            ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info)))
+            for name, value in info.metadata[METADATA_KEY]['config'].items():
+                config.setdefault(name, value)
+        return config
+
+    def collect_fields(self, model_config: 'ModelConfigData') -> List['PydanticModelField']:
         """
         Collects the fields for the model, accounting for parent classes
         """
-        pass
-
-    def add_initializer(self, fields: List['PydanticModelField'], config:
-        'ModelConfigData', is_settings: bool) ->None:
+        # First, collect fields belonging to the current class.
+        ctx = self._ctx
+        cls = self._ctx.cls
+        fields = []  # type: List[PydanticModelField]
+        known_fields = set()  # type: Set[str]
+        for stmt in cls.defs.body:
+            if not isinstance(stmt, AssignmentStmt):  # `and stmt.new_syntax` to require annotation
+                continue
+
+            lhs = stmt.lvalues[0]
+            if not isinstance(lhs, NameExpr) or not is_valid_field(lhs.name):
+                continue
+
+            if not stmt.new_syntax and self.plugin_config.warn_untyped_fields:
+                error_untyped_fields(ctx.api, stmt)
+
+            # if lhs.name == '__config__':  # BaseConfig not well handled; I'm not sure why yet
+            #     continue
+
+            sym = cls.info.names.get(lhs.name)
+            if sym is None:  # pragma: no cover
+                # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation)
+                # This is the same logic used in the dataclasses plugin
+                continue
+
+            node = sym.node
+            if isinstance(node, PlaceholderNode):  # pragma: no cover
+                # See the PlaceholderNode docstring for more detail about how this can occur
+                # Basically, it is an edge case when dealing with complex import logic
+                # This is the same logic used in the dataclasses plugin
+                continue
+            if not isinstance(node, Var):  # pragma: no cover
+                # Don't know if this edge case still happens with the `is_valid_field` check above
+                # but better safe than sorry
+                continue
+
+            # x: ClassVar[int] is ignored by dataclasses.
+            if node.is_classvar:
+                continue
+
+            is_required = self.get_is_required(cls, stmt, lhs)
+            alias, has_dynamic_alias = self.get_alias_info(stmt)
+            if (
+                has_dynamic_alias
+                and not model_config.allow_population_by_field_name
+                and self.plugin_config.warn_required_dynamic_aliases
+            ):
+                error_required_dynamic_aliases(ctx.api, stmt)
+            fields.append(
+                PydanticModelField(
+                    name=lhs.name,
+                    is_required=is_required,
+                    alias=alias,
+                    has_dynamic_alias=has_dynamic_alias,
+                    line=stmt.line,
+                    column=stmt.column,
+                )
+            )
+            known_fields.add(lhs.name)
+        all_fields = fields.copy()
+        for info in cls.info.mro[1:]:  # 0 is the current class, -2 is BaseModel, -1 is object
+            if METADATA_KEY not in info.metadata:
+                continue
+
+            superclass_fields = []
+            # Each class depends on the set of fields in its ancestors
+            ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info)))
+
+            for name, data in info.metadata[METADATA_KEY]['fields'].items():
+                if name not in known_fields:
+                    field = PydanticModelField.deserialize(info, data)
+                    known_fields.add(name)
+                    superclass_fields.append(field)
+                else:
+                    (field,) = (a for a in all_fields if a.name == name)
+                    all_fields.remove(field)
+                    superclass_fields.append(field)
+            all_fields = superclass_fields + all_fields
+        return all_fields
+
+    def add_initializer(self, fields: List['PydanticModelField'], config: 'ModelConfigData', is_settings: bool) -> None:
         """
         Adds a fields-aware `__init__` method to the class.

         The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings.
         """
-        pass
+        ctx = self._ctx
+        typed = self.plugin_config.init_typed
+        use_alias = config.allow_population_by_field_name is not True
+        force_all_optional = is_settings or bool(
+            config.has_alias_generator and not config.allow_population_by_field_name
+        )
+        init_arguments = self.get_field_arguments(
+            fields, typed=typed, force_all_optional=force_all_optional, use_alias=use_alias
+        )
+        if not self.should_init_forbid_extra(fields, config):
+            var = Var('kwargs')
+            init_arguments.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))
+
+        if '__init__' not in ctx.cls.info.names:
+            add_method(ctx, '__init__', init_arguments, NoneType())

-    def add_construct_method(self, fields: List['PydanticModelField']) ->None:
+    def add_construct_method(self, fields: List['PydanticModelField']) -> None:
         """
         Adds a fully typed `construct` classmethod to the class.

         Similar to the fields-aware __init__ method, but always uses the field names (not aliases),
         and does not treat settings fields as optional.
         """
-        pass
+        ctx = self._ctx
+        set_str = ctx.api.named_type(f'{BUILTINS_NAME}.set', [ctx.api.named_type(f'{BUILTINS_NAME}.str')])
+        optional_set_str = UnionType([set_str, NoneType()])
+        fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT)
+        construct_arguments = self.get_field_arguments(fields, typed=True, force_all_optional=False, use_alias=False)
+        construct_arguments = [fields_set_argument] + construct_arguments
+
+        obj_type = ctx.api.named_type(f'{BUILTINS_NAME}.object')
+        self_tvar_name = '_PydanticBaseModel'  # Make sure it does not conflict with other names in the class
+        tvar_fullname = ctx.cls.fullname + '.' + self_tvar_name
+        if MYPY_VERSION_TUPLE >= (1, 4):
+            tvd = TypeVarType(
+                self_tvar_name,
+                tvar_fullname,
+                TypeVarId(-1),
+                [],
+                obj_type,
+                AnyType(TypeOfAny.from_omitted_generics),  # type: ignore[arg-type]
+            )
+            self_tvar_expr = TypeVarExpr(
+                self_tvar_name,
+                tvar_fullname,
+                [],
+                obj_type,
+                AnyType(TypeOfAny.from_omitted_generics),  # type: ignore[arg-type]
+            )
+        else:
+            tvd = TypeVarDef(self_tvar_name, tvar_fullname, -1, [], obj_type)
+            self_tvar_expr = TypeVarExpr(self_tvar_name, tvar_fullname, [], obj_type)
+        ctx.cls.info.names[self_tvar_name] = SymbolTableNode(MDEF, self_tvar_expr)
+
+        # Backward-compatible with TypeVarDef from Mypy 0.910.
+        if isinstance(tvd, TypeVarType):
+            self_type = tvd
+        else:
+            self_type = TypeVarType(tvd)

-    def set_frozen(self, fields: List['PydanticModelField'], frozen: bool
-        ) ->None:
+        add_method(
+            ctx,
+            'construct',
+            construct_arguments,
+            return_type=self_type,
+            self_type=self_type,
+            tvar_def=tvd,
+            is_classmethod=True,
+        )
+
+    def set_frozen(self, fields: List['PydanticModelField'], frozen: bool) -> None:
         """
         Marks all fields as properties so that attempts to set them trigger mypy errors.

         This is the same approach used by the attrs and dataclasses plugins.
         """
-        pass
-
-    def get_config_update(self, substmt: AssignmentStmt) ->Optional[
-        'ModelConfigData']:
+        ctx = self._ctx
+        info = ctx.cls.info
+        for field in fields:
+            sym_node = info.names.get(field.name)
+            if sym_node is not None:
+                var = sym_node.node
+                if isinstance(var, Var):
+                    var.is_property = frozen
+                elif isinstance(var, PlaceholderNode) and not ctx.api.final_iteration:
+                    # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage
+                    ctx.api.defer()
+                else:  # pragma: no cover
+                    # I don't know whether it's possible to hit this branch, but I've added it for safety
+                    try:
+                        var_str = str(var)
+                    except TypeError:
+                        # This happens for PlaceholderNode; perhaps it will happen for other types in the future.
+                        var_str = repr(var)
+                    detail = f'sym_node.node: {var_str} (of type {var.__class__})'
+                    error_unexpected_behavior(detail, ctx.api, ctx.cls)
+            else:
+                var = field.to_var(info, use_alias=False)
+                var.info = info
+                var.is_property = frozen
+                var._fullname = get_fullname(info) + '.' + get_name(var)
+                info.names[get_name(var)] = SymbolTableNode(MDEF, var)
+
+    def get_config_update(self, substmt: AssignmentStmt) -> Optional['ModelConfigData']:
         """
         Determines the config update due to a single statement in the Config class definition.

         Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int)
         """
-        pass
+        lhs = substmt.lvalues[0]
+        if not (isinstance(lhs, NameExpr) and lhs.name in self.tracked_config_fields):
+            return None
+        if lhs.name == 'extra':
+            if isinstance(substmt.rvalue, StrExpr):
+                forbid_extra = substmt.rvalue.value == 'forbid'
+            elif isinstance(substmt.rvalue, MemberExpr):
+                forbid_extra = substmt.rvalue.name == 'forbid'
+            else:
+                error_invalid_config_value(lhs.name, self._ctx.api, substmt)
+                return None
+            return ModelConfigData(forbid_extra=forbid_extra)
+        if lhs.name == 'alias_generator':
+            has_alias_generator = True
+            if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname == 'builtins.None':
+                has_alias_generator = False
+            return ModelConfigData(has_alias_generator=has_alias_generator)
+        if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname in ('builtins.True', 'builtins.False'):
+            return ModelConfigData(**{lhs.name: substmt.rvalue.fullname == 'builtins.True'})
+        error_invalid_config_value(lhs.name, self._ctx.api, substmt)
+        return None

     @staticmethod
-    def get_is_required(cls: ClassDef, stmt: AssignmentStmt, lhs: NameExpr
-        ) ->bool:
+    def get_is_required(cls: ClassDef, stmt: AssignmentStmt, lhs: NameExpr) -> bool:
         """
         Returns a boolean indicating whether the field defined in `stmt` is a required field.
         """
-        pass
+        expr = stmt.rvalue
+        if isinstance(expr, TempNode):
+            # TempNode means annotation-only, so only non-required if Optional
+            value_type = get_proper_type(cls.info[lhs.name].type)
+            return not PydanticModelTransformer.type_has_implicit_default(value_type)
+        if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME:
+            # The "default value" is a call to `Field`; at this point, the field is
+            # only required if default is Ellipsis (i.e., `field_name: Annotation = Field(...)`) or if default_factory
+            # is specified.
+            for arg, name in zip(expr.args, expr.arg_names):
+                # If name is None, then this arg is the default because it is the only positional argument.
+                if name is None or name == 'default':
+                    return arg.__class__ is EllipsisExpr
+                if name == 'default_factory':
+                    return False
+            # In this case, default and default_factory are not specified, so we need to look at the annotation
+            value_type = get_proper_type(cls.info[lhs.name].type)
+            return not PydanticModelTransformer.type_has_implicit_default(value_type)
+        # Only required if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`)
+        return isinstance(expr, EllipsisExpr)

     @staticmethod
-    def type_has_implicit_default(type_: Optional[ProperType]) ->bool:
+    def type_has_implicit_default(type_: Optional[ProperType]) -> bool:
         """
         Returns True if the passed type will be given an implicit default value.

         In pydantic v1, this is the case for Optional types and Any (with default value None).
         """
-        pass
+        if isinstance(type_, AnyType):
+            # Annotated as Any
+            return True
+        if isinstance(type_, UnionType) and any(
+            isinstance(item, NoneType) or isinstance(item, AnyType) for item in type_.items
+        ):
+            # Annotated as Optional, or otherwise having NoneType or AnyType in the union
+            return True
+        return False
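
The runtime behavior these two helpers mirror, sketched with a hypothetical model (required vs. implicitly defaulted fields in pydantic v1):

    from typing import Optional
    from pydantic.v1 import BaseModel, Field

    class M(BaseModel):
        a: int                                  # required
        b: int = Field(...)                     # required (explicit Ellipsis)
        c: Optional[int]                        # not required: implicit default of None
        d: list = Field(default_factory=list)   # not required: default_factory supplies a value

    assert {f.name for f in M.__fields__.values() if f.required} == {'a', 'b'}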

     @staticmethod
-    def get_alias_info(stmt: AssignmentStmt) ->Tuple[Optional[str], bool]:
+    def get_alias_info(stmt: AssignmentStmt) -> Tuple[Optional[str], bool]:
         """
         Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`.

         `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal.
         If `has_dynamic_alias` is True, `alias` will be None.
         """
-        pass
-
-    def get_field_arguments(self, fields: List['PydanticModelField'], typed:
-        bool, force_all_optional: bool, use_alias: bool) ->List[Argument]:
+        expr = stmt.rvalue
+        if isinstance(expr, TempNode):
+            # TempNode means annotation-only
+            return None, False
+
+        if not (
+            isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME
+        ):
+            # Assigned value is not a call to pydantic.fields.Field
+            return None, False
+
+        for i, arg_name in enumerate(expr.arg_names):
+            if arg_name != 'alias':
+                continue
+            arg = expr.args[i]
+            if isinstance(arg, StrExpr):
+                return arg.value, False
+            else:
+                return None, True
+        return None, False
+
+    def get_field_arguments(
+        self, fields: List['PydanticModelField'], typed: bool, force_all_optional: bool, use_alias: bool
+    ) -> List[Argument]:
         """
         Helper function used during the construction of the `__init__` and `construct` method signatures.

         Returns a list of mypy Argument instances for use in the generated signatures.
         """
-        pass
+        info = self._ctx.cls.info
+        arguments = [
+            field.to_argument(info, typed=typed, force_optional=force_all_optional, use_alias=use_alias)
+            for field in fields
+            if not (use_alias and field.has_dynamic_alias)
+        ]
+        return arguments

-    def should_init_forbid_extra(self, fields: List['PydanticModelField'],
-        config: 'ModelConfigData') ->bool:
+    def should_init_forbid_extra(self, fields: List['PydanticModelField'], config: 'ModelConfigData') -> bool:
         """
         Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature

         We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to,
         *unless* a required dynamic alias is present (since then we can't determine a valid signature).
         """
-        pass
+        if not config.allow_population_by_field_name:
+            if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)):
+                return False
+        if config.forbid_extra:
+            return True
+        return self.plugin_config.init_forbid_extra

     @staticmethod
-    def is_dynamic_alias_present(fields: List['PydanticModelField'],
-        has_alias_generator: bool) ->bool:
+    def is_dynamic_alias_present(fields: List['PydanticModelField'], has_alias_generator: bool) -> bool:
         """
         Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be
         determined during static analysis.
         """
-        pass
+        for field in fields:
+            if field.has_dynamic_alias:
+                return True
+        if has_alias_generator:
+            for field in fields:
+                if field.alias is None:
+                    return True
+        return False


 class PydanticModelField:
-
-    def __init__(self, name: str, is_required: bool, alias: Optional[str],
-        has_dynamic_alias: bool, line: int, column: int):
+    def __init__(
+        self, name: str, is_required: bool, alias: Optional[str], has_dynamic_alias: bool, line: int, column: int
+    ):
         self.name = name
         self.is_required = is_required
         self.alias = alias
@@ -270,13 +722,42 @@ class PydanticModelField:
         self.line = line
         self.column = column

+    def to_var(self, info: TypeInfo, use_alias: bool) -> Var:
+        name = self.name
+        if use_alias and self.alias is not None:
+            name = self.alias
+        return Var(name, info[self.name].type)

-class ModelConfigData:
+    def to_argument(self, info: TypeInfo, typed: bool, force_optional: bool, use_alias: bool) -> Argument:
+        if typed and info[self.name].type is not None:
+            type_annotation = info[self.name].type
+        else:
+            type_annotation = AnyType(TypeOfAny.explicit)
+        return Argument(
+            variable=self.to_var(info, use_alias),
+            type_annotation=type_annotation,
+            initializer=None,
+            kind=ARG_NAMED_OPT if force_optional or not self.is_required else ARG_NAMED,
+        )
+
+    def serialize(self) -> JsonDict:
+        return self.__dict__
+
+    @classmethod
+    def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'PydanticModelField':
+        return cls(**data)

-    def __init__(self, forbid_extra: Optional[bool]=None, allow_mutation:
-        Optional[bool]=None, frozen: Optional[bool]=None, orm_mode:
-        Optional[bool]=None, allow_population_by_field_name: Optional[bool]
-        =None, has_alias_generator: Optional[bool]=None):
+
+class ModelConfigData:
+    def __init__(
+        self,
+        forbid_extra: Optional[bool] = None,
+        allow_mutation: Optional[bool] = None,
+        frozen: Optional[bool] = None,
+        orm_mode: Optional[bool] = None,
+        allow_population_by_field_name: Optional[bool] = None,
+        has_alias_generator: Optional[bool] = None,
+    ):
         self.forbid_extra = forbid_extra
         self.allow_mutation = allow_mutation
         self.frozen = frozen
@@ -284,39 +765,181 @@ class ModelConfigData:
         self.allow_population_by_field_name = allow_population_by_field_name
         self.has_alias_generator = has_alias_generator

+    def set_values_dict(self) -> Dict[str, Any]:
+        return {k: v for k, v in self.__dict__.items() if v is not None}
+
+    def update(self, config: Optional['ModelConfigData']) -> None:
+        if config is None:
+            return
+        for k, v in config.set_values_dict().items():
+            setattr(self, k, v)
+
+    def setdefault(self, key: str, value: Any) -> None:
+        if getattr(self, key) is None:
+            setattr(self, key, value)
+

 ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_orm call', 'Pydantic')
 ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic')
-ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed',
-    'Pydantic')
-ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior',
-    'Pydantic')
-ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed',
-    'Pydantic')
-ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults',
-    'Pydantic')
-
-
-def add_method(ctx: ClassDefContext, name: str, args: List[Argument],
-    return_type: Type, self_type: Optional[Type]=None, tvar_def: Optional[
-    TypeVarDef]=None, is_classmethod: bool=False, is_new: bool=False) ->None:
+ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic')
+ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic')
+ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic')
+ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic')
+
+
+def error_from_orm(model_name: str, api: CheckerPluginInterface, context: Context) -> None:
+    api.fail(f'"{model_name}" does not have orm_mode=True', context, code=ERROR_ORM)
+
+
+def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None:
+    api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG)
+
+
+def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
+    api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS)
+
+
+def error_unexpected_behavior(
+    detail: str, api: Union[CheckerPluginInterface, SemanticAnalyzerPluginInterface], context: Context
+) -> None:  # pragma: no cover
+    # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path
+    link = 'https://github.com/pydantic/pydantic/issues/new/choose'
+    full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n'
+    full_message += f'Please consider reporting this bug at {link} so we can try to fix it!'
+    api.fail(full_message, context, code=ERROR_UNEXPECTED)
+
+
+def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
+    api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED)
+
+
+def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None:
+    api.fail('Field default and default_factory cannot be specified together', context, code=ERROR_FIELD_DEFAULTS)
+
+
+def add_method(
+    ctx: ClassDefContext,
+    name: str,
+    args: List[Argument],
+    return_type: Type,
+    self_type: Optional[Type] = None,
+    tvar_def: Optional[TypeVarDef] = None,
+    is_classmethod: bool = False,
+    is_new: bool = False,
+    # is_staticmethod: bool = False,
+) -> None:
     """
     Adds a new method to a class.

     This can be dropped if/when https://github.com/python/mypy/issues/7301 is merged
     """
-    pass
-
-
-def get_fullname(x: Union[FuncBase, SymbolNode]) ->str:
+    info = ctx.cls.info
+
+    # First remove any previously generated methods with the same name
+    # to avoid clashes and problems in the semantic analyzer.
+    if name in info.names:
+        sym = info.names[name]
+        if sym.plugin_generated and isinstance(sym.node, FuncDef):
+            ctx.cls.defs.body.remove(sym.node)  # pragma: no cover
+
+    self_type = self_type or fill_typevars(info)
+    if is_classmethod or is_new:
+        first = [Argument(Var('_cls'), TypeType.make_normalized(self_type), None, ARG_POS)]
+    # elif is_staticmethod:
+    #     first = []
+    else:
+        self_type = self_type or fill_typevars(info)
+        first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)]
+    args = first + args
+    arg_types, arg_names, arg_kinds = [], [], []
+    for arg in args:
+        assert arg.type_annotation, 'All arguments must be fully typed.'
+        arg_types.append(arg.type_annotation)
+        arg_names.append(get_name(arg.variable))
+        arg_kinds.append(arg.kind)
+
+    function_type = ctx.api.named_type(f'{BUILTINS_NAME}.function')
+    signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
+    if tvar_def:
+        signature.variables = [tvar_def]
+
+    func = FuncDef(name, args, Block([PassStmt()]))
+    func.info = info
+    func.type = set_callable_name(signature, func)
+    func.is_class = is_classmethod
+    # func.is_static = is_staticmethod
+    func._fullname = get_fullname(info) + '.' + name
+    func.line = info.line
+
+    # NOTE: we would like the plugin generated node to dominate, but we still
+    # need to keep any existing definitions so they get semantically analyzed.
+    if name in info.names:
+        # Get a nice unique name instead.
+        r_name = get_unique_redefinition_name(name, info.names)
+        info.names[r_name] = info.names[name]
+
+    if is_classmethod:  # or is_staticmethod:
+        func.is_decorated = True
+        v = Var(name, func.type)
+        v.info = info
+        v._fullname = func._fullname
+        # if is_classmethod:
+        v.is_classmethod = True
+        dec = Decorator(func, [NameExpr('classmethod')], v)
+        # else:
+        #     v.is_staticmethod = True
+        #     dec = Decorator(func, [NameExpr('staticmethod')], v)
+
+        dec.line = info.line
+        sym = SymbolTableNode(MDEF, dec)
+    else:
+        sym = SymbolTableNode(MDEF, func)
+    sym.plugin_generated = True
+
+    info.names[name] = sym
+    info.defn.defs.body.append(func)
+
+
+def get_fullname(x: Union[FuncBase, SymbolNode]) -> str:
     """
     Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped.
     """
-    pass
+    fn = x.fullname
+    if callable(fn):  # pragma: no cover
+        return fn()
+    return fn


-def get_name(x: Union[FuncBase, SymbolNode]) ->str:
+def get_name(x: Union[FuncBase, SymbolNode]) -> str:
     """
     Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped.
     """
-    pass
+    fn = x.name
+    if callable(fn):  # pragma: no cover
+        return fn()
+    return fn
+
+
+def parse_toml(config_file: str) -> Optional[Dict[str, Any]]:
+    if not config_file.endswith('.toml'):
+        return None
+
+    read_mode = 'rb'
+    if sys.version_info >= (3, 11):
+        import tomllib as toml_
+    else:
+        try:
+            import tomli as toml_
+        except ImportError:
+            # older versions of mypy have toml as a dependency, not tomli
+            read_mode = 'r'
+            try:
+                import toml as toml_  # type: ignore[no-redef]
+            except ImportError:  # pragma: no cover
+                import warnings
+
+                warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.')
+                return None
+
+    with open(config_file, read_mode) as rf:
+        return toml_.load(rf)  # type: ignore[arg-type]
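
A short usage sketch for parse_toml: it returns the whole parsed TOML document (or None when the file is not .toml or no TOML parser is available), so callers pick out the section they need. The pydantic.v1.mypy import path and the [tool.pydantic-mypy] section name below are assumptions for illustration:

from pydantic.v1.mypy import parse_toml  # assumed import path for this module

settings = parse_toml('pyproject.toml') or {}
plugin_settings = settings.get('tool', {}).get('pydantic-mypy', {})
print(plugin_settings)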
diff --git a/pydantic/v1/networks.py b/pydantic/v1/networks.py
index 8106fecb6..ba07b7486 100644
--- a/pydantic/v1/networks.py
+++ b/pydantic/v1/networks.py
@@ -1,19 +1,47 @@
 import re
-from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network, _BaseAddress, _BaseNetwork
-from typing import TYPE_CHECKING, Any, Collection, Dict, Generator, List, Match, Optional, Pattern, Set, Tuple, Type, Union, cast, no_type_check
+from ipaddress import (
+    IPv4Address,
+    IPv4Interface,
+    IPv4Network,
+    IPv6Address,
+    IPv6Interface,
+    IPv6Network,
+    _BaseAddress,
+    _BaseNetwork,
+)
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Collection,
+    Dict,
+    Generator,
+    List,
+    Match,
+    Optional,
+    Pattern,
+    Set,
+    Tuple,
+    Type,
+    Union,
+    cast,
+    no_type_check,
+)
+
 from pydantic.v1 import errors
 from pydantic.v1.utils import Representation, update_not_none
 from pydantic.v1.validators import constr_length_validator, str_validator
+
 if TYPE_CHECKING:
     import email_validator
     from typing_extensions import TypedDict
+
     from pydantic.v1.config import BaseConfig
     from pydantic.v1.fields import ModelField
     from pydantic.v1.typing import AnyCallable
-    CallableGenerator = Generator[AnyCallable, None, None]

+    CallableGenerator = Generator[AnyCallable, None, None]

-    class Parts(TypedDict, total=(False)):
+    class Parts(TypedDict, total=False):
         scheme: str
         user: Optional[str]
         password: Optional[str]
@@ -25,71 +53,152 @@ if TYPE_CHECKING:
         query: Optional[str]
         fragment: Optional[str]

-
-    class HostParts(TypedDict, total=(False)):
+    class HostParts(TypedDict, total=False):
         host: str
         tld: Optional[str]
         host_type: Optional[str]
         port: Optional[str]
         rebuild: bool
+
 else:
     email_validator = None

-
     class Parts(dict):
         pass
-NetworkType = Union[str, bytes, int, Tuple[Union[str, bytes, int], Union[
-    str, int]]]
-__all__ = ['AnyUrl', 'AnyHttpUrl', 'FileUrl', 'HttpUrl', 'stricturl',
-    'EmailStr', 'NameEmail', 'IPvAnyAddress', 'IPvAnyInterface',
-    'IPvAnyNetwork', 'PostgresDsn', 'CockroachDsn', 'AmqpDsn', 'RedisDsn',
-    'MongoDsn', 'KafkaDsn', 'validate_email']
+
+
+NetworkType = Union[str, bytes, int, Tuple[Union[str, bytes, int], Union[str, int]]]
+
+__all__ = [
+    'AnyUrl',
+    'AnyHttpUrl',
+    'FileUrl',
+    'HttpUrl',
+    'stricturl',
+    'EmailStr',
+    'NameEmail',
+    'IPvAnyAddress',
+    'IPvAnyInterface',
+    'IPvAnyNetwork',
+    'PostgresDsn',
+    'CockroachDsn',
+    'AmqpDsn',
+    'RedisDsn',
+    'MongoDsn',
+    'KafkaDsn',
+    'validate_email',
+]
+
 _url_regex_cache = None
 _multi_host_url_regex_cache = None
 _ascii_domain_regex_cache = None
 _int_domain_regex_cache = None
 _host_regex_cache = None
-_host_regex = (
-    '(?:(?P<ipv4>(?:\\d{1,3}\\.){3}\\d{1,3})(?=$|[/:#?])|(?P<ipv6>\\[[A-F0-9]*:[A-F0-9:]+\\])(?=$|[/:#?])|(?P<domain>[^\\s/:?#]+))?(?::(?P<port>\\d+))?'
-    )
-_scheme_regex = '(?:(?P<scheme>[a-z][a-z0-9+\\-.]+)://)?'
-_user_info_regex = '(?:(?P<user>[^\\s:/]*)(?::(?P<password>[^\\s/]*))?@)?'
-_path_regex = '(?P<path>/[^\\s?#]*)?'
-_query_regex = '(?:\\?(?P<query>[^\\s#]*))?'
-_fragment_regex = '(?:#(?P<fragment>[^\\s#]*))?'

-
-def multi_host_url_regex() ->Pattern[str]:
+_host_regex = (
+    r'(?:'
+    r'(?P<ipv4>(?:\d{1,3}\.){3}\d{1,3})(?=$|[/:#?])|'  # ipv4
+    r'(?P<ipv6>\[[A-F0-9]*:[A-F0-9:]+\])(?=$|[/:#?])|'  # ipv6
+    r'(?P<domain>[^\s/:?#]+)'  # domain, validation occurs later
+    r')?'
+    r'(?::(?P<port>\d+))?'  # port
+)
+_scheme_regex = r'(?:(?P<scheme>[a-z][a-z0-9+\-.]+)://)?'  # scheme https://tools.ietf.org/html/rfc3986#appendix-A
+_user_info_regex = r'(?:(?P<user>[^\s:/]*)(?::(?P<password>[^\s/]*))?@)?'
+_path_regex = r'(?P<path>/[^\s?#]*)?'
+_query_regex = r'(?:\?(?P<query>[^\s#]*))?'
+_fragment_regex = r'(?:#(?P<fragment>[^\s#]*))?'
+
+
+def url_regex() -> Pattern[str]:
+    global _url_regex_cache
+    if _url_regex_cache is None:
+        _url_regex_cache = re.compile(
+            rf'{_scheme_regex}{_user_info_regex}{_host_regex}{_path_regex}{_query_regex}{_fragment_regex}',
+            re.IGNORECASE,
+        )
+    return _url_regex_cache
+
+
+def multi_host_url_regex() -> Pattern[str]:
     """
     Compiled multi host url regex.

     In addition to `url_regex`, it allows matching multiple hosts.
     E.g. host1.db.net,host2.db.net
     """
-    pass
+    global _multi_host_url_regex_cache
+    if _multi_host_url_regex_cache is None:
+        _multi_host_url_regex_cache = re.compile(
+            rf'{_scheme_regex}{_user_info_regex}'
+            r'(?P<hosts>([^/]*))'  # validation occurs later
+            rf'{_path_regex}{_query_regex}{_fragment_regex}',
+            re.IGNORECASE,
+        )
+    return _multi_host_url_regex_cache
+
+
+def ascii_domain_regex() -> Pattern[str]:
+    global _ascii_domain_regex_cache
+    if _ascii_domain_regex_cache is None:
+        ascii_chunk = r'[_0-9a-z](?:[-_0-9a-z]{0,61}[_0-9a-z])?'
+        ascii_domain_ending = r'(?P<tld>\.[a-z]{2,63})?\.?'
+        _ascii_domain_regex_cache = re.compile(
+            fr'(?:{ascii_chunk}\.)*?{ascii_chunk}{ascii_domain_ending}', re.IGNORECASE
+        )
+    return _ascii_domain_regex_cache
+
+
+def int_domain_regex() -> Pattern[str]:
+    global _int_domain_regex_cache
+    if _int_domain_regex_cache is None:
+        int_chunk = r'[_0-9a-\U00040000](?:[-_0-9a-\U00040000]{0,61}[_0-9a-\U00040000])?'
+        int_domain_ending = r'(?P<tld>(\.[^\W\d_]{2,63})|(\.(?:xn--)[_0-9a-z-]{2,63}))?\.?'
+        _int_domain_regex_cache = re.compile(fr'(?:{int_chunk}\.)*?{int_chunk}{int_domain_ending}', re.IGNORECASE)
+    return _int_domain_regex_cache
+
+
+def host_regex() -> Pattern[str]:
+    global _host_regex_cache
+    if _host_regex_cache is None:
+        _host_regex_cache = re.compile(
+            _host_regex,
+            re.IGNORECASE,
+        )
+    return _host_regex_cache
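
Each of these helpers compiles its pattern once and caches it at module level; a quick sketch of what the combined URL regex captures, using the group names from the patterns above:

from pydantic.v1.networks import url_regex

m = url_regex().match('https://user:pw@example.com:8080/path?q=1#frag')
assert m is not None
parts = m.groupdict()
print(parts['scheme'], parts['domain'], parts['port'], parts['path'])  # https example.com 8080 /path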


 class AnyUrl(str):
     strip_whitespace = True
     min_length = 1
-    max_length = 2 ** 16
+    max_length = 2**16
     allowed_schemes: Optional[Collection[str]] = None
     tld_required: bool = False
     user_required: bool = False
     host_required: bool = True
     hidden_parts: Set[str] = set()
-    __slots__ = ('scheme', 'user', 'password', 'host', 'tld', 'host_type',
-        'port', 'path', 'query', 'fragment')
+
+    __slots__ = ('scheme', 'user', 'password', 'host', 'tld', 'host_type', 'port', 'path', 'query', 'fragment')

     @no_type_check
-    def __new__(cls, url: Optional[str], **kwargs) ->object:
+    def __new__(cls, url: Optional[str], **kwargs) -> object:
         return str.__new__(cls, cls.build(**kwargs) if url is None else url)

-    def __init__(self, url: str, *, scheme: str, user: Optional[str]=None,
-        password: Optional[str]=None, host: Optional[str]=None, tld:
-        Optional[str]=None, host_type: str='domain', port: Optional[str]=
-        None, path: Optional[str]=None, query: Optional[str]=None, fragment:
-        Optional[str]=None) ->None:
+    def __init__(
+        self,
+        url: str,
+        *,
+        scheme: str,
+        user: Optional[str] = None,
+        password: Optional[str] = None,
+        host: Optional[str] = None,
+        tld: Optional[str] = None,
+        host_type: str = 'domain',
+        port: Optional[str] = None,
+        path: Optional[str] = None,
+        query: Optional[str] = None,
+        fragment: Optional[str] = None,
+    ) -> None:
         str.__init__(url)
         self.scheme = scheme
         self.user = user
@@ -103,75 +212,301 @@ class AnyUrl(str):
         self.fragment = fragment

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, minLength=cls.min_length, maxLength=
-            cls.max_length, format='uri')
+    def build(
+        cls,
+        *,
+        scheme: str,
+        user: Optional[str] = None,
+        password: Optional[str] = None,
+        host: str,
+        port: Optional[str] = None,
+        path: Optional[str] = None,
+        query: Optional[str] = None,
+        fragment: Optional[str] = None,
+        **_kwargs: str,
+    ) -> str:
+        parts = Parts(
+            scheme=scheme,
+            user=user,
+            password=password,
+            host=host,
+            port=port,
+            path=path,
+            query=query,
+            fragment=fragment,
+            **_kwargs,  # type: ignore[misc]
+        )
+
+        url = scheme + '://'
+        if user:
+            url += user
+        if password:
+            url += ':' + password
+        if user or password:
+            url += '@'
+        url += host
+        if port and ('port' not in cls.hidden_parts or cls.get_default_parts(parts).get('port') != port):
+            url += ':' + port
+        if path:
+            url += path
+        if query:
+            url += '?' + query
+        if fragment:
+            url += '#' + fragment
+        return url

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length, format='uri')
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls.validate

     @classmethod
-    def _build_url(cls, m: Match[str], url: str, parts: 'Parts') ->'AnyUrl':
+    def validate(cls, value: Any, field: 'ModelField', config: 'BaseConfig') -> 'AnyUrl':
+        if value.__class__ == cls:
+            return value
+        value = str_validator(value)
+        if cls.strip_whitespace:
+            value = value.strip()
+        url: str = cast(str, constr_length_validator(value, field, config))
+
+        m = cls._match_url(url)
+        # the regex should always match; if it doesn't, please report it with details of the URL tried
+        assert m, 'URL regex failed unexpectedly'
+
+        original_parts = cast('Parts', m.groupdict())
+        parts = cls.apply_default_parts(original_parts)
+        parts = cls.validate_parts(parts)
+
+        if m.end() != len(url):
+            raise errors.UrlExtraError(extra=url[m.end() :])
+
+        return cls._build_url(m, url, parts)
+
+    @classmethod
+    def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'AnyUrl':
         """
         Validate hosts and build the AnyUrl object. Split from `validate` so this method
         can be altered in `MultiHostDsn`.
         """
-        pass
+        host, tld, host_type, rebuild = cls.validate_host(parts)
+
+        return cls(
+            None if rebuild else url,
+            scheme=parts['scheme'],
+            user=parts['user'],
+            password=parts['password'],
+            host=host,
+            tld=tld,
+            host_type=host_type,
+            port=parts['port'],
+            path=parts['path'],
+            query=parts['query'],
+            fragment=parts['fragment'],
+        )
+
+    @staticmethod
+    def _match_url(url: str) -> Optional[Match[str]]:
+        return url_regex().match(url)
+
+    @staticmethod
+    def _validate_port(port: Optional[str]) -> None:
+        if port is not None and int(port) > 65_535:
+            raise errors.UrlPortError()

     @classmethod
-    def validate_parts(cls, parts: 'Parts', validate_port: bool=True
-        ) ->'Parts':
+    def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts':
         """
         A method used to validate parts of a URL.
         Could be overridden to set default values for parts if missing
         """
-        pass
+        scheme = parts['scheme']
+        if scheme is None:
+            raise errors.UrlSchemeError()
+
+        if cls.allowed_schemes and scheme.lower() not in cls.allowed_schemes:
+            raise errors.UrlSchemePermittedError(set(cls.allowed_schemes))
+
+        if validate_port:
+            cls._validate_port(parts['port'])
+
+        user = parts['user']
+        if cls.user_required and user is None:
+            raise errors.UrlUserInfoError()

-    def __repr__(self) ->str:
-        extra = ', '.join(f'{n}={getattr(self, n)!r}' for n in self.
-            __slots__ if getattr(self, n) is not None)
+        return parts
+
+    @classmethod
+    def validate_host(cls, parts: 'Parts') -> Tuple[str, Optional[str], str, bool]:
+        tld, host_type, rebuild = None, None, False
+        for f in ('domain', 'ipv4', 'ipv6'):
+            host = parts[f]  # type: ignore[literal-required]
+            if host:
+                host_type = f
+                break
+
+        if host is None:
+            if cls.host_required:
+                raise errors.UrlHostError()
+        elif host_type == 'domain':
+            is_international = False
+            d = ascii_domain_regex().fullmatch(host)
+            if d is None:
+                d = int_domain_regex().fullmatch(host)
+                if d is None:
+                    raise errors.UrlHostError()
+                is_international = True
+
+            tld = d.group('tld')
+            if tld is None and not is_international:
+                d = int_domain_regex().fullmatch(host)
+                assert d is not None
+                tld = d.group('tld')
+                is_international = True
+
+            if tld is not None:
+                tld = tld[1:]
+            elif cls.tld_required:
+                raise errors.UrlHostTldError()
+
+            if is_international:
+                host_type = 'int_domain'
+                rebuild = True
+                host = host.encode('idna').decode('ascii')
+                if tld is not None:
+                    tld = tld.encode('idna').decode('ascii')
+
+        return host, tld, host_type, rebuild  # type: ignore
+
+    @staticmethod
+    def get_default_parts(parts: 'Parts') -> 'Parts':
+        return {}
+
+    @classmethod
+    def apply_default_parts(cls, parts: 'Parts') -> 'Parts':
+        for key, value in cls.get_default_parts(parts).items():
+            if not parts[key]:  # type: ignore[literal-required]
+                parts[key] = value  # type: ignore[literal-required]
+        return parts
+
+    def __repr__(self) -> str:
+        extra = ', '.join(f'{n}={getattr(self, n)!r}' for n in self.__slots__ if getattr(self, n) is not None)
         return f'{self.__class__.__name__}({super().__repr__()}, {extra})'


 class AnyHttpUrl(AnyUrl):
     allowed_schemes = {'http', 'https'}
+
     __slots__ = ()


 class HttpUrl(AnyHttpUrl):
     tld_required = True
+    # https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers
     max_length = 2083
     hidden_parts = {'port'}

+    @staticmethod
+    def get_default_parts(parts: 'Parts') -> 'Parts':
+        return {'port': '80' if parts['scheme'] == 'http' else '443'}
+

 class FileUrl(AnyUrl):
     allowed_schemes = {'file'}
     host_required = False
+
     __slots__ = ()


 class MultiHostDsn(AnyUrl):
     __slots__ = AnyUrl.__slots__ + ('hosts',)

-    def __init__(self, *args: Any, hosts: Optional[List['HostParts']]=None,
-        **kwargs: Any):
+    def __init__(self, *args: Any, hosts: Optional[List['HostParts']] = None, **kwargs: Any):
         super().__init__(*args, **kwargs)
         self.hosts = hosts

+    @staticmethod
+    def _match_url(url: str) -> Optional[Match[str]]:
+        return multi_host_url_regex().match(url)
+
+    @classmethod
+    def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts':
+        return super().validate_parts(parts, validate_port=False)
+
+    @classmethod
+    def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'MultiHostDsn':
+        hosts_parts: List['HostParts'] = []
+        host_re = host_regex()
+        for host in m.groupdict()['hosts'].split(','):
+            d: Parts = host_re.match(host).groupdict()  # type: ignore
+            host, tld, host_type, rebuild = cls.validate_host(d)
+            port = d.get('port')
+            cls._validate_port(port)
+            hosts_parts.append(
+                {
+                    'host': host,
+                    'host_type': host_type,
+                    'tld': tld,
+                    'rebuild': rebuild,
+                    'port': port,
+                }
+            )
+
+        if len(hosts_parts) > 1:
+            return cls(
+                None if any([hp['rebuild'] for hp in hosts_parts]) else url,
+                scheme=parts['scheme'],
+                user=parts['user'],
+                password=parts['password'],
+                path=parts['path'],
+                query=parts['query'],
+                fragment=parts['fragment'],
+                host_type=None,
+                hosts=hosts_parts,
+            )
+        else:
+            # backwards compatibility with single host
+            host_part = hosts_parts[0]
+            return cls(
+                None if host_part['rebuild'] else url,
+                scheme=parts['scheme'],
+                user=parts['user'],
+                password=parts['password'],
+                host=host_part['host'],
+                tld=host_part['tld'],
+                host_type=host_part['host_type'],
+                port=host_part.get('port'),
+                path=parts['path'],
+                query=parts['query'],
+                fragment=parts['fragment'],
+            )
+
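
When more than one host is present, _build_url stores the per-host details on hosts instead of the single-host attributes; a sketch using the PostgresDsn subclass defined just below:

from pydantic.v1 import BaseModel, PostgresDsn

class Db(BaseModel):
    dsn: PostgresDsn

db = Db(dsn='postgresql://user:pass@host1.db.net:5432,host2.db.net:5432/app')
print(db.dsn.hosts)  # two HostParts dicts, one per comma-separated host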

 class PostgresDsn(MultiHostDsn):
-    allowed_schemes = {'postgres', 'postgresql', 'postgresql+asyncpg',
-        'postgresql+pg8000', 'postgresql+psycopg', 'postgresql+psycopg2',
-        'postgresql+psycopg2cffi', 'postgresql+py-postgresql',
-        'postgresql+pygresql'}
+    allowed_schemes = {
+        'postgres',
+        'postgresql',
+        'postgresql+asyncpg',
+        'postgresql+pg8000',
+        'postgresql+psycopg',
+        'postgresql+psycopg2',
+        'postgresql+psycopg2cffi',
+        'postgresql+py-postgresql',
+        'postgresql+pygresql',
+    }
     user_required = True
+
     __slots__ = ()


 class CockroachDsn(AnyUrl):
-    allowed_schemes = {'cockroachdb', 'cockroachdb+psycopg2',
-        'cockroachdb+asyncpg'}
+    allowed_schemes = {
+        'cockroachdb',
+        'cockroachdb+psycopg2',
+        'cockroachdb+asyncpg',
+    }
     user_required = True


@@ -185,27 +520,83 @@ class RedisDsn(AnyUrl):
     allowed_schemes = {'redis', 'rediss'}
     host_required = False

+    @staticmethod
+    def get_default_parts(parts: 'Parts') -> 'Parts':
+        return {
+            'domain': 'localhost' if not (parts['ipv4'] or parts['ipv6']) else '',
+            'port': '6379',
+            'path': '/0',
+        }
+

 class MongoDsn(AnyUrl):
     allowed_schemes = {'mongodb'}

+    # TODO: a generic "Parts" is needed for "Replica Set", "Sharded Cluster", and other mongodb deployment modes
+    @staticmethod
+    def get_default_parts(parts: 'Parts') -> 'Parts':
+        return {
+            'port': '27017',
+        }
+

 class KafkaDsn(AnyUrl):
     allowed_schemes = {'kafka'}

+    @staticmethod
+    def get_default_parts(parts: 'Parts') -> 'Parts':
+        return {
+            'domain': 'localhost',
+            'port': '9092',
+        }
+
+
+def stricturl(
+    *,
+    strip_whitespace: bool = True,
+    min_length: int = 1,
+    max_length: int = 2**16,
+    tld_required: bool = True,
+    host_required: bool = True,
+    allowed_schemes: Optional[Collection[str]] = None,
+) -> Type[AnyUrl]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(
+        strip_whitespace=strip_whitespace,
+        min_length=min_length,
+        max_length=max_length,
+        tld_required=tld_required,
+        host_required=host_required,
+        allowed_schemes=allowed_schemes,
+    )
+    return type('UrlValue', (AnyUrl,), namespace)
+
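
stricturl is a factory for one-off AnyUrl subclasses, mirroring constr and friends; a small sketch of using it as a field type:

from pydantic.v1 import BaseModel, stricturl

class Repo(BaseModel):
    url: stricturl(allowed_schemes={'https'})

print(Repo(url='https://github.com/pydantic/pydantic').url.host)  # github.com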

-class EmailStr(str):
+def import_email_validator() -> None:
+    global email_validator
+    try:
+        import email_validator
+    except ImportError as e:
+        raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') from e

+
+class EmailStr(str):
     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
         field_schema.update(type='string', format='email')

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
+        # included here and below so the error happens straight away
         import_email_validator()
+
         yield str_validator
         yield cls.validate

+    @classmethod
+    def validate(cls, value: Union[str]) -> str:
+        return validate_email(value)[1]
+

 class NameEmail(Representation):
     __slots__ = 'name', 'email'
@@ -214,20 +605,27 @@ class NameEmail(Representation):
         self.name = name
         self.email = email

-    def __eq__(self, other: Any) ->bool:
-        return isinstance(other, NameEmail) and (self.name, self.email) == (
-            other.name, other.email)
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, NameEmail) and (self.name, self.email) == (other.name, other.email)

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
         field_schema.update(type='string', format='name-email')

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         import_email_validator()
+
         yield cls.validate

-    def __str__(self) ->str:
+    @classmethod
+    def validate(cls, value: Any) -> 'NameEmail':
+        if value.__class__ == cls:
+            return value
+        value = str_validator(value)
+        return cls(*validate_email(value))
+
+    def __str__(self) -> str:
         return f'{self.name} <{self.email}>'


@@ -235,45 +633,82 @@ class IPvAnyAddress(_BaseAddress):
     __slots__ = ()

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
         field_schema.update(type='string', format='ipvanyaddress')

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls.validate

+    @classmethod
+    def validate(cls, value: Union[str, bytes, int]) -> Union[IPv4Address, IPv6Address]:
+        try:
+            return IPv4Address(value)
+        except ValueError:
+            pass
+
+        try:
+            return IPv6Address(value)
+        except ValueError:
+            raise errors.IPvAnyAddressError()
+

 class IPvAnyInterface(_BaseAddress):
     __slots__ = ()

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
         field_schema.update(type='string', format='ipvanyinterface')

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls.validate

+    @classmethod
+    def validate(cls, value: NetworkType) -> Union[IPv4Interface, IPv6Interface]:
+        try:
+            return IPv4Interface(value)
+        except ValueError:
+            pass
+
+        try:
+            return IPv6Interface(value)
+        except ValueError:
+            raise errors.IPvAnyInterfaceError()

-class IPvAnyNetwork(_BaseNetwork):

+class IPvAnyNetwork(_BaseNetwork):  # type: ignore
     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
         field_schema.update(type='string', format='ipvanynetwork')

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls.validate

+    @classmethod
+    def validate(cls, value: NetworkType) -> Union[IPv4Network, IPv6Network]:
+        # Assume IP Network is defined with a default value for ``strict`` argument.
+        # Define your own class if you want to specify network address check strictness.
+        try:
+            return IPv4Network(value)
+        except ValueError:
+            pass
+
+        try:
+            return IPv6Network(value)
+        except ValueError:
+            raise errors.IPvAnyNetworkError()
+

-pretty_email_regex = re.compile('([\\w ]*?) *<(.*)> *')
+pretty_email_regex = re.compile(r'([\w ]*?) *<(.*)> *')
 MAX_EMAIL_LENGTH = 2048
 """Maximum length for an email.
 A somewhat arbitrary but very generous number compared to what is allowed by most implementations.
 """


-def validate_email(value: Union[str]) ->Tuple[str, str]:
+def validate_email(value: Union[str]) -> Tuple[str, str]:
     """
     Email address validation using https://pypi.org/project/email-validator/
     Notes:
@@ -281,4 +716,32 @@ def validate_email(value: Union[str]) ->Tuple[str, str]:
     * "John Doe <local_part@domain.com>" style "pretty" email addresses are processed
     * spaces are stripped from the beginning and end of addresses but no error is raised
     """
-    pass
+    if email_validator is None:
+        import_email_validator()
+
+    if len(value) > MAX_EMAIL_LENGTH:
+        raise errors.EmailError()
+
+    m = pretty_email_regex.fullmatch(value)
+    name: Union[str, None] = None
+    if m:
+        name, value = m.groups()
+    email = value.strip()
+    try:
+        parts = email_validator.validate_email(email, check_deliverability=False)
+    except email_validator.EmailNotValidError as e:
+        raise errors.EmailError from e
+
+    if hasattr(parts, 'normalized'):
+        # email-validator >= 2
+        email = parts.normalized
+        assert email is not None
+        name = name or parts.local_part
+        return name, email
+    else:
+        # email-validator >1, <2
+        at_index = email.index('@')
+        local_part = email[:at_index]  # RFC 5321, local part must be case-sensitive.
+        global_part = email[at_index:].lower()
+
+        return name or local_part, local_part + global_part
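
A sketch of the public helper above; it requires the optional email-validator dependency to be installed:

from pydantic.v1.networks import validate_email

name, email = validate_email('John Doe <John.Doe@EXAMPLE.com>')
print(name, email)  # 'John Doe' plus a normalized address such as 'John.Doe@example.com'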
diff --git a/pydantic/v1/parse.py b/pydantic/v1/parse.py
index 1cbc25179..431d75a64 100644
--- a/pydantic/v1/parse.py
+++ b/pydantic/v1/parse.py
@@ -3,9 +3,64 @@ import pickle
 from enum import Enum
 from pathlib import Path
 from typing import Any, Callable, Union
+
 from pydantic.v1.types import StrBytes


 class Protocol(str, Enum):
     json = 'json'
     pickle = 'pickle'
+
+
+def load_str_bytes(
+    b: StrBytes,
+    *,
+    content_type: str = None,
+    encoding: str = 'utf8',
+    proto: Protocol = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+) -> Any:
+    if proto is None and content_type:
+        if content_type.endswith(('json', 'javascript')):
+            pass
+        elif allow_pickle and content_type.endswith('pickle'):
+            proto = Protocol.pickle
+        else:
+            raise TypeError(f'Unknown content-type: {content_type}')
+
+    proto = proto or Protocol.json
+
+    if proto == Protocol.json:
+        if isinstance(b, bytes):
+            b = b.decode(encoding)
+        return json_loads(b)
+    elif proto == Protocol.pickle:
+        if not allow_pickle:
+            raise RuntimeError('Trying to decode with pickle with allow_pickle=False')
+        bb = b if isinstance(b, bytes) else b.encode()
+        return pickle.loads(bb)
+    else:
+        raise TypeError(f'Unknown protocol: {proto}')
+
+
+def load_file(
+    path: Union[str, Path],
+    *,
+    content_type: str = None,
+    encoding: str = 'utf8',
+    proto: Protocol = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+) -> Any:
+    path = Path(path)
+    b = path.read_bytes()
+    if content_type is None:
+        if path.suffix in ('.js', '.json'):
+            proto = Protocol.json
+        elif path.suffix == '.pkl':
+            proto = Protocol.pickle
+
+    return load_str_bytes(
+        b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads
+    )
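
A sketch of how the two loaders pick a protocol from the content type or the file suffix:

from pathlib import Path
from pydantic.v1.parse import load_str_bytes, load_file

print(load_str_bytes(b'{"a": 1}', content_type='application/json'))  # {'a': 1}

path = Path('example.json')   # scratch file, for illustration only
path.write_text('{"b": 2}')
print(load_file(path))        # {'b': 2}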
diff --git a/pydantic/v1/schema.py b/pydantic/v1/schema.py
index 073f563af..bac4c0ac5 100644
--- a/pydantic/v1/schema.py
+++ b/pydantic/v1/schema.py
@@ -7,28 +7,114 @@ from decimal import Decimal
 from enum import Enum
 from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Callable, Dict, ForwardRef, FrozenSet, Generic, Iterable, List, Optional, Pattern, Sequence, Set, Tuple, Type, TypeVar, Union, cast
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    ForwardRef,
+    FrozenSet,
+    Generic,
+    Iterable,
+    List,
+    Optional,
+    Pattern,
+    Sequence,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+)
 from uuid import UUID
+
 from typing_extensions import Annotated, Literal
-from pydantic.v1.fields import MAPPING_LIKE_SHAPES, SHAPE_DEQUE, SHAPE_FROZENSET, SHAPE_GENERIC, SHAPE_ITERABLE, SHAPE_LIST, SHAPE_SEQUENCE, SHAPE_SET, SHAPE_SINGLETON, SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS, FieldInfo, ModelField
+
+from pydantic.v1.fields import (
+    MAPPING_LIKE_SHAPES,
+    SHAPE_DEQUE,
+    SHAPE_FROZENSET,
+    SHAPE_GENERIC,
+    SHAPE_ITERABLE,
+    SHAPE_LIST,
+    SHAPE_SEQUENCE,
+    SHAPE_SET,
+    SHAPE_SINGLETON,
+    SHAPE_TUPLE,
+    SHAPE_TUPLE_ELLIPSIS,
+    FieldInfo,
+    ModelField,
+)
 from pydantic.v1.json import pydantic_encoder
 from pydantic.v1.networks import AnyUrl, EmailStr
-from pydantic.v1.types import ConstrainedDecimal, ConstrainedFloat, ConstrainedFrozenSet, ConstrainedInt, ConstrainedList, ConstrainedSet, ConstrainedStr, SecretBytes, SecretStr, StrictBytes, StrictStr, conbytes, condecimal, confloat, confrozenset, conint, conlist, conset, constr
-from pydantic.v1.typing import all_literal_values, get_args, get_origin, get_sub_types, is_callable_type, is_literal_type, is_namedtuple, is_none_type, is_union
+from pydantic.v1.types import (
+    ConstrainedDecimal,
+    ConstrainedFloat,
+    ConstrainedFrozenSet,
+    ConstrainedInt,
+    ConstrainedList,
+    ConstrainedSet,
+    ConstrainedStr,
+    SecretBytes,
+    SecretStr,
+    StrictBytes,
+    StrictStr,
+    conbytes,
+    condecimal,
+    confloat,
+    confrozenset,
+    conint,
+    conlist,
+    conset,
+    constr,
+)
+from pydantic.v1.typing import (
+    all_literal_values,
+    get_args,
+    get_origin,
+    get_sub_types,
+    is_callable_type,
+    is_literal_type,
+    is_namedtuple,
+    is_none_type,
+    is_union,
+)
 from pydantic.v1.utils import ROOT_KEY, get_model, lenient_issubclass
+
 if TYPE_CHECKING:
     from pydantic.v1.dataclasses import Dataclass
     from pydantic.v1.main import BaseModel
+
 default_prefix = '#/definitions/'
 default_ref_template = '#/definitions/{model}'
+
 TypeModelOrEnum = Union[Type['BaseModel'], Type[Enum]]
 TypeModelSet = Set[TypeModelOrEnum]


-def schema(models: Sequence[Union[Type['BaseModel'], Type['Dataclass']]], *,
-    by_alias: bool=True, title: Optional[str]=None, description: Optional[
-    str]=None, ref_prefix: Optional[str]=None, ref_template: str=
-    default_ref_template) ->Dict[str, Any]:
+def _apply_modify_schema(
+    modify_schema: Callable[..., None], field: Optional[ModelField], field_schema: Dict[str, Any]
+) -> None:
+    from inspect import signature
+
+    sig = signature(modify_schema)
+    args = set(sig.parameters.keys())
+    if 'field' in args or 'kwargs' in args:
+        modify_schema(field_schema, field=field)
+    else:
+        modify_schema(field_schema)
+
+
+def schema(
+    models: Sequence[Union[Type['BaseModel'], Type['Dataclass']]],
+    *,
+    by_alias: bool = True,
+    title: Optional[str] = None,
+    description: Optional[str] = None,
+    ref_prefix: Optional[str] = None,
+    ref_template: str = default_ref_template,
+) -> Dict[str, Any]:
     """
     Process a list of models and generate a single JSON Schema with all of them defined in the ``definitions``
     top-level JSON key, including their sub-models.
@@ -48,12 +134,37 @@ def schema(models: Sequence[Union[Type['BaseModel'], Type['Dataclass']]], *,
     :return: dict with the JSON Schema with a ``definitions`` top-level key including the schema definitions for
       the models and sub-models passed in ``models``.
     """
-    pass
-
-
-def model_schema(model: Union[Type['BaseModel'], Type['Dataclass']],
-    by_alias: bool=True, ref_prefix: Optional[str]=None, ref_template: str=
-    default_ref_template) ->Dict[str, Any]:
+    clean_models = [get_model(model) for model in models]
+    flat_models = get_flat_models_from_models(clean_models)
+    model_name_map = get_model_name_map(flat_models)
+    definitions = {}
+    output_schema: Dict[str, Any] = {}
+    if title:
+        output_schema['title'] = title
+    if description:
+        output_schema['description'] = description
+    for model in clean_models:
+        m_schema, m_definitions, m_nested_models = model_process_schema(
+            model,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+        )
+        definitions.update(m_definitions)
+        model_name = model_name_map[model]
+        definitions[model_name] = m_schema
+    if definitions:
+        output_schema['definitions'] = definitions
+    return output_schema
+
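
A sketch of the top-level schema() helper collecting several models into one document with shared definitions:

from pydantic.v1 import BaseModel
from pydantic.v1.schema import schema

class Pet(BaseModel):
    name: str

class Person(BaseModel):
    pet: Pet

top_level = schema([Person, Pet], title='My Schema')
print(sorted(top_level['definitions']))  # ['Person', 'Pet']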
+
+def model_schema(
+    model: Union[Type['BaseModel'], Type['Dataclass']],
+    by_alias: bool = True,
+    ref_prefix: Optional[str] = None,
+    ref_template: str = default_ref_template,
+) -> Dict[str, Any]:
     """
     Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level
     JSON key.
@@ -70,13 +181,53 @@ def model_schema(model: Union[Type['BaseModel'], Type['Dataclass']],
       sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``.
     :return: dict with the JSON Schema for the passed ``model``
     """
-    pass
-
-
-def field_schema(field: ModelField, *, by_alias: bool=True, model_name_map:
-    Dict[TypeModelOrEnum, str], ref_prefix: Optional[str]=None,
-    ref_template: str=default_ref_template, known_models: Optional[
-    TypeModelSet]=None) ->Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
+    model = get_model(model)
+    flat_models = get_flat_models_from_model(model)
+    model_name_map = get_model_name_map(flat_models)
+    model_name = model_name_map[model]
+    m_schema, m_definitions, nested_models = model_process_schema(
+        model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template
+    )
+    if model_name in nested_models:
+        # model_name is in Nested models, it has circular references
+        m_definitions[model_name] = m_schema
+        m_schema = get_schema_ref(model_name, ref_prefix, ref_template, False)
+    if m_definitions:
+        m_schema.update({'definitions': m_definitions})
+    return m_schema
+
+
+def get_field_info_schema(field: ModelField, schema_overrides: bool = False) -> Tuple[Dict[str, Any], bool]:
+    # If no title is explicitly set, we don't set title in the schema for enums.
+    # The behaviour is the same as `BaseModel` reference, where the default title
+    # is in the definitions part of the schema.
+    schema_: Dict[str, Any] = {}
+    if field.field_info.title or not lenient_issubclass(field.type_, Enum):
+        schema_['title'] = field.field_info.title or field.alias.title().replace('_', ' ')
+
+    if field.field_info.title:
+        schema_overrides = True
+
+    if field.field_info.description:
+        schema_['description'] = field.field_info.description
+        schema_overrides = True
+
+    if not field.required and field.default is not None and not is_callable_type(field.outer_type_):
+        schema_['default'] = encode_default(field.default)
+        schema_overrides = True
+
+    return schema_, schema_overrides
+
+
+def field_schema(
+    field: ModelField,
+    *,
+    by_alias: bool = True,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_prefix: Optional[str] = None,
+    ref_template: str = default_ref_template,
+    known_models: Optional[TypeModelSet] = None,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
     """
     Process a Pydantic field and return a tuple with a JSON Schema for it as the first item.
     Also return a dictionary of definitions with models as keys and their schemas as values. If the passed field
@@ -94,29 +245,81 @@ def field_schema(field: ModelField, *, by_alias: bool=True, model_name_map:
     :param known_models: used to solve circular references
     :return: tuple of the schema for this field and additional definitions
     """
-    pass
-
-
-numeric_types = int, float, Decimal
-_str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...
-    ] = (('max_length', numeric_types, 'maxLength'), ('min_length',
-    numeric_types, 'minLength'), ('regex', str, 'pattern'))
-_numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...
-    ] = (('gt', numeric_types, 'exclusiveMinimum'), ('lt', numeric_types,
-    'exclusiveMaximum'), ('ge', numeric_types, 'minimum'), ('le',
-    numeric_types, 'maximum'), ('multiple_of', numeric_types, 'multipleOf'))
-
-
-def get_field_schema_validations(field: ModelField) ->Dict[str, Any]:
+    s, schema_overrides = get_field_info_schema(field)
+
+    validation_schema = get_field_schema_validations(field)
+    if validation_schema:
+        s.update(validation_schema)
+        schema_overrides = True
+
+    f_schema, f_definitions, f_nested_models = field_type_schema(
+        field,
+        by_alias=by_alias,
+        model_name_map=model_name_map,
+        schema_overrides=schema_overrides,
+        ref_prefix=ref_prefix,
+        ref_template=ref_template,
+        known_models=known_models or set(),
+    )
+
+    # $ref will only be returned when there are no schema_overrides
+    if '$ref' in f_schema:
+        return f_schema, f_definitions, f_nested_models
+    else:
+        s.update(f_schema)
+        return s, f_definitions, f_nested_models
+
+
+numeric_types = (int, float, Decimal)
+_str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = (
+    ('max_length', numeric_types, 'maxLength'),
+    ('min_length', numeric_types, 'minLength'),
+    ('regex', str, 'pattern'),
+)
+
+_numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = (
+    ('gt', numeric_types, 'exclusiveMinimum'),
+    ('lt', numeric_types, 'exclusiveMaximum'),
+    ('ge', numeric_types, 'minimum'),
+    ('le', numeric_types, 'maximum'),
+    ('multiple_of', numeric_types, 'multipleOf'),
+)
+
+
+def get_field_schema_validations(field: ModelField) -> Dict[str, Any]:
     """
     Get the JSON Schema validation keywords for a ``field`` with an annotation of
     a Pydantic ``FieldInfo`` with validation arguments.
     """
-    pass
-
-
-def get_model_name_map(unique_models: TypeModelSet) ->Dict[TypeModelOrEnum, str
-    ]:
+    f_schema: Dict[str, Any] = {}
+
+    if lenient_issubclass(field.type_, Enum):
+        # schema is already updated by `enum_process_schema`; just update with field extra
+        if field.field_info.extra:
+            f_schema.update(field.field_info.extra)
+        return f_schema
+
+    if lenient_issubclass(field.type_, (str, bytes)):
+        for attr_name, t, keyword in _str_types_attrs:
+            attr = getattr(field.field_info, attr_name, None)
+            if isinstance(attr, t):
+                f_schema[keyword] = attr
+    if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool):
+        for attr_name, t, keyword in _numeric_types_attrs:
+            attr = getattr(field.field_info, attr_name, None)
+            if isinstance(attr, t):
+                f_schema[keyword] = attr
+    if field.field_info is not None and field.field_info.const:
+        f_schema['const'] = field.default
+    if field.field_info.extra:
+        f_schema.update(field.field_info.extra)
+    modify_schema = getattr(field.outer_type_, '__modify_schema__', None)
+    if modify_schema:
+        _apply_modify_schema(modify_schema, field, f_schema)
+    return f_schema
+
+
+def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]:
     """
     Process a set of models and generate unique names for them to be used as keys in the JSON Schema
     definitions. By default the names are the same as the class name. But if two models in different Python
@@ -126,11 +329,24 @@ def get_model_name_map(unique_models: TypeModelSet) ->Dict[TypeModelOrEnum, str
     :param unique_models: a Python set of models
     :return: dict mapping models to names
     """
-    pass
-
-
-def get_flat_models_from_model(model: Type['BaseModel'], known_models:
-    Optional[TypeModelSet]=None) ->TypeModelSet:
+    name_model_map = {}
+    conflicting_names: Set[str] = set()
+    for model in unique_models:
+        model_name = normalize_name(model.__name__)
+        if model_name in conflicting_names:
+            model_name = get_long_model_name(model)
+            name_model_map[model_name] = model
+        elif model_name in name_model_map:
+            conflicting_names.add(model_name)
+            conflicting_model = name_model_map.pop(model_name)
+            name_model_map[get_long_model_name(conflicting_model)] = conflicting_model
+            name_model_map[get_long_model_name(model)] = model
+        else:
+            name_model_map[model_name] = model
+    return {v: k for k, v in name_model_map.items()}
+
+
+def get_flat_models_from_model(model: Type['BaseModel'], known_models: Optional[TypeModelSet] = None) -> TypeModelSet:
     """
     Take a single ``model`` and generate a set with itself and all the sub-models in the tree. I.e. if you pass
     model ``Foo`` (subclass of Pydantic ``BaseModel``) as ``model``, and it has a field of type ``Bar`` (also
@@ -141,11 +357,16 @@ def get_flat_models_from_model(model: Type['BaseModel'], known_models:
     :param known_models: used to solve circular references
     :return: a set with the initial model and all its sub-models
     """
-    pass
+    known_models = known_models or set()
+    flat_models: TypeModelSet = set()
+    flat_models.add(model)
+    known_models |= flat_models
+    fields = cast(Sequence[ModelField], model.__fields__.values())
+    flat_models |= get_flat_models_from_fields(fields, known_models=known_models)
+    return flat_models


-def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet
-    ) ->TypeModelSet:
+def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet) -> TypeModelSet:
     """
     Take a single Pydantic ``ModelField`` (from a model) that could have been declared as a subclass of BaseModel
     (so, it could be a submodel), and generate a set with its model and all the sub-models in the tree.
@@ -157,11 +378,24 @@ def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet
     :param known_models: used to solve circular references
     :return: a set with the model used in the declaration for this field, if any, and all its sub-models
     """
-    pass
+    from pydantic.v1.main import BaseModel
+
+    flat_models: TypeModelSet = set()
+
+    field_type = field.type_
+    if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel):
+        field_type = field_type.__pydantic_model__

+    if field.sub_fields and not lenient_issubclass(field_type, BaseModel):
+        flat_models |= get_flat_models_from_fields(field.sub_fields, known_models=known_models)
+    elif lenient_issubclass(field_type, BaseModel) and field_type not in known_models:
+        flat_models |= get_flat_models_from_model(field_type, known_models=known_models)
+    elif lenient_issubclass(field_type, Enum):
+        flat_models.add(field_type)
+    return flat_models

-def get_flat_models_from_fields(fields: Sequence[ModelField], known_models:
-    TypeModelSet) ->TypeModelSet:
+
+def get_flat_models_from_fields(fields: Sequence[ModelField], known_models: TypeModelSet) -> TypeModelSet:
     """
     Take a list of Pydantic  ``ModelField``s (from a model) that could have been declared as subclasses of ``BaseModel``
     (so, any of them could be a submodel), and generate a set with their models and all the sub-models in the tree.
@@ -173,37 +407,157 @@ def get_flat_models_from_fields(fields: Sequence[ModelField], known_models:
     :param known_models: used to solve circular references
     :return: a set with any model declared in the fields, and all their sub-models
     """
-    pass
+    flat_models: TypeModelSet = set()
+    for field in fields:
+        flat_models |= get_flat_models_from_field(field, known_models=known_models)
+    return flat_models


-def get_flat_models_from_models(models: Sequence[Type['BaseModel']]
-    ) ->TypeModelSet:
+def get_flat_models_from_models(models: Sequence[Type['BaseModel']]) -> TypeModelSet:
     """
     Take a list of ``models`` and generate a set with them and all their sub-models in their trees. I.e. if you pass
     a list of two models, ``Foo`` and ``Bar``, both subclasses of Pydantic ``BaseModel`` as models, and ``Bar`` has
     a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
     """
-    pass
-
-
-def field_type_schema(field: ModelField, *, by_alias: bool, model_name_map:
-    Dict[TypeModelOrEnum, str], ref_template: str, schema_overrides: bool=
-    False, ref_prefix: Optional[str]=None, known_models: TypeModelSet) ->Tuple[
-    Dict[str, Any], Dict[str, Any], Set[str]]:
+    flat_models: TypeModelSet = set()
+    for model in models:
+        flat_models |= get_flat_models_from_model(model)
+    return flat_models
+
+
+def get_long_model_name(model: TypeModelOrEnum) -> str:
+    return f'{model.__module__}__{model.__qualname__}'.replace('.', '__')
+
+
+def field_type_schema(
+    field: ModelField,
+    *,
+    by_alias: bool,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_template: str,
+    schema_overrides: bool = False,
+    ref_prefix: Optional[str] = None,
+    known_models: TypeModelSet,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
     """
     Used by ``field_schema()``, you probably should be using that function.

     Take a single ``field`` and generate the schema for its type only, not including additional
     information as title, etc. Also return additional schema definitions, from sub-models.
     """
-    pass
-
-
-def model_process_schema(model: TypeModelOrEnum, *, by_alias: bool=True,
-    model_name_map: Dict[TypeModelOrEnum, str], ref_prefix: Optional[str]=
-    None, ref_template: str=default_ref_template, known_models: Optional[
-    TypeModelSet]=None, field: Optional[ModelField]=None) ->Tuple[Dict[str,
-    Any], Dict[str, Any], Set[str]]:
+    from pydantic.v1.main import BaseModel  # noqa: F811
+
+    definitions = {}
+    nested_models: Set[str] = set()
+    f_schema: Dict[str, Any]
+    if field.shape in {
+        SHAPE_LIST,
+        SHAPE_TUPLE_ELLIPSIS,
+        SHAPE_SEQUENCE,
+        SHAPE_SET,
+        SHAPE_FROZENSET,
+        SHAPE_ITERABLE,
+        SHAPE_DEQUE,
+    }:
+        items_schema, f_definitions, f_nested_models = field_singleton_schema(
+            field,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+        )
+        definitions.update(f_definitions)
+        nested_models.update(f_nested_models)
+        f_schema = {'type': 'array', 'items': items_schema}
+        if field.shape in {SHAPE_SET, SHAPE_FROZENSET}:
+            f_schema['uniqueItems'] = True
+
+    elif field.shape in MAPPING_LIKE_SHAPES:
+        f_schema = {'type': 'object'}
+        key_field = cast(ModelField, field.key_field)
+        regex = getattr(key_field.type_, 'regex', None)
+        items_schema, f_definitions, f_nested_models = field_singleton_schema(
+            field,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+        )
+        definitions.update(f_definitions)
+        nested_models.update(f_nested_models)
+        if regex:
+            # Dict keys have a regex pattern
+            # items_schema might be a schema or empty dict, add it either way
+            f_schema['patternProperties'] = {ConstrainedStr._get_pattern(regex): items_schema}
+        if items_schema:
+            # The dict values are not simply Any, so they need a schema
+            f_schema['additionalProperties'] = items_schema
+    elif field.shape == SHAPE_TUPLE or (field.shape == SHAPE_GENERIC and not issubclass(field.type_, BaseModel)):
+        sub_schema = []
+        sub_fields = cast(List[ModelField], field.sub_fields)
+        for sf in sub_fields:
+            sf_schema, sf_definitions, sf_nested_models = field_type_schema(
+                sf,
+                by_alias=by_alias,
+                model_name_map=model_name_map,
+                ref_prefix=ref_prefix,
+                ref_template=ref_template,
+                known_models=known_models,
+            )
+            definitions.update(sf_definitions)
+            nested_models.update(sf_nested_models)
+            sub_schema.append(sf_schema)
+
+        sub_fields_len = len(sub_fields)
+        if field.shape == SHAPE_GENERIC:
+            all_of_schemas = sub_schema[0] if sub_fields_len == 1 else {'type': 'array', 'items': sub_schema}
+            f_schema = {'allOf': [all_of_schemas]}
+        else:
+            f_schema = {
+                'type': 'array',
+                'minItems': sub_fields_len,
+                'maxItems': sub_fields_len,
+            }
+            if sub_fields_len >= 1:
+                f_schema['items'] = sub_schema
+    else:
+        assert field.shape in {SHAPE_SINGLETON, SHAPE_GENERIC}, field.shape
+        f_schema, f_definitions, f_nested_models = field_singleton_schema(
+            field,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            schema_overrides=schema_overrides,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+        )
+        definitions.update(f_definitions)
+        nested_models.update(f_nested_models)
+
+    # check field type to avoid repeated calls to the same __modify_schema__ method
+    if field.type_ != field.outer_type_:
+        if field.shape == SHAPE_GENERIC:
+            field_type = field.type_
+        else:
+            field_type = field.outer_type_
+        modify_schema = getattr(field_type, '__modify_schema__', None)
+        if modify_schema:
+            _apply_modify_schema(modify_schema, field, f_schema)
+    return f_schema, definitions, nested_models
+
+
+def model_process_schema(
+    model: TypeModelOrEnum,
+    *,
+    by_alias: bool = True,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_prefix: Optional[str] = None,
+    ref_template: str = default_ref_template,
+    known_models: Optional[TypeModelSet] = None,
+    field: Optional[ModelField] = None,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
     """
     Used by ``model_schema()``, you probably should be using that function.

@@ -211,105 +565,442 @@ def model_process_schema(model: TypeModelOrEnum, *, by_alias: bool=True,
     sub-models of the returned schema will be referenced, but their definitions will not be included in the schema. All
     the definitions are returned as the second value.
     """
-    pass
-
-
-def model_type_schema(model: Type['BaseModel'], *, by_alias: bool,
-    model_name_map: Dict[TypeModelOrEnum, str], ref_template: str,
-    ref_prefix: Optional[str]=None, known_models: TypeModelSet) ->Tuple[
-    Dict[str, Any], Dict[str, Any], Set[str]]:
+    from inspect import getdoc, signature
+
+    known_models = known_models or set()
+    if lenient_issubclass(model, Enum):
+        model = cast(Type[Enum], model)
+        s = enum_process_schema(model, field=field)
+        return s, {}, set()
+    model = cast(Type['BaseModel'], model)
+    s = {'title': model.__config__.title or model.__name__}
+    doc = getdoc(model)
+    if doc:
+        s['description'] = doc
+    known_models.add(model)
+    m_schema, m_definitions, nested_models = model_type_schema(
+        model,
+        by_alias=by_alias,
+        model_name_map=model_name_map,
+        ref_prefix=ref_prefix,
+        ref_template=ref_template,
+        known_models=known_models,
+    )
+    s.update(m_schema)
+    schema_extra = model.__config__.schema_extra
+    if callable(schema_extra):
+        if len(signature(schema_extra).parameters) == 1:
+            schema_extra(s)
+        else:
+            schema_extra(s, model)
+    else:
+        s.update(schema_extra)
+    return s, m_definitions, nested_models
+
+
+def model_type_schema(
+    model: Type['BaseModel'],
+    *,
+    by_alias: bool,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_template: str,
+    ref_prefix: Optional[str] = None,
+    known_models: TypeModelSet,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
     """
     You probably should be using ``model_schema()``, this function is indirectly used by that function.

     Take a single ``model`` and generate the schema for its type only, not including additional
     information as title, etc. Also return additional schema definitions, from sub-models.
     """
-    pass
-
-
-def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField]=None
-    ) ->Dict[str, Any]:
+    properties = {}
+    required = []
+    definitions: Dict[str, Any] = {}
+    nested_models: Set[str] = set()
+    for k, f in model.__fields__.items():
+        try:
+            f_schema, f_definitions, f_nested_models = field_schema(
+                f,
+                by_alias=by_alias,
+                model_name_map=model_name_map,
+                ref_prefix=ref_prefix,
+                ref_template=ref_template,
+                known_models=known_models,
+            )
+        except SkipField as skip:
+            warnings.warn(skip.message, UserWarning)
+            continue
+        definitions.update(f_definitions)
+        nested_models.update(f_nested_models)
+        if by_alias:
+            properties[f.alias] = f_schema
+            if f.required:
+                required.append(f.alias)
+        else:
+            properties[k] = f_schema
+            if f.required:
+                required.append(k)
+    if ROOT_KEY in properties:
+        out_schema = properties[ROOT_KEY]
+        out_schema['title'] = model.__config__.title or model.__name__
+    else:
+        out_schema = {'type': 'object', 'properties': properties}
+        if required:
+            out_schema['required'] = required
+    if model.__config__.extra == 'forbid':
+        out_schema['additionalProperties'] = False
+    return out_schema, definitions, nested_models
+
+
+def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField] = None) -> Dict[str, Any]:
     """
     Take a single `enum` and generate its schema.

     This is similar to the `model_process_schema` function, but applies to ``Enum`` objects.
     """
-    pass
-
-
-def field_singleton_sub_fields_schema(field: ModelField, *, by_alias: bool,
-    model_name_map: Dict[TypeModelOrEnum, str], ref_template: str,
-    schema_overrides: bool=False, ref_prefix: Optional[str]=None,
-    known_models: TypeModelSet) ->Tuple[Dict[str, Any], Dict[str, Any], Set
-    [str]]:
+    import inspect
+
+    schema_: Dict[str, Any] = {
+        'title': enum.__name__,
+        # Python assigns all enums a default docstring value of 'An enumeration', so
+        # all enums will have a description field even if not explicitly provided.
+        'description': inspect.cleandoc(enum.__doc__ or 'An enumeration.'),
+        # Add enum values and the enum field type to the schema.
+        'enum': [item.value for item in cast(Iterable[Enum], enum)],
+    }
+
+    add_field_type_to_schema(enum, schema_)
+
+    modify_schema = getattr(enum, '__modify_schema__', None)
+    if modify_schema:
+        _apply_modify_schema(modify_schema, field, schema_)
+
+    return schema_
+
+
+def field_singleton_sub_fields_schema(
+    field: ModelField,
+    *,
+    by_alias: bool,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_template: str,
+    schema_overrides: bool = False,
+    ref_prefix: Optional[str] = None,
+    known_models: TypeModelSet,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
     """
     This function is indirectly used by ``field_schema()``, you probably should be using that function.

     Take a list of Pydantic ``ModelField`` from the declaration of a type with parameters, and generate their
     schema. I.e., fields used as "type parameters", like ``str`` and ``int`` in ``Tuple[str, int]``.
     """
-    pass
-
-
-field_class_to_schema: Tuple[Tuple[Any, Dict[str, Any]], ...] = ((Path, {
-    'type': 'string', 'format': 'path'}), (datetime, {'type': 'string',
-    'format': 'date-time'}), (date, {'type': 'string', 'format': 'date'}),
-    (time, {'type': 'string', 'format': 'time'}), (timedelta, {'type':
-    'number', 'format': 'time-delta'}), (IPv4Network, {'type': 'string',
-    'format': 'ipv4network'}), (IPv6Network, {'type': 'string', 'format':
-    'ipv6network'}), (IPv4Interface, {'type': 'string', 'format':
-    'ipv4interface'}), (IPv6Interface, {'type': 'string', 'format':
-    'ipv6interface'}), (IPv4Address, {'type': 'string', 'format': 'ipv4'}),
-    (IPv6Address, {'type': 'string', 'format': 'ipv6'}), (Pattern, {'type':
-    'string', 'format': 'regex'}), (str, {'type': 'string'}), (bytes, {
-    'type': 'string', 'format': 'binary'}), (bool, {'type': 'boolean'}), (
-    int, {'type': 'integer'}), (float, {'type': 'number'}), (Decimal, {
-    'type': 'number'}), (UUID, {'type': 'string', 'format': 'uuid'}), (dict,
-    {'type': 'object'}), (list, {'type': 'array', 'items': {}}), (tuple, {
-    'type': 'array', 'items': {}}), (set, {'type': 'array', 'items': {},
-    'uniqueItems': True}), (frozenset, {'type': 'array', 'items': {},
-    'uniqueItems': True}))
+    sub_fields = cast(List[ModelField], field.sub_fields)
+    definitions = {}
+    nested_models: Set[str] = set()
+    if len(sub_fields) == 1:
+        return field_type_schema(
+            sub_fields[0],
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            schema_overrides=schema_overrides,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+        )
+    else:
+        s: Dict[str, Any] = {}
+        # https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminator-object
+        field_has_discriminator: bool = field.discriminator_key is not None
+        if field_has_discriminator:
+            assert field.sub_fields_mapping is not None
+
+            discriminator_models_refs: Dict[str, Union[str, Dict[str, Any]]] = {}
+
+            for discriminator_value, sub_field in field.sub_fields_mapping.items():
+                if isinstance(discriminator_value, Enum):
+                    discriminator_value = str(discriminator_value.value)
+                # sub_field is either a `BaseModel` or directly an `Annotated` `Union` of many
+                if is_union(get_origin(sub_field.type_)):
+                    sub_models = get_sub_types(sub_field.type_)
+                    discriminator_models_refs[discriminator_value] = {
+                        model_name_map[sub_model]: get_schema_ref(
+                            model_name_map[sub_model], ref_prefix, ref_template, False
+                        )
+                        for sub_model in sub_models
+                    }
+                else:
+                    sub_field_type = sub_field.type_
+                    if hasattr(sub_field_type, '__pydantic_model__'):
+                        sub_field_type = sub_field_type.__pydantic_model__
+
+                    discriminator_model_name = model_name_map[sub_field_type]
+                    discriminator_model_ref = get_schema_ref(discriminator_model_name, ref_prefix, ref_template, False)
+                    discriminator_models_refs[discriminator_value] = discriminator_model_ref['$ref']
+
+            s['discriminator'] = {
+                'propertyName': field.discriminator_alias,
+                'mapping': discriminator_models_refs,
+            }
+
+        sub_field_schemas = []
+        for sf in sub_fields:
+            sub_schema, sub_definitions, sub_nested_models = field_type_schema(
+                sf,
+                by_alias=by_alias,
+                model_name_map=model_name_map,
+                schema_overrides=schema_overrides,
+                ref_prefix=ref_prefix,
+                ref_template=ref_template,
+                known_models=known_models,
+            )
+            definitions.update(sub_definitions)
+            if schema_overrides and 'allOf' in sub_schema:
+                # if the sub_field is a referenced schema we only need the referenced
+                # object. Otherwise we will end up with several allOf inside anyOf/oneOf.
+                # See https://github.com/pydantic/pydantic/issues/1209
+                sub_schema = sub_schema['allOf'][0]
+
+            if sub_schema.keys() == {'discriminator', 'oneOf'}:
+                # we don't want discriminator information inside oneOf choices, this is dealt with elsewhere
+                sub_schema.pop('discriminator')
+            sub_field_schemas.append(sub_schema)
+            nested_models.update(sub_nested_models)
+        s['oneOf' if field_has_discriminator else 'anyOf'] = sub_field_schemas
+        return s, definitions, nested_models
+
+
+# Order is important, e.g. subclasses of str must go before str
+# this is used only for standard library types, custom types should use __modify_schema__ instead
+field_class_to_schema: Tuple[Tuple[Any, Dict[str, Any]], ...] = (
+    (Path, {'type': 'string', 'format': 'path'}),
+    (datetime, {'type': 'string', 'format': 'date-time'}),
+    (date, {'type': 'string', 'format': 'date'}),
+    (time, {'type': 'string', 'format': 'time'}),
+    (timedelta, {'type': 'number', 'format': 'time-delta'}),
+    (IPv4Network, {'type': 'string', 'format': 'ipv4network'}),
+    (IPv6Network, {'type': 'string', 'format': 'ipv6network'}),
+    (IPv4Interface, {'type': 'string', 'format': 'ipv4interface'}),
+    (IPv6Interface, {'type': 'string', 'format': 'ipv6interface'}),
+    (IPv4Address, {'type': 'string', 'format': 'ipv4'}),
+    (IPv6Address, {'type': 'string', 'format': 'ipv6'}),
+    (Pattern, {'type': 'string', 'format': 'regex'}),
+    (str, {'type': 'string'}),
+    (bytes, {'type': 'string', 'format': 'binary'}),
+    (bool, {'type': 'boolean'}),
+    (int, {'type': 'integer'}),
+    (float, {'type': 'number'}),
+    (Decimal, {'type': 'number'}),
+    (UUID, {'type': 'string', 'format': 'uuid'}),
+    (dict, {'type': 'object'}),
+    (list, {'type': 'array', 'items': {}}),
+    (tuple, {'type': 'array', 'items': {}}),
+    (set, {'type': 'array', 'items': {}, 'uniqueItems': True}),
+    (frozenset, {'type': 'array', 'items': {}, 'uniqueItems': True}),
+)
+
 json_scheme = {'type': 'string', 'format': 'json-string'}


-def add_field_type_to_schema(field_type: Any, schema_: Dict[str, Any]) ->None:
+def add_field_type_to_schema(field_type: Any, schema_: Dict[str, Any]) -> None:
     """
     Update the given `schema` with the type-specific metadata for the given `field_type`.

     This function looks through `field_class_to_schema` for a class that matches the given `field_type`,
     and then modifies the given `schema` with the information from that type.
     """
-    pass
-
-
-def field_singleton_schema(field: ModelField, *, by_alias: bool,
-    model_name_map: Dict[TypeModelOrEnum, str], ref_template: str,
-    schema_overrides: bool=False, ref_prefix: Optional[str]=None,
-    known_models: TypeModelSet) ->Tuple[Dict[str, Any], Dict[str, Any], Set
-    [str]]:
+    for type_, t_schema in field_class_to_schema:
+        # Fallback for `typing.Pattern` and `re.Pattern` as they are not a valid class
+        if lenient_issubclass(field_type, type_) or field_type is type_ is Pattern:
+            schema_.update(t_schema)
+            break
+
+
+def get_schema_ref(name: str, ref_prefix: Optional[str], ref_template: str, schema_overrides: bool) -> Dict[str, Any]:
+    if ref_prefix:
+        schema_ref = {'$ref': ref_prefix + name}
+    else:
+        schema_ref = {'$ref': ref_template.format(model=name)}
+    return {'allOf': [schema_ref]} if schema_overrides else schema_ref
+
+
+def field_singleton_schema(  # noqa: C901 (ignore complexity)
+    field: ModelField,
+    *,
+    by_alias: bool,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_template: str,
+    schema_overrides: bool = False,
+    ref_prefix: Optional[str] = None,
+    known_models: TypeModelSet,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
     """
     This function is indirectly used by ``field_schema()``, you should probably be using that function.

     Take a single Pydantic ``ModelField``, and return its schema and any additional definitions from sub-models.
     """
-    pass
-
+    from pydantic.v1.main import BaseModel

-def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field:
-    ModelField) ->ModelField:
+    definitions: Dict[str, Any] = {}
+    nested_models: Set[str] = set()
+    field_type = field.type_
+
+    # Recurse into this field if it contains sub_fields and is NOT a
+    # BaseModel OR that BaseModel is a const
+    if field.sub_fields and (
+        (field.field_info and field.field_info.const) or not lenient_issubclass(field_type, BaseModel)
+    ):
+        return field_singleton_sub_fields_schema(
+            field,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            schema_overrides=schema_overrides,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+        )
+    if field_type is Any or field_type is object or field_type.__class__ == TypeVar or get_origin(field_type) is type:
+        return {}, definitions, nested_models  # no restrictions
+    if is_none_type(field_type):
+        return {'type': 'null'}, definitions, nested_models
+    if is_callable_type(field_type):
+        raise SkipField(f'Callable {field.name} was excluded from schema since JSON schema has no equivalent type.')
+    f_schema: Dict[str, Any] = {}
+    if field.field_info is not None and field.field_info.const:
+        f_schema['const'] = field.default
+
+    if is_literal_type(field_type):
+        values = tuple(x.value if isinstance(x, Enum) else x for x in all_literal_values(field_type))
+
+        if len({v.__class__ for v in values}) > 1:
+            return field_schema(
+                multitypes_literal_field_for_schema(values, field),
+                by_alias=by_alias,
+                model_name_map=model_name_map,
+                ref_prefix=ref_prefix,
+                ref_template=ref_template,
+                known_models=known_models,
+            )
+
+        # All values have the same type
+        field_type = values[0].__class__
+        f_schema['enum'] = list(values)
+        add_field_type_to_schema(field_type, f_schema)
+    elif lenient_issubclass(field_type, Enum):
+        enum_name = model_name_map[field_type]
+        f_schema, schema_overrides = get_field_info_schema(field, schema_overrides)
+        f_schema.update(get_schema_ref(enum_name, ref_prefix, ref_template, schema_overrides))
+        definitions[enum_name] = enum_process_schema(field_type, field=field)
+    elif is_namedtuple(field_type):
+        sub_schema, *_ = model_process_schema(
+            field_type.__pydantic_model__,
+            by_alias=by_alias,
+            model_name_map=model_name_map,
+            ref_prefix=ref_prefix,
+            ref_template=ref_template,
+            known_models=known_models,
+            field=field,
+        )
+        items_schemas = list(sub_schema['properties'].values())
+        f_schema.update(
+            {
+                'type': 'array',
+                'items': items_schemas,
+                'minItems': len(items_schemas),
+                'maxItems': len(items_schemas),
+            }
+        )
+    elif not hasattr(field_type, '__pydantic_model__'):
+        add_field_type_to_schema(field_type, f_schema)
+
+        modify_schema = getattr(field_type, '__modify_schema__', None)
+        if modify_schema:
+            _apply_modify_schema(modify_schema, field, f_schema)
+
+    if f_schema:
+        return f_schema, definitions, nested_models
+
+    # Handle dataclass-based models
+    if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel):
+        field_type = field_type.__pydantic_model__
+
+    if issubclass(field_type, BaseModel):
+        model_name = model_name_map[field_type]
+        if field_type not in known_models:
+            sub_schema, sub_definitions, sub_nested_models = model_process_schema(
+                field_type,
+                by_alias=by_alias,
+                model_name_map=model_name_map,
+                ref_prefix=ref_prefix,
+                ref_template=ref_template,
+                known_models=known_models,
+                field=field,
+            )
+            definitions.update(sub_definitions)
+            definitions[model_name] = sub_schema
+            nested_models.update(sub_nested_models)
+        else:
+            nested_models.add(model_name)
+        schema_ref = get_schema_ref(model_name, ref_prefix, ref_template, schema_overrides)
+        return schema_ref, definitions, nested_models
+
+    # For generics with no args
+    args = get_args(field_type)
+    if args is not None and not args and Generic in field_type.__bases__:
+        return f_schema, definitions, nested_models
+
+    raise ValueError(f'Value not declarable with JSON Schema, field: {field}')
+
+
+def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field: ModelField) -> ModelField:
     """
     To support `Literal` with values of different types, we split it into multiple `Literal` with same type
     e.g. `Literal['qwe', 'asd', 1, 2]` becomes `Union[Literal['qwe', 'asd'], Literal[1, 2]]`
     """
-    pass
+    literal_distinct_types = defaultdict(list)
+    for v in values:
+        literal_distinct_types[v.__class__].append(v)
+    distinct_literals = (Literal[tuple(same_type_values)] for same_type_values in literal_distinct_types.values())
+
+    return ModelField(
+        name=field.name,
+        type_=Union[tuple(distinct_literals)],  # type: ignore
+        class_validators=field.class_validators,
+        model_config=field.model_config,
+        default=field.default,
+        required=field.required,
+        alias=field.alias,
+        field_info=field.field_info,
+    )
+
+
+def encode_default(dft: Any) -> Any:
+    from pydantic.v1.main import BaseModel
+
+    if isinstance(dft, BaseModel) or is_dataclass(dft):
+        dft = cast('dict[str, Any]', pydantic_encoder(dft))

+    if isinstance(dft, dict):
+        return {encode_default(k): encode_default(v) for k, v in dft.items()}
+    elif isinstance(dft, Enum):
+        return dft.value
+    elif isinstance(dft, (int, float, str)):
+        return dft
+    elif isinstance(dft, (list, tuple)):
+        t = dft.__class__
+        seq_args = (encode_default(v) for v in dft)
+        return t(*seq_args) if is_namedtuple(t) else t(seq_args)
+    elif dft is None:
+        return None
+    else:
+        return pydantic_encoder(dft)

-_map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float:
-    confloat, Decimal: condecimal}

+_map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float: confloat, Decimal: condecimal}

-def get_annotation_from_field_info(annotation: Any, field_info: FieldInfo,
-    field_name: str, validate_assignment: bool=False) ->Type[Any]:
+
+def get_annotation_from_field_info(
+    annotation: Any, field_info: FieldInfo, field_name: str, validate_assignment: bool = False
+) -> Type[Any]:
     """
     Get an annotation with validation implemented for numbers and strings based on the field_info.
     :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr``
@@ -318,11 +1009,25 @@ def get_annotation_from_field_info(annotation: Any, field_info: FieldInfo,
     :param validate_assignment: default False, flag for BaseModel Config value of validate_assignment
     :return: the same ``annotation`` if unmodified or a new annotation with validation in place
     """
-    pass
+    constraints = field_info.get_constraints()
+    used_constraints: Set[str] = set()
+    if constraints:
+        annotation, used_constraints = get_annotation_with_constraints(annotation, field_info)
+    if validate_assignment:
+        used_constraints.add('allow_mutation')
+
+    unused_constraints = constraints - used_constraints
+    if unused_constraints:
+        raise ValueError(
+            f'On field "{field_name}" the following field constraints are set but not enforced: '
+            f'{", ".join(unused_constraints)}. '
+            f'\nFor more details see https://docs.pydantic.dev/usage/schema/#unenforced-field-constraints'
+        )
+
+    return annotation


-def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo
-    ) ->Tuple[Type[Any], Set[str]]:
+def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo) -> Tuple[Type[Any], Set[str]]:  # noqa: C901
     """
     Get an annotation with used constraints implemented for numbers and strings based on the field_info.

@@ -330,14 +1035,123 @@ def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo
     :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema
     :return: the same ``annotation`` if unmodified or a new annotation along with the used constraints.
     """
-    pass
-
-
-def normalize_name(name: str) ->str:
+    used_constraints: Set[str] = set()
+
+    def go(type_: Any) -> Type[Any]:
+        if (
+            is_literal_type(type_)
+            or isinstance(type_, ForwardRef)
+            or lenient_issubclass(type_, (ConstrainedList, ConstrainedSet, ConstrainedFrozenSet))
+        ):
+            return type_
+        origin = get_origin(type_)
+        if origin is not None:
+            args: Tuple[Any, ...] = get_args(type_)
+            if any(isinstance(a, ForwardRef) for a in args):
+                # forward refs cause infinite recursion below
+                return type_
+
+            if origin is Annotated:
+                return go(args[0])
+            if is_union(origin):
+                return Union[tuple(go(a) for a in args)]  # type: ignore
+
+            if issubclass(origin, List) and (
+                field_info.min_items is not None
+                or field_info.max_items is not None
+                or field_info.unique_items is not None
+            ):
+                used_constraints.update({'min_items', 'max_items', 'unique_items'})
+                return conlist(
+                    go(args[0]),
+                    min_items=field_info.min_items,
+                    max_items=field_info.max_items,
+                    unique_items=field_info.unique_items,
+                )
+
+            if issubclass(origin, Set) and (field_info.min_items is not None or field_info.max_items is not None):
+                used_constraints.update({'min_items', 'max_items'})
+                return conset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items)
+
+            if issubclass(origin, FrozenSet) and (field_info.min_items is not None or field_info.max_items is not None):
+                used_constraints.update({'min_items', 'max_items'})
+                return confrozenset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items)
+
+            for t in (Tuple, List, Set, FrozenSet, Sequence):
+                if issubclass(origin, t):  # type: ignore
+                    return t[tuple(go(a) for a in args)]  # type: ignore
+
+            if issubclass(origin, Dict):
+                return Dict[args[0], go(args[1])]  # type: ignore
+
+        attrs: Optional[Tuple[str, ...]] = None
+        constraint_func: Optional[Callable[..., type]] = None
+        if isinstance(type_, type):
+            if issubclass(type_, (SecretStr, SecretBytes)):
+                attrs = ('max_length', 'min_length')
+
+                def constraint_func(**kw: Any) -> Type[Any]:  # noqa: F811
+                    return type(type_.__name__, (type_,), kw)
+
+            elif issubclass(type_, str) and not issubclass(type_, (EmailStr, AnyUrl)):
+                attrs = ('max_length', 'min_length', 'regex')
+                if issubclass(type_, StrictStr):
+
+                    def constraint_func(**kw: Any) -> Type[Any]:
+                        return type(type_.__name__, (type_,), kw)
+
+                else:
+                    constraint_func = constr
+            elif issubclass(type_, bytes):
+                attrs = ('max_length', 'min_length', 'regex')
+                if issubclass(type_, StrictBytes):
+
+                    def constraint_func(**kw: Any) -> Type[Any]:
+                        return type(type_.__name__, (type_,), kw)
+
+                else:
+                    constraint_func = conbytes
+            elif issubclass(type_, numeric_types) and not issubclass(
+                type_,
+                (
+                    ConstrainedInt,
+                    ConstrainedFloat,
+                    ConstrainedDecimal,
+                    ConstrainedList,
+                    ConstrainedSet,
+                    ConstrainedFrozenSet,
+                    bool,
+                ),
+            ):
+                # Is numeric type
+                attrs = ('gt', 'lt', 'ge', 'le', 'multiple_of')
+                if issubclass(type_, float):
+                    attrs += ('allow_inf_nan',)
+                if issubclass(type_, Decimal):
+                    attrs += ('max_digits', 'decimal_places')
+                numeric_type = next(t for t in numeric_types if issubclass(type_, t))  # pragma: no branch
+                constraint_func = _map_types_constraint[numeric_type]
+
+        if attrs:
+            used_constraints.update(set(attrs))
+            kwargs = {
+                attr_name: attr
+                for attr_name, attr in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs)
+                if attr is not None
+            }
+            if kwargs:
+                constraint_func = cast(Callable[..., type], constraint_func)
+                return constraint_func(**kwargs)
+        return type_
+
+    return go(annotation), used_constraints
+
+
+def normalize_name(name: str) -> str:
     """
     Normalizes the given name. This can be applied to either a model *or* enum.
     """
-    pass
+    return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name)


 class SkipField(Exception):
@@ -345,5 +1159,5 @@ class SkipField(Exception):
     Utility exception used to exclude fields from schema.
     """

-    def __init__(self, message: str) ->None:
+    def __init__(self, message: str) -> None:
         self.message = message
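
A hedged, minimal sketch of how the schema helpers added above fit together (assumes pydantic 2.x's bundled `pydantic.v1` compatibility layer, as used throughout this diff; on pydantic 1.x you would import from `pydantic` directly, and the `Item` model is purely illustrative):

    from pydantic.v1 import BaseModel, Field

    class Item(BaseModel):
        name: str = Field(..., min_length=1, max_length=10)

    # min_length/max_length are turned into a constr() subclass by
    # get_annotation_from_field_info(), and Item.schema() walks
    # field_schema()/model_process_schema() to build the JSON schema:
    print(Item.schema()['properties']['name'])
    # -> {'title': 'Name', 'minLength': 1, 'maxLength': 10, 'type': 'string'} (key order may differ)
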
diff --git a/pydantic/v1/tools.py b/pydantic/v1/tools.py
index d15515365..6838a23ec 100644
--- a/pydantic/v1/tools.py
+++ b/pydantic/v1/tools.py
@@ -2,24 +2,91 @@ import json
 from functools import lru_cache
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Callable, Optional, Type, TypeVar, Union
+
 from pydantic.v1.parse import Protocol, load_file, load_str_bytes
 from pydantic.v1.types import StrBytes
 from pydantic.v1.typing import display_as_type
-__all__ = ('parse_file_as', 'parse_obj_as', 'parse_raw_as', 'schema_of',
-    'schema_json_of')
+
+__all__ = ('parse_file_as', 'parse_obj_as', 'parse_raw_as', 'schema_of', 'schema_json_of')
+
 NameFactory = Union[str, Callable[[Type[Any]], str]]
+
 if TYPE_CHECKING:
     from pydantic.v1.typing import DictStrAny
+
+
+def _generate_parsing_type_name(type_: Any) -> str:
+    return f'ParsingModel[{display_as_type(type_)}]'
+
+
+@lru_cache(maxsize=2048)
+def _get_parsing_type(type_: Any, *, type_name: Optional[NameFactory] = None) -> Any:
+    from pydantic.v1.main import create_model
+
+    if type_name is None:
+        type_name = _generate_parsing_type_name
+    if not isinstance(type_name, str):
+        type_name = type_name(type_)
+    return create_model(type_name, __root__=(type_, ...))
+
+
 T = TypeVar('T')


-def schema_of(type_: Any, *, title: Optional[NameFactory]=None, **
-    schema_kwargs: Any) ->'DictStrAny':
+def parse_obj_as(type_: Type[T], obj: Any, *, type_name: Optional[NameFactory] = None) -> T:
+    model_type = _get_parsing_type(type_, type_name=type_name)  # type: ignore[arg-type]
+    return model_type(__root__=obj).__root__
+
+
+def parse_file_as(
+    type_: Type[T],
+    path: Union[str, Path],
+    *,
+    content_type: str = None,
+    encoding: str = 'utf8',
+    proto: Protocol = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+    type_name: Optional[NameFactory] = None,
+) -> T:
+    obj = load_file(
+        path,
+        proto=proto,
+        content_type=content_type,
+        encoding=encoding,
+        allow_pickle=allow_pickle,
+        json_loads=json_loads,
+    )
+    return parse_obj_as(type_, obj, type_name=type_name)
+
+
+def parse_raw_as(
+    type_: Type[T],
+    b: StrBytes,
+    *,
+    content_type: str = None,
+    encoding: str = 'utf8',
+    proto: Protocol = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+    type_name: Optional[NameFactory] = None,
+) -> T:
+    obj = load_str_bytes(
+        b,
+        proto=proto,
+        content_type=content_type,
+        encoding=encoding,
+        allow_pickle=allow_pickle,
+        json_loads=json_loads,
+    )
+    return parse_obj_as(type_, obj, type_name=type_name)
+
+
+def schema_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_kwargs: Any) -> 'DictStrAny':
     """Generate a JSON schema (as dict) for the passed model or dynamically generated one"""
-    pass
+    return _get_parsing_type(type_, type_name=title).schema(**schema_kwargs)


-def schema_json_of(type_: Any, *, title: Optional[NameFactory]=None, **
-    schema_json_kwargs: Any) ->str:
+def schema_json_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_json_kwargs: Any) -> str:
     """Generate a JSON schema (as JSON) for the passed model or dynamically generated one"""
-    pass
+    return _get_parsing_type(type_, type_name=title).schema_json(**schema_json_kwargs)
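
A hedged usage sketch for the parsing/schema helpers implemented above (same environment assumption as before; the 'IntList' title is illustrative):

    from typing import List
    from pydantic.v1.tools import parse_obj_as, schema_of

    parse_obj_as(List[int], ['1', 2, '3'])    # -> [1, 2, 3], validated via a ParsingModel[List[int]] wrapper
    schema_of(List[int], title='IntList')     # JSON schema dict of the generated wrapper model
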
diff --git a/pydantic/v1/types.py b/pydantic/v1/types.py
index 5c45c9e09..0cd789a44 100644
--- a/pydantic/v1/types.py
+++ b/pydantic/v1/types.py
@@ -7,25 +7,109 @@ from decimal import Decimal, InvalidOperation
 from enum import Enum
 from pathlib import Path
 from types import new_class
-from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, FrozenSet, List, Optional, Pattern, Set, Tuple, Type, TypeVar, Union, cast, overload
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    ClassVar,
+    Dict,
+    FrozenSet,
+    List,
+    Optional,
+    Pattern,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+    overload,
+)
 from uuid import UUID
 from weakref import WeakSet
+
 from pydantic.v1 import errors
 from pydantic.v1.datetime_parse import parse_date
 from pydantic.v1.utils import import_string, update_not_none
-from pydantic.v1.validators import bytes_validator, constr_length_validator, constr_lower, constr_strip_whitespace, constr_upper, decimal_validator, float_finite_validator, float_validator, frozenset_validator, int_validator, list_validator, number_multiple_validator, number_size_validator, path_exists_validator, path_validator, set_validator, str_validator, strict_bytes_validator, strict_float_validator, strict_int_validator, strict_str_validator
-__all__ = ['NoneStr', 'NoneBytes', 'StrBytes', 'NoneStrBytes', 'StrictStr',
-    'ConstrainedBytes', 'conbytes', 'ConstrainedList', 'conlist',
-    'ConstrainedSet', 'conset', 'ConstrainedFrozenSet', 'confrozenset',
-    'ConstrainedStr', 'constr', 'PyObject', 'ConstrainedInt', 'conint',
-    'PositiveInt', 'NegativeInt', 'NonNegativeInt', 'NonPositiveInt',
-    'ConstrainedFloat', 'confloat', 'PositiveFloat', 'NegativeFloat',
-    'NonNegativeFloat', 'NonPositiveFloat', 'FiniteFloat',
-    'ConstrainedDecimal', 'condecimal', 'UUID1', 'UUID3', 'UUID4', 'UUID5',
-    'FilePath', 'DirectoryPath', 'Json', 'JsonWrapper', 'SecretField',
-    'SecretStr', 'SecretBytes', 'StrictBool', 'StrictBytes', 'StrictInt',
-    'StrictFloat', 'PaymentCardNumber', 'ByteSize', 'PastDate',
-    'FutureDate', 'ConstrainedDate', 'condate']
+from pydantic.v1.validators import (
+    bytes_validator,
+    constr_length_validator,
+    constr_lower,
+    constr_strip_whitespace,
+    constr_upper,
+    decimal_validator,
+    float_finite_validator,
+    float_validator,
+    frozenset_validator,
+    int_validator,
+    list_validator,
+    number_multiple_validator,
+    number_size_validator,
+    path_exists_validator,
+    path_validator,
+    set_validator,
+    str_validator,
+    strict_bytes_validator,
+    strict_float_validator,
+    strict_int_validator,
+    strict_str_validator,
+)
+
+__all__ = [
+    'NoneStr',
+    'NoneBytes',
+    'StrBytes',
+    'NoneStrBytes',
+    'StrictStr',
+    'ConstrainedBytes',
+    'conbytes',
+    'ConstrainedList',
+    'conlist',
+    'ConstrainedSet',
+    'conset',
+    'ConstrainedFrozenSet',
+    'confrozenset',
+    'ConstrainedStr',
+    'constr',
+    'PyObject',
+    'ConstrainedInt',
+    'conint',
+    'PositiveInt',
+    'NegativeInt',
+    'NonNegativeInt',
+    'NonPositiveInt',
+    'ConstrainedFloat',
+    'confloat',
+    'PositiveFloat',
+    'NegativeFloat',
+    'NonNegativeFloat',
+    'NonPositiveFloat',
+    'FiniteFloat',
+    'ConstrainedDecimal',
+    'condecimal',
+    'UUID1',
+    'UUID3',
+    'UUID4',
+    'UUID5',
+    'FilePath',
+    'DirectoryPath',
+    'Json',
+    'JsonWrapper',
+    'SecretField',
+    'SecretStr',
+    'SecretBytes',
+    'StrictBool',
+    'StrictBytes',
+    'StrictInt',
+    'StrictFloat',
+    'PaymentCardNumber',
+    'ByteSize',
+    'PastDate',
+    'FutureDate',
+    'ConstrainedDate',
+    'condate',
+]
+
 NoneStr = Optional[str]
 NoneBytes = Optional[bytes]
 StrBytes = Union[str, bytes]
@@ -35,54 +119,84 @@ OptionalIntFloat = Union[OptionalInt, float]
 OptionalIntFloatDecimal = Union[OptionalIntFloat, Decimal]
 OptionalDate = Optional[date]
 StrIntFloat = Union[str, int, float]
+
 if TYPE_CHECKING:
     from typing_extensions import Annotated
+
     from pydantic.v1.dataclasses import Dataclass
     from pydantic.v1.main import BaseModel
     from pydantic.v1.typing import CallableGenerator
+
     ModelOrDc = Type[Union[BaseModel, Dataclass]]
+
 T = TypeVar('T')
 _DEFINED_TYPES: 'WeakSet[type]' = WeakSet()


-class ConstrainedNumberMeta(type):
+@overload
+def _registered(typ: Type[T]) -> Type[T]:
+    pass
+
+
+@overload
+def _registered(typ: 'ConstrainedNumberMeta') -> 'ConstrainedNumberMeta':
+    pass
+
+
+def _registered(typ: Union[Type[T], 'ConstrainedNumberMeta']) -> Union[Type[T], 'ConstrainedNumberMeta']:
+    # In order to generate valid examples of constrained types, Hypothesis needs
+    # to inspect the type object - so we keep a weakref to each contype object
+    # until it can be registered.  When (or if) our Hypothesis plugin is loaded,
+    # it monkeypatches this function.
+    # If Hypothesis is never used, the total effect is to keep a weak reference
+    # which has minimal memory usage and doesn't even affect garbage collection.
+    _DEFINED_TYPES.add(typ)
+    return typ

-    def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]
-        ) ->'ConstrainedInt':
+
+class ConstrainedNumberMeta(type):
+    def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt':  # type: ignore
         new_cls = cast('ConstrainedInt', type.__new__(cls, name, bases, dct))
+
         if new_cls.gt is not None and new_cls.ge is not None:
-            raise errors.ConfigError(
-                'bounds gt and ge cannot be specified at the same time')
+            raise errors.ConfigError('bounds gt and ge cannot be specified at the same time')
         if new_cls.lt is not None and new_cls.le is not None:
-            raise errors.ConfigError(
-                'bounds lt and le cannot be specified at the same time')
-        return _registered(new_cls)
+            raise errors.ConfigError('bounds lt and le cannot be specified at the same time')
+
+        return _registered(new_cls)  # type: ignore
+

+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

 if TYPE_CHECKING:
     StrictBool = bool
 else:

-
     class StrictBool(int):
         """
         StrictBool to allow for bools which are not type-coerced.
         """

         @classmethod
-        def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
             field_schema.update(type='boolean')

         @classmethod
-        def __get_validators__(cls) ->'CallableGenerator':
+        def __get_validators__(cls) -> 'CallableGenerator':
             yield cls.validate

         @classmethod
-        def validate(cls, value: Any) ->bool:
+        def validate(cls, value: Any) -> bool:
             """
             Ensure that we only allow bools.
             """
-            pass
+            if isinstance(value, bool):
+                return value
+
+            raise errors.StrictBoolError()
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


 class ConstrainedInt(int, metaclass=ConstrainedNumberMeta):
@@ -94,18 +208,37 @@ class ConstrainedInt(int, metaclass=ConstrainedNumberMeta):
     multiple_of: OptionalInt = None

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, exclusiveMinimum=cls.gt,
-            exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le,
-            multipleOf=cls.multiple_of)
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            exclusiveMinimum=cls.gt,
+            exclusiveMaximum=cls.lt,
+            minimum=cls.ge,
+            maximum=cls.le,
+            multipleOf=cls.multiple_of,
+        )

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield strict_int_validator if cls.strict else int_validator
         yield number_size_validator
         yield number_multiple_validator


+def conint(
+    *,
+    strict: bool = False,
+    gt: Optional[int] = None,
+    ge: Optional[int] = None,
+    lt: Optional[int] = None,
+    le: Optional[int] = None,
+    multiple_of: Optional[int] = None,
+) -> Type[int]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of)
+    return type('ConstrainedIntValue', (ConstrainedInt,), namespace)
+
+
 if TYPE_CHECKING:
     PositiveInt = int
     NegativeInt = int
@@ -114,27 +247,25 @@ if TYPE_CHECKING:
     StrictInt = int
 else:

-
     class PositiveInt(ConstrainedInt):
         gt = 0

-
     class NegativeInt(ConstrainedInt):
         lt = 0

-
     class NonPositiveInt(ConstrainedInt):
         le = 0

-
     class NonNegativeInt(ConstrainedInt):
         ge = 0

-
     class StrictInt(ConstrainedInt):
         strict = True


+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
 class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta):
     strict: bool = False
     gt: OptionalIntFloat = None
@@ -145,10 +276,16 @@ class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta):
     allow_inf_nan: Optional[bool] = None

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, exclusiveMinimum=cls.gt,
-            exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le,
-            multipleOf=cls.multiple_of)
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            exclusiveMinimum=cls.gt,
+            exclusiveMaximum=cls.lt,
+            minimum=cls.ge,
+            maximum=cls.le,
+            multipleOf=cls.multiple_of,
+        )
+        # Modify constraints to account for differences between IEEE floats and JSON
         if field_schema.get('exclusiveMinimum') == -math.inf:
             del field_schema['exclusiveMinimum']
         if field_schema.get('minimum') == -math.inf:
@@ -159,13 +296,28 @@ class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta):
             del field_schema['maximum']

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield strict_float_validator if cls.strict else float_validator
         yield number_size_validator
         yield number_multiple_validator
         yield float_finite_validator


+def confloat(
+    *,
+    strict: bool = False,
+    gt: float = None,
+    ge: float = None,
+    lt: float = None,
+    le: float = None,
+    multiple_of: float = None,
+    allow_inf_nan: Optional[bool] = None,
+) -> Type[float]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, allow_inf_nan=allow_inf_nan)
+    return type('ConstrainedFloatValue', (ConstrainedFloat,), namespace)
+
+
 if TYPE_CHECKING:
     PositiveFloat = float
     NegativeFloat = float
@@ -175,31 +327,28 @@ if TYPE_CHECKING:
     FiniteFloat = float
 else:

-
     class PositiveFloat(ConstrainedFloat):
         gt = 0

-
     class NegativeFloat(ConstrainedFloat):
         lt = 0

-
     class NonPositiveFloat(ConstrainedFloat):
         le = 0

-
     class NonNegativeFloat(ConstrainedFloat):
         ge = 0

-
     class StrictFloat(ConstrainedFloat):
         strict = True

-
     class FiniteFloat(ConstrainedFloat):
         allow_inf_nan = False


+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
 class ConstrainedBytes(bytes):
     strip_whitespace = False
     to_upper = False
@@ -209,12 +358,11 @@ class ConstrainedBytes(bytes):
     strict: bool = False

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, minLength=cls.min_length, maxLength=
-            cls.max_length)
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length)

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield strict_bytes_validator if cls.strict else bytes_validator
         yield constr_strip_whitespace
         yield constr_upper
@@ -222,15 +370,38 @@ class ConstrainedBytes(bytes):
         yield constr_length_validator


+def conbytes(
+    *,
+    strip_whitespace: bool = False,
+    to_upper: bool = False,
+    to_lower: bool = False,
+    min_length: Optional[int] = None,
+    max_length: Optional[int] = None,
+    strict: bool = False,
+) -> Type[bytes]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(
+        strip_whitespace=strip_whitespace,
+        to_upper=to_upper,
+        to_lower=to_lower,
+        min_length=min_length,
+        max_length=max_length,
+        strict=strict,
+    )
+    return _registered(type('ConstrainedBytesValue', (ConstrainedBytes,), namespace))
+
+
 if TYPE_CHECKING:
     StrictBytes = bytes
 else:

-
     class StrictBytes(ConstrainedBytes):
         strict = True


+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
 class ConstrainedStr(str):
     strip_whitespace = False
     to_upper = False
@@ -242,12 +413,16 @@ class ConstrainedStr(str):
     strict = False

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, minLength=cls.min_length, maxLength=
-            cls.max_length, pattern=cls.regex and cls._get_pattern(cls.regex))
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            minLength=cls.min_length,
+            maxLength=cls.max_length,
+            pattern=cls.regex and cls._get_pattern(cls.regex),
+        )

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield strict_str_validator if cls.strict else str_validator
         yield constr_strip_whitespace
         yield constr_upper
@@ -255,82 +430,239 @@ class ConstrainedStr(str):
         yield constr_length_validator
         yield cls.validate

+    @classmethod
+    def validate(cls, value: Union[str]) -> Union[str]:
+        if cls.curtail_length and len(value) > cls.curtail_length:
+            value = value[: cls.curtail_length]
+
+        if cls.regex:
+            if not re.match(cls.regex, value):
+                raise errors.StrRegexError(pattern=cls._get_pattern(cls.regex))
+
+        return value
+
+    @staticmethod
+    def _get_pattern(regex: Union[str, Pattern[str]]) -> str:
+        return regex if isinstance(regex, str) else regex.pattern
+
+
+def constr(
+    *,
+    strip_whitespace: bool = False,
+    to_upper: bool = False,
+    to_lower: bool = False,
+    strict: bool = False,
+    min_length: Optional[int] = None,
+    max_length: Optional[int] = None,
+    curtail_length: Optional[int] = None,
+    regex: Optional[str] = None,
+) -> Type[str]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(
+        strip_whitespace=strip_whitespace,
+        to_upper=to_upper,
+        to_lower=to_lower,
+        strict=strict,
+        min_length=min_length,
+        max_length=max_length,
+        curtail_length=curtail_length,
+        regex=regex and re.compile(regex),
+    )
+    return _registered(type('ConstrainedStrValue', (ConstrainedStr,), namespace))
+

 if TYPE_CHECKING:
     StrictStr = str
 else:

-
     class StrictStr(ConstrainedStr):
         strict = True


-class ConstrainedSet(set):
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+# This type's superclass should be Set[T], but cython chokes on that...
+class ConstrainedSet(set):  # type: ignore
+    # Needed for pydantic to detect that this is a set
     __origin__ = set
-    __args__: Set[Type[T]]
+    __args__: Set[Type[T]]  # type: ignore
+
     min_items: Optional[int] = None
     max_items: Optional[int] = None
-    item_type: Type[T]
+    item_type: Type[T]  # type: ignore

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls.set_length_validator

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.
-            max_items)
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items)
+
+    @classmethod
+    def set_length_validator(cls, v: 'Optional[Set[T]]') -> 'Optional[Set[T]]':
+        if v is None:
+            return None
+
+        v = set_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.SetMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.SetMaxLengthError(limit_value=cls.max_items)
+
+        return v
+

+def conset(item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None) -> Type[Set[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]}
+    # We use new_class to be able to deal with Generic types
+    return new_class('ConstrainedSetValue', (ConstrainedSet,), {}, lambda ns: ns.update(namespace))

-class ConstrainedFrozenSet(frozenset):
+
+# This type's superclass should be FrozenSet[T], but cython chokes on that...
+class ConstrainedFrozenSet(frozenset):  # type: ignore
+    # Needed for pydantic to detect that this is a set
     __origin__ = frozenset
-    __args__: FrozenSet[Type[T]]
+    __args__: FrozenSet[Type[T]]  # type: ignore
+
     min_items: Optional[int] = None
     max_items: Optional[int] = None
-    item_type: Type[T]
+    item_type: Type[T]  # type: ignore

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls.frozenset_length_validator

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.
-            max_items)
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items)
+
+    @classmethod
+    def frozenset_length_validator(cls, v: 'Optional[FrozenSet[T]]') -> 'Optional[FrozenSet[T]]':
+        if v is None:
+            return None
+
+        v = frozenset_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.FrozenSetMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.FrozenSetMaxLengthError(limit_value=cls.max_items)
+
+        return v
+
+
+def confrozenset(
+    item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None
+) -> Type[FrozenSet[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]}
+    # We use new_class to be able to deal with Generic types
+    return new_class('ConstrainedFrozenSetValue', (ConstrainedFrozenSet,), {}, lambda ns: ns.update(namespace))


-class ConstrainedList(list):
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ LIST TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+# This type's superclass should be List[T], but cython chokes on that...
+class ConstrainedList(list):  # type: ignore
+    # Needed for pydantic to detect that this is a list
     __origin__ = list
-    __args__: Tuple[Type[T], ...]
+    __args__: Tuple[Type[T], ...]  # type: ignore
+
     min_items: Optional[int] = None
     max_items: Optional[int] = None
     unique_items: Optional[bool] = None
-    item_type: Type[T]
+    item_type: Type[T]  # type: ignore

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls.list_length_validator
         if cls.unique_items:
             yield cls.unique_items_validator

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.
-            max_items, uniqueItems=cls.unique_items)
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items, uniqueItems=cls.unique_items)
+
+    @classmethod
+    def list_length_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]':
+        if v is None:
+            return None
+
+        v = list_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.ListMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.ListMaxLengthError(limit_value=cls.max_items)
+
+        return v
+
+    @classmethod
+    def unique_items_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]':
+        if v is None:
+            return None
+
+        for i, value in enumerate(v, start=1):
+            if value in v[i:]:
+                raise errors.ListUniqueItemsError()
+
+        return v
+
+
+def conlist(
+    item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None, unique_items: bool = None
+) -> Type[List[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = dict(
+        min_items=min_items, max_items=max_items, unique_items=unique_items, item_type=item_type, __args__=(item_type,)
+    )
+    # We use new_class to be able to deal with Generic types
+    return new_class('ConstrainedListValue', (ConstrainedList,), {}, lambda ns: ns.update(namespace))
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PYOBJECT TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


 if TYPE_CHECKING:
     PyObject = Callable[..., Any]
 else:

-
     class PyObject:
         validate_always = True

         @classmethod
-        def __get_validators__(cls) ->'CallableGenerator':
+        def __get_validators__(cls) -> 'CallableGenerator':
             yield cls.validate

+        @classmethod
+        def validate(cls, value: Any) -> Any:
+            if isinstance(value, Callable):
+                return value
+
+            try:
+                value = str_validator(value)
+            except errors.StrError:
+                raise errors.PyObjectError(error_message='value is neither a valid import path nor a valid callable')
+
+            try:
+                return import_string(value)
+            except ImportError as e:
+                raise errors.PyObjectError(error_message=str(e))
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+

 class ConstrainedDecimal(Decimal, metaclass=ConstrainedNumberMeta):
     gt: OptionalIntFloatDecimal = None
@@ -342,18 +674,82 @@ class ConstrainedDecimal(Decimal, metaclass=ConstrainedNumberMeta):
     multiple_of: OptionalIntFloatDecimal = None

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, exclusiveMinimum=cls.gt,
-            exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le,
-            multipleOf=cls.multiple_of)
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            exclusiveMinimum=cls.gt,
+            exclusiveMaximum=cls.lt,
+            minimum=cls.ge,
+            maximum=cls.le,
+            multipleOf=cls.multiple_of,
+        )

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield decimal_validator
         yield number_size_validator
         yield number_multiple_validator
         yield cls.validate

+    @classmethod
+    def validate(cls, value: Decimal) -> Decimal:
+        try:
+            normalized_value = value.normalize()
+        except InvalidOperation:
+            normalized_value = value
+        digit_tuple, exponent = normalized_value.as_tuple()[1:]
+        if exponent in {'F', 'n', 'N'}:
+            raise errors.DecimalIsNotFiniteError()
+
+        if exponent >= 0:
+            # A positive exponent adds that many trailing zeros.
+            digits = len(digit_tuple) + exponent
+            decimals = 0
+        else:
+            # If the absolute value of the negative exponent is larger than the
+            # number of digits, then it's the same as the number of digits,
+            # because it'll consume all of the digits in digit_tuple and then
+            # add abs(exponent) - len(digit_tuple) leading zeros after the
+            # decimal point.
+            if abs(exponent) > len(digit_tuple):
+                digits = decimals = abs(exponent)
+            else:
+                digits = len(digit_tuple)
+                decimals = abs(exponent)
+        whole_digits = digits - decimals
+
+        if cls.max_digits is not None and digits > cls.max_digits:
+            raise errors.DecimalMaxDigitsError(max_digits=cls.max_digits)
+
+        if cls.decimal_places is not None and decimals > cls.decimal_places:
+            raise errors.DecimalMaxPlacesError(decimal_places=cls.decimal_places)
+
+        if cls.max_digits is not None and cls.decimal_places is not None:
+            expected = cls.max_digits - cls.decimal_places
+            if whole_digits > expected:
+                raise errors.DecimalWholeDigitsError(whole_digits=expected)
+
+        return value
+
+
+def condecimal(
+    *,
+    gt: Decimal = None,
+    ge: Decimal = None,
+    lt: Decimal = None,
+    le: Decimal = None,
+    max_digits: Optional[int] = None,
+    decimal_places: Optional[int] = None,
+    multiple_of: Decimal = None,
+) -> Type[Decimal]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(
+        gt=gt, ge=ge, lt=lt, le=le, max_digits=max_digits, decimal_places=decimal_places, multiple_of=multiple_of
+    )
+    return type('ConstrainedDecimalValue', (ConstrainedDecimal,), namespace)
+
+
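Not part of the patch: a worked example of the digit bookkeeping in `ConstrainedDecimal.validate`, via the `condecimal` factory and a throwaway `Price` model.

    from decimal import Decimal
    from pydantic.v1 import BaseModel, ValidationError, condecimal

    class Price(BaseModel):
        # max_digits counts every digit, decimal_places only those after the point
        amount: condecimal(max_digits=5, decimal_places=2)

    print(Price(amount=Decimal('123.45')))  # 5 digits, 2 decimal places -> ok
    try:
        Price(amount=Decimal('1234.56'))  # 6 digits exceeds max_digits=5
    except ValidationError as exc:
        print(exc)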
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

 if TYPE_CHECKING:
     UUID1 = UUID
@@ -362,83 +758,95 @@ if TYPE_CHECKING:
     UUID5 = UUID
 else:

-
     class UUID1(UUID):
         _required_version = 1

         @classmethod
-        def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-            field_schema.update(type='string', format=
-                f'uuid{cls._required_version}')
-
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(type='string', format=f'uuid{cls._required_version}')

     class UUID3(UUID1):
         _required_version = 3

-
     class UUID4(UUID1):
         _required_version = 4

-
     class UUID5(UUID1):
         _required_version = 5
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
 if TYPE_CHECKING:
     FilePath = Path
     DirectoryPath = Path
 else:

-
     class FilePath(Path):
-
         @classmethod
-        def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
             field_schema.update(format='file-path')

         @classmethod
-        def __get_validators__(cls) ->'CallableGenerator':
+        def __get_validators__(cls) -> 'CallableGenerator':
             yield path_validator
             yield path_exists_validator
             yield cls.validate

+        @classmethod
+        def validate(cls, value: Path) -> Path:
+            if not value.is_file():
+                raise errors.PathNotAFileError(path=value)
+
+            return value

     class DirectoryPath(Path):
-
         @classmethod
-        def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
             field_schema.update(format='directory-path')

         @classmethod
-        def __get_validators__(cls) ->'CallableGenerator':
+        def __get_validators__(cls) -> 'CallableGenerator':
             yield path_validator
             yield path_exists_validator
             yield cls.validate

+        @classmethod
+        def validate(cls, value: Path) -> Path:
+            if not value.is_dir():
+                raise errors.PathNotADirectoryError(path=value)
+
+            return value
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+

 class JsonWrapper:
     pass


 class JsonMeta(type):
-
-    def __getitem__(self, t: Type[Any]) ->Type[JsonWrapper]:
+    def __getitem__(self, t: Type[Any]) -> Type[JsonWrapper]:
         if t is Any:
-            return Json
-        return _registered(type('JsonWrapperValue', (JsonWrapper,), {
-            'inner_type': t}))
+            return Json  # allow Json[Any] to replicate plain Json
+        return _registered(type('JsonWrapperValue', (JsonWrapper,), {'inner_type': t}))


 if TYPE_CHECKING:
-    Json = Annotated[T, ...]
-else:
+    Json = Annotated[T, ...]  # Json[list[str]] will be recognized by type checkers as list[str]

+else:

     class Json(metaclass=JsonMeta):
-
         @classmethod
-        def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
             field_schema.update(type='string', format='json-string')


+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
 class SecretField(abc.ABC):
     """
     Note: this should be implemented as a generic like `SecretField(ABC, Generic[T])`,
@@ -450,72 +858,120 @@ class SecretField(abc.ABC):
           https://github.com/cython/cython/issues/2753).
     """

-    def __eq__(self, other: Any) ->bool:
-        return isinstance(other, self.__class__) and self.get_secret_value(
-            ) == other.get_secret_value()
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, self.__class__) and self.get_secret_value() == other.get_secret_value()

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return '**********' if self.get_secret_value() else ''

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(self.get_secret_value())

+    @abc.abstractmethod
+    def get_secret_value(self) -> Any:  # pragma: no cover
+        ...
+

 class SecretStr(SecretField):
     min_length: OptionalInt = None
     max_length: OptionalInt = None

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, type='string', writeOnly=True, format
-            ='password', minLength=cls.min_length, maxLength=cls.max_length)
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            type='string',
+            writeOnly=True,
+            format='password',
+            minLength=cls.min_length,
+            maxLength=cls.max_length,
+        )

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls.validate
         yield constr_length_validator

+    @classmethod
+    def validate(cls, value: Any) -> 'SecretStr':
+        if isinstance(value, cls):
+            return value
+        value = str_validator(value)
+        return cls(value)
+
     def __init__(self, value: str):
         self._secret_value = value

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return f"SecretStr('{self}')"

-    def __len__(self) ->int:
+    def __len__(self) -> int:
         return len(self._secret_value)

+    def display(self) -> str:
+        warnings.warn('`secret_str.display()` is deprecated, use `str(secret_str)` instead', DeprecationWarning)
+        return str(self)
+
+    def get_secret_value(self) -> str:
+        return self._secret_value
+

 class SecretBytes(SecretField):
     min_length: OptionalInt = None
     max_length: OptionalInt = None

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, type='string', writeOnly=True, format
-            ='password', minLength=cls.min_length, maxLength=cls.max_length)
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            type='string',
+            writeOnly=True,
+            format='password',
+            minLength=cls.min_length,
+            maxLength=cls.max_length,
+        )

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls.validate
         yield constr_length_validator

+    @classmethod
+    def validate(cls, value: Any) -> 'SecretBytes':
+        if isinstance(value, cls):
+            return value
+        value = bytes_validator(value)
+        return cls(value)
+
     def __init__(self, value: bytes):
         self._secret_value = value

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return f"SecretBytes(b'{self}')"

-    def __len__(self) ->int:
+    def __len__(self) -> int:
         return len(self._secret_value)

+    def display(self) -> str:
+        warnings.warn('`secret_bytes.display()` is deprecated, use `str(secret_bytes)` instead', DeprecationWarning)
+        return str(self)
+
+    def get_secret_value(self) -> bytes:
+        return self._secret_value
+
+
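Not part of the patch: the secret types above mask their value in `str()`/`repr()` and only reveal it through `get_secret_value()`; a quick sketch with a throwaway model.

    from pydantic.v1 import BaseModel, SecretStr

    class Credentials(BaseModel):
        password: SecretStr

    creds = Credentials(password='hunter2')
    print(creds.password)                     # **********
    print(creds.password.get_secret_value())  # hunter2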
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+

 class PaymentCardBrand(str, Enum):
+    # If you add another card type, please also add it to the
+    # Hypothesis strategy in `pydantic._hypothesis_plugin`.
     amex = 'American Express'
     mastercard = 'Mastercard'
     visa = 'Visa'
     other = 'other'

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return self.value


@@ -523,6 +979,7 @@ class PaymentCardNumber(str):
     """
     Based on: https://en.wikipedia.org/wiki/Payment_card_number
     """
+
     strip_whitespace: ClassVar[bool] = True
     min_length: ClassVar[int] = 12
     max_length: ClassVar[int] = 19
@@ -536,7 +993,7 @@ class PaymentCardNumber(str):
         self.brand = self._get_brand(card_number)

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield str_validator
         yield constr_strip_whitespace
         yield constr_length_validator
@@ -545,59 +1002,180 @@ class PaymentCardNumber(str):
         yield cls
         yield cls.validate_length_for_brand

+    @property
+    def masked(self) -> str:
+        num_masked = len(self) - 10  # len(bin) + len(last4) == 10
+        return f'{self.bin}{"*" * num_masked}{self.last4}'
+
+    @classmethod
+    def validate_digits(cls, card_number: str) -> str:
+        if not card_number.isdigit():
+            raise errors.NotDigitError
+        return card_number
+
     @classmethod
-    def validate_luhn_check_digit(cls, card_number: str) ->str:
+    def validate_luhn_check_digit(cls, card_number: str) -> str:
         """
         Based on: https://en.wikipedia.org/wiki/Luhn_algorithm
         """
-        pass
+        sum_ = int(card_number[-1])
+        length = len(card_number)
+        parity = length % 2
+        for i in range(length - 1):
+            digit = int(card_number[i])
+            if i % 2 == parity:
+                digit *= 2
+            if digit > 9:
+                digit -= 9
+            sum_ += digit
+        valid = sum_ % 10 == 0
+        if not valid:
+            raise errors.LuhnValidationError
+        return card_number

     @classmethod
-    def validate_length_for_brand(cls, card_number: 'PaymentCardNumber'
-        ) ->'PaymentCardNumber':
+    def validate_length_for_brand(cls, card_number: 'PaymentCardNumber') -> 'PaymentCardNumber':
         """
         Validate length based on BIN for major brands:
         https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN)
         """
-        pass
-
-
-BYTE_SIZES = {'b': 1, 'kb': 10 ** 3, 'mb': 10 ** 6, 'gb': 10 ** 9, 'tb': 10 **
-    12, 'pb': 10 ** 15, 'eb': 10 ** 18, 'kib': 2 ** 10, 'mib': 2 ** 20,
-    'gib': 2 ** 30, 'tib': 2 ** 40, 'pib': 2 ** 50, 'eib': 2 ** 60}
-BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in
-    k})
-byte_string_re = re.compile('^\\s*(\\d*\\.?\\d+)\\s*(\\w+)?', re.IGNORECASE)
+        required_length: Union[None, int, str] = None
+        if card_number.brand in PaymentCardBrand.mastercard:
+            required_length = 16
+            valid = len(card_number) == required_length
+        elif card_number.brand == PaymentCardBrand.visa:
+            required_length = '13, 16 or 19'
+            valid = len(card_number) in {13, 16, 19}
+        elif card_number.brand == PaymentCardBrand.amex:
+            required_length = 15
+            valid = len(card_number) == required_length
+        else:
+            valid = True
+        if not valid:
+            raise errors.InvalidLengthForBrand(brand=card_number.brand, required_length=required_length)
+        return card_number
+
+    @staticmethod
+    def _get_brand(card_number: str) -> PaymentCardBrand:
+        if card_number[0] == '4':
+            brand = PaymentCardBrand.visa
+        elif 51 <= int(card_number[:2]) <= 55:
+            brand = PaymentCardBrand.mastercard
+        elif card_number[:2] in {'34', '37'}:
+            brand = PaymentCardBrand.amex
+        else:
+            brand = PaymentCardBrand.other
+        return brand
+
+
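Not part of the patch: a standalone sketch of the Luhn check used by `validate_luhn_check_digit` above, applied to a well-known test card number (not real data).

    def luhn_ok(card_number: str) -> bool:
        # same arithmetic as PaymentCardNumber.validate_luhn_check_digit, returning a bool
        total = int(card_number[-1])
        parity = len(card_number) % 2
        for i, ch in enumerate(card_number[:-1]):
            digit = int(ch)
            if i % 2 == parity:
                digit *= 2
            if digit > 9:
                digit -= 9
            total += digit
        return total % 10 == 0

    print(luhn_ok('4242424242424242'))  # True  (standard Visa test number)
    print(luhn_ok('4242424242424241'))  # False (check digit broken)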
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+BYTE_SIZES = {
+    'b': 1,
+    'kb': 10**3,
+    'mb': 10**6,
+    'gb': 10**9,
+    'tb': 10**12,
+    'pb': 10**15,
+    'eb': 10**18,
+    'kib': 2**10,
+    'mib': 2**20,
+    'gib': 2**30,
+    'tib': 2**40,
+    'pib': 2**50,
+    'eib': 2**60,
+}
+BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in k})
+byte_string_re = re.compile(r'^\s*(\d*\.?\d+)\s*(\w+)?', re.IGNORECASE)


 class ByteSize(int):
-
     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield cls.validate

+    @classmethod
+    def validate(cls, v: StrIntFloat) -> 'ByteSize':
+        try:
+            return cls(int(v))
+        except ValueError:
+            pass
+
+        str_match = byte_string_re.match(str(v))
+        if str_match is None:
+            raise errors.InvalidByteSize()
+
+        scalar, unit = str_match.groups()
+        if unit is None:
+            unit = 'b'
+
+        try:
+            unit_mult = BYTE_SIZES[unit.lower()]
+        except KeyError:
+            raise errors.InvalidByteSizeUnit(unit=unit)
+
+        return cls(int(float(scalar) * unit_mult))
+
+    def human_readable(self, decimal: bool = False) -> str:
+        if decimal:
+            divisor = 1000
+            units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
+            final_unit = 'EB'
+        else:
+            divisor = 1024
+            units = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB']
+            final_unit = 'EiB'
+
+        num = float(self)
+        for unit in units:
+            if abs(num) < divisor:
+                return f'{num:0.1f}{unit}'
+            num /= divisor
+
+        return f'{num:0.1f}{final_unit}'
+
+    def to(self, unit: str) -> float:
+        try:
+            unit_div = BYTE_SIZES[unit.lower()]
+        except KeyError:
+            raise errors.InvalidByteSizeUnit(unit=unit)
+
+        return self / unit_div
+
+
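Not part of the patch: an illustrative round trip through the `ByteSize` helpers above.

    from pydantic.v1 import ByteSize

    size = ByteSize.validate('1.5 GiB')
    print(int(size))              # 1610612736
    print(size.human_readable())  # 1.5GiB
    print(size.to('MB'))          # 1610.612736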
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

 if TYPE_CHECKING:
     PastDate = date
     FutureDate = date
 else:

-
     class PastDate(date):
-
         @classmethod
-        def __get_validators__(cls) ->'CallableGenerator':
+        def __get_validators__(cls) -> 'CallableGenerator':
             yield parse_date
             yield cls.validate

+        @classmethod
+        def validate(cls, value: date) -> date:
+            if value >= date.today():
+                raise errors.DateNotInThePastError()

-    class FutureDate(date):
+            return value

+    class FutureDate(date):
         @classmethod
-        def __get_validators__(cls) ->'CallableGenerator':
+        def __get_validators__(cls) -> 'CallableGenerator':
             yield parse_date
             yield cls.validate

+        @classmethod
+        def validate(cls, value: date) -> date:
+            if value <= date.today():
+                raise errors.DateNotInTheFutureError()
+
+            return value
+

 class ConstrainedDate(date, metaclass=ConstrainedNumberMeta):
     gt: OptionalDate = None
@@ -606,11 +1184,22 @@ class ConstrainedDate(date, metaclass=ConstrainedNumberMeta):
     le: OptionalDate = None

     @classmethod
-    def __modify_schema__(cls, field_schema: Dict[str, Any]) ->None:
-        update_not_none(field_schema, exclusiveMinimum=cls.gt,
-            exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le)
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, exclusiveMinimum=cls.gt, exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le)

     @classmethod
-    def __get_validators__(cls) ->'CallableGenerator':
+    def __get_validators__(cls) -> 'CallableGenerator':
         yield parse_date
         yield number_size_validator
+
+
+def condate(
+    *,
+    gt: date = None,
+    ge: date = None,
+    lt: date = None,
+    le: date = None,
+) -> Type[date]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(gt=gt, ge=ge, lt=lt, le=le)
+    return type('ConstrainedDateValue', (ConstrainedDate,), namespace)
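Not part of the patch: a brief sketch of the date types above; `FutureDate` must be strictly after today and `condate` adds range bounds (throwaway `Booking` model).

    from datetime import date, timedelta
    from pydantic.v1 import BaseModel, FutureDate, condate

    class Booking(BaseModel):
        check_in: FutureDate
        created: condate(ge=date(2020, 1, 1))

    print(Booking(check_in=date.today() + timedelta(days=7), created=date(2023, 5, 1)))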
diff --git a/pydantic/v1/typing.py b/pydantic/v1/typing.py
index 72034371e..7dd341ce0 100644
--- a/pydantic/v1/typing.py
+++ b/pydantic/v1/typing.py
@@ -2,66 +2,168 @@ import sys
 import typing
 from collections.abc import Callable
 from os import PathLike
-from typing import TYPE_CHECKING, AbstractSet, Any, Callable as TypingCallable, ClassVar, Dict, ForwardRef, Generator, Iterable, List, Mapping, NewType, Optional, Sequence, Set, Tuple, Type, TypeVar, Union, _eval_type, cast, get_type_hints
-from typing_extensions import Annotated, Final, Literal, NotRequired as TypedDictNotRequired, Required as TypedDictRequired
+from typing import (  # type: ignore
+    TYPE_CHECKING,
+    AbstractSet,
+    Any,
+    Callable as TypingCallable,
+    ClassVar,
+    Dict,
+    ForwardRef,
+    Generator,
+    Iterable,
+    List,
+    Mapping,
+    NewType,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    _eval_type,
+    cast,
+    get_type_hints,
+)
+
+from typing_extensions import (
+    Annotated,
+    Final,
+    Literal,
+    NotRequired as TypedDictNotRequired,
+    Required as TypedDictRequired,
+)
+
 try:
-    from typing import _TypingBase as typing_base
+    from typing import _TypingBase as typing_base  # type: ignore
 except ImportError:
-    from typing import _Final as typing_base
+    from typing import _Final as typing_base  # type: ignore
+
 try:
-    from typing import GenericAlias as TypingGenericAlias
+    from typing import GenericAlias as TypingGenericAlias  # type: ignore
 except ImportError:
+    # python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] and so on)
     TypingGenericAlias = ()
+
 try:
-    from types import UnionType as TypesUnionType
+    from types import UnionType as TypesUnionType  # type: ignore
 except ImportError:
+    # python < 3.10 does not have UnionType (str | int, byte | bool and so on)
     TypesUnionType = ()
+
+
 if sys.version_info < (3, 9):
+
+    def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
+        return type_._evaluate(globalns, localns)
+
+else:
+
+    def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
+        # Even though it is the right signature for python 3.9, mypy complains with
+        # `error: Too many arguments for "_evaluate" of "ForwardRef"` hence the cast...
+        # Python 3.13/3.12.4+ made `recursive_guard` a kwarg, so name it explicitly to avoid:
+        # TypeError: ForwardRef._evaluate() missing 1 required keyword-only argument: 'recursive_guard'
+        return cast(Any, type_)._evaluate(globalns, localns, recursive_guard=set())
+
+
 if sys.version_info < (3, 9):
+    # Ensure we always get the whole `Annotated` hint, not just the annotated type.
+    # For 3.7 to 3.8, `get_type_hints` doesn't recognize `typing_extensions.Annotated`,
+    # so it already returns the full annotation
     get_all_type_hints = get_type_hints
+
+else:
+
+    def get_all_type_hints(obj: Any, globalns: Any = None, localns: Any = None) -> Any:
+        return get_type_hints(obj, globalns, localns, include_extras=True)
+
+
 _T = TypeVar('_T')
+
 AnyCallable = TypingCallable[..., Any]
 NoArgAnyCallable = TypingCallable[[], Any]
+
+# workaround for https://github.com/python/mypy/issues/9496
 AnyArgTCallable = TypingCallable[..., _T]
+
+
+# Annotated[...] is implemented by returning an instance of one of these classes, depending on
+# python/typing_extensions version.
 AnnotatedTypeNames = {'AnnotatedMeta', '_AnnotatedAlias'}
+
+
 LITERAL_TYPES: Set[Any] = {Literal}
 if hasattr(typing, 'Literal'):
     LITERAL_TYPES.add(typing.Literal)
+
+
 if sys.version_info < (3, 8):
+
+    def get_origin(t: Type[Any]) -> Optional[Type[Any]]:
+        if type(t).__name__ in AnnotatedTypeNames:
+            # weirdly this is a runtime requirement, as well as for mypy
+            return cast(Type[Any], Annotated)
+        return getattr(t, '__origin__', None)
+
 else:
     from typing import get_origin as _typing_get_origin

-    def get_origin(tp: Type[Any]) ->Optional[Type[Any]]:
+    def get_origin(tp: Type[Any]) -> Optional[Type[Any]]:
         """
         We can't directly use `typing.get_origin` since we need a fallback to support
         custom generic classes like `ConstrainedList`
         It should be useless once https://github.com/cython/cython/issues/3537 is
         solved and https://github.com/pydantic/pydantic/pull/1753 is merged.
         """
-        pass
+        if type(tp).__name__ in AnnotatedTypeNames:
+            return cast(Type[Any], Annotated)  # mypy complains about _SpecialForm
+        return _typing_get_origin(tp) or getattr(tp, '__origin__', None)
+
+
 if sys.version_info < (3, 8):
     from typing import _GenericAlias

-    def get_args(t: Type[Any]) ->Tuple[Any, ...]:
+    def get_args(t: Type[Any]) -> Tuple[Any, ...]:
         """Compatibility version of get_args for python 3.7.

         Mostly compatible with the python 3.8 `typing` module version
         and able to handle almost all use cases.
         """
-        pass
+        if type(t).__name__ in AnnotatedTypeNames:
+            return t.__args__ + t.__metadata__
+        if isinstance(t, _GenericAlias):
+            res = t.__args__
+            if t.__origin__ is Callable and res and res[0] is not Ellipsis:
+                res = (list(res[:-1]), res[-1])
+            return res
+        return getattr(t, '__args__', ())
+
 else:
     from typing import get_args as _typing_get_args

-    def _generic_get_args(tp: Type[Any]) ->Tuple[Any, ...]:
+    def _generic_get_args(tp: Type[Any]) -> Tuple[Any, ...]:
         """
         In python 3.9, `typing.Dict`, `typing.List`, ...
         do have an empty `__args__` by default (instead of the generic ~T for example).
         In order to still support `Dict` for example and consider it as `Dict[Any, Any]`,
         we retrieve the `_nparams` value that tells us how many parameters it needs.
         """
-        pass
-
-    def get_args(tp: Type[Any]) ->Tuple[Any, ...]:
+        if hasattr(tp, '_nparams'):
+            return (Any,) * tp._nparams
+        # Special case for `tuple[()]`, which used to return ((),) with `typing.Tuple`
+        # in python 3.10- but now returns () for `tuple` and `Tuple`.
+        # This will probably be clarified in pydantic v2
+        try:
+            if tp == Tuple[()] or sys.version_info >= (3, 9) and tp == tuple[()]:  # type: ignore[misc]
+                return ((),)
+        # there is a TypeError when compiled with cython
+        except TypeError:  # pragma: no cover
+            pass
+        return ()
+
+    def get_args(tp: Type[Any]) -> Tuple[Any, ...]:
         """Get type arguments with all substitutions performed.

         For unions, basic simplifications used by Union constructor are performed.
@@ -72,22 +174,29 @@ else:
             get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
             get_args(Callable[[], T][int]) == ([], int)
         """
-        pass
+        if type(tp).__name__ in AnnotatedTypeNames:
+            return tp.__args__ + tp.__metadata__
+        # the fallback is needed for the same reasons as `get_origin` (see above)
+        return _typing_get_args(tp) or getattr(tp, '__args__', ()) or _generic_get_args(tp)
+
+
 if sys.version_info < (3, 9):

-    def convert_generics(tp: Type[Any]) ->Type[Any]:
+    def convert_generics(tp: Type[Any]) -> Type[Any]:
         """Python 3.9 and older only supports generics from `typing` module.
         They convert strings to ForwardRef automatically.

         Examples::
             typing.List['Hero'] == typing.List[ForwardRef('Hero')]
         """
-        pass
+        return tp
+
 else:
-    from typing import _UnionGenericAlias
+    from typing import _UnionGenericAlias  # type: ignore
+
     from typing_extensions import _AnnotatedAlias

-    def convert_generics(tp: Type[Any]) ->Type[Any]:
+    def convert_generics(tp: Type[Any]) -> Type[Any]:
         """
         Recursively searches for `str` type hints and replaces them with ForwardRef.

@@ -97,16 +206,60 @@ else:
             convert_generics(typing.Dict['Hero', 'Team']) == typing.Dict[ForwardRef('Hero'), ForwardRef('Team')]
             convert_generics(list[str | 'Hero'] | int) == list[str | ForwardRef('Hero')] | int
         """
-        pass
+        origin = get_origin(tp)
+        if not origin or not hasattr(tp, '__args__'):
+            return tp
+
+        args = get_args(tp)
+
+        # typing.Annotated needs special treatment
+        if origin is Annotated:
+            return _AnnotatedAlias(convert_generics(args[0]), args[1:])
+
+        # recursively replace `str` instances inside of `GenericAlias` with `ForwardRef(arg)`
+        converted = tuple(
+            ForwardRef(arg) if isinstance(arg, str) and isinstance(tp, TypingGenericAlias) else convert_generics(arg)
+            for arg in args
+        )
+
+        if converted == args:
+            return tp
+        elif isinstance(tp, TypingGenericAlias):
+            return TypingGenericAlias(origin, converted)
+        elif isinstance(tp, TypesUnionType):
+            # recreate types.UnionType (PEP604, Python >= 3.10)
+            return _UnionGenericAlias(origin, converted)
+        else:
+            try:
+                setattr(tp, '__args__', converted)
+            except AttributeError:
+                pass
+            return tp
+
+
 if sys.version_info < (3, 10):
-    WithArgsTypes = TypingGenericAlias,
+
+    def is_union(tp: Optional[Type[Any]]) -> bool:
+        return tp is Union
+
+    WithArgsTypes = (TypingGenericAlias,)
+
 else:
     import types
     import typing
-    WithArgsTypes = typing._GenericAlias, types.GenericAlias, types.UnionType
+
+    def is_union(tp: Optional[Type[Any]]) -> bool:
+        return tp is Union or tp is types.UnionType  # noqa: E721
+
+    WithArgsTypes = (typing._GenericAlias, types.GenericAlias, types.UnionType)
+
+
 StrPath = Union[str, PathLike]
+
+
 if TYPE_CHECKING:
     from pydantic.v1.fields import ModelField
+
     TupleGenerator = Generator[Tuple[str, Any], None, None]
     DictStrAny = Dict[str, Any]
     DictAny = Dict[Any, Any]
@@ -118,116 +271,338 @@ if TYPE_CHECKING:
     MappingIntStrAny = Mapping[IntStr, Any]
     CallableGenerator = Generator[AnyCallable, None, None]
     ReprArgs = Sequence[Tuple[Optional[str], Any]]
+
     MYPY = False
     if MYPY:
         AnyClassMethod = classmethod[Any]
     else:
+        # classmethod[TargetType, CallableParamSpecType, CallableReturnType]
         AnyClassMethod = classmethod[Any, Any, Any]
-__all__ = ('AnyCallable', 'NoArgAnyCallable', 'NoneType', 'is_none_type',
-    'display_as_type', 'resolve_annotations', 'is_callable_type',
-    'is_literal_type', 'all_literal_values', 'is_namedtuple',
-    'is_typeddict', 'is_typeddict_special', 'is_new_type',
-    'new_type_supertype', 'is_classvar', 'is_finalvar',
-    'update_field_forward_refs', 'update_model_forward_refs',
-    'TupleGenerator', 'DictStrAny', 'DictAny', 'SetStr', 'ListStr',
-    'IntStr', 'AbstractSetIntStr', 'DictIntStrAny', 'CallableGenerator',
-    'ReprArgs', 'AnyClassMethod', 'CallableGenerator', 'WithArgsTypes',
-    'get_args', 'get_origin', 'get_sub_types', 'typing_base',
-    'get_all_type_hints', 'is_union', 'StrPath', 'MappingIntStrAny')
+
+__all__ = (
+    'AnyCallable',
+    'NoArgAnyCallable',
+    'NoneType',
+    'is_none_type',
+    'display_as_type',
+    'resolve_annotations',
+    'is_callable_type',
+    'is_literal_type',
+    'all_literal_values',
+    'is_namedtuple',
+    'is_typeddict',
+    'is_typeddict_special',
+    'is_new_type',
+    'new_type_supertype',
+    'is_classvar',
+    'is_finalvar',
+    'update_field_forward_refs',
+    'update_model_forward_refs',
+    'TupleGenerator',
+    'DictStrAny',
+    'DictAny',
+    'SetStr',
+    'ListStr',
+    'IntStr',
+    'AbstractSetIntStr',
+    'DictIntStrAny',
+    'CallableGenerator',
+    'ReprArgs',
+    'AnyClassMethod',
+    'CallableGenerator',
+    'WithArgsTypes',
+    'get_args',
+    'get_origin',
+    'get_sub_types',
+    'typing_base',
+    'get_all_type_hints',
+    'is_union',
+    'StrPath',
+    'MappingIntStrAny',
+)
+
+
 NoneType = None.__class__
+
+
 NONE_TYPES: Tuple[Any, Any, Any] = (None, NoneType, Literal[None])
+
+
 if sys.version_info < (3, 8):
+    # Even though this implementation is slower, we need it for python 3.7:
+    # In python 3.7 "Literal" is not a builtin type and uses a different
+    # mechanism.
+    # for this reason `Literal[None] is Literal[None]` evaluates to `False`,
+    # breaking the faster implementation used for the other python versions.
+
+    def is_none_type(type_: Any) -> bool:
+        return type_ in NONE_TYPES
+
 elif sys.version_info[:2] == (3, 8):

+    def is_none_type(type_: Any) -> bool:
+        for none_type in NONE_TYPES:
+            if type_ is none_type:
+                return True
+        # With python 3.8, specifically 3.8.10, Literal "is" checks are very flakey and
+        # can change with very subtle differences, such as the use of types in other modules;
+        # hopefully this check avoids that issue.
+        if is_literal_type(type_):  # pragma: no cover
+            return all_literal_values(type_) == (None,)
+        return False
+
+else:
+
+    def is_none_type(type_: Any) -> bool:
+        return type_ in NONE_TYPES
+

-def resolve_annotations(raw_annotations: Dict[str, Type[Any]], module_name:
-    Optional[str]) ->Dict[str, Type[Any]]:
+def display_as_type(v: Type[Any]) -> str:
+    if not isinstance(v, typing_base) and not isinstance(v, WithArgsTypes) and not isinstance(v, type):
+        v = v.__class__
+
+    if is_union(get_origin(v)):
+        return f'Union[{", ".join(map(display_as_type, get_args(v)))}]'
+
+    if isinstance(v, WithArgsTypes):
+        # Generic alias are constructs like `list[int]`
+        return str(v).replace('typing.', '')
+
+    try:
+        return v.__name__
+    except AttributeError:
+        # happens with typing objects
+        return str(v).replace('typing.', '')
+
+
+def resolve_annotations(raw_annotations: Dict[str, Type[Any]], module_name: Optional[str]) -> Dict[str, Type[Any]]:
     """
     Partially taken from typing.get_type_hints.

     Resolve string or ForwardRef annotations into type objects if possible.
     """
-    pass
-
-
-def all_literal_values(type_: Type[Any]) ->Tuple[Any, ...]:
+    base_globals: Optional[Dict[str, Any]] = None
+    if module_name:
+        try:
+            module = sys.modules[module_name]
+        except KeyError:
+            # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363
+            pass
+        else:
+            base_globals = module.__dict__
+
+    annotations = {}
+    for name, value in raw_annotations.items():
+        if isinstance(value, str):
+            if (3, 10) > sys.version_info >= (3, 9, 8) or sys.version_info >= (3, 10, 1):
+                value = ForwardRef(value, is_argument=False, is_class=True)
+            else:
+                value = ForwardRef(value, is_argument=False)
+        try:
+            if sys.version_info >= (3, 13):
+                value = _eval_type(value, base_globals, None, type_params=())
+            else:
+                value = _eval_type(value, base_globals, None)
+        except NameError:
+            # this is ok, it can be fixed with update_forward_refs
+            pass
+        annotations[name] = value
+    return annotations
+
+
+def is_callable_type(type_: Type[Any]) -> bool:
+    return type_ is Callable or get_origin(type_) is Callable
+
+
+def is_literal_type(type_: Type[Any]) -> bool:
+    return Literal is not None and get_origin(type_) in LITERAL_TYPES
+
+
+def literal_values(type_: Type[Any]) -> Tuple[Any, ...]:
+    return get_args(type_)
+
+
+def all_literal_values(type_: Type[Any]) -> Tuple[Any, ...]:
     """
     This method is used to retrieve all Literal values as
     Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586)
     e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]`
     """
-    pass
+    if not is_literal_type(type_):
+        return (type_,)
+
+    values = literal_values(type_)
+    return tuple(x for value in values for x in all_literal_values(value))
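Not part of the patch: the docstring example above, run through `all_literal_values`.

    from typing_extensions import Literal
    from pydantic.v1.typing import all_literal_values

    print(all_literal_values(Literal[Literal[Literal[1, 2, 3], 'foo'], 5, None]))
    # (1, 2, 3, 'foo', 5, None)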


-def is_namedtuple(type_: Type[Any]) ->bool:
+def is_namedtuple(type_: Type[Any]) -> bool:
     """
     Check if a given class is a named tuple.
     It can be either a `typing.NamedTuple` or `collections.namedtuple`
     """
-    pass
+    from pydantic.v1.utils import lenient_issubclass
+
+    return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields')


-def is_typeddict(type_: Type[Any]) ->bool:
+def is_typeddict(type_: Type[Any]) -> bool:
     """
     Check if a given class is a typed dict (from `typing` or `typing_extensions`)
     In 3.10, there will be a public method (https://docs.python.org/3.10/library/typing.html#typing.is_typeddict)
     """
-    pass
+    from pydantic.v1.utils import lenient_issubclass
+
+    return lenient_issubclass(type_, dict) and hasattr(type_, '__total__')
+
+
+def _check_typeddict_special(type_: Any) -> bool:
+    return type_ is TypedDictRequired or type_ is TypedDictNotRequired


-def is_typeddict_special(type_: Any) ->bool:
+def is_typeddict_special(type_: Any) -> bool:
     """
     Check if type is a TypedDict special form (Required or NotRequired).
     """
-    pass
+    return _check_typeddict_special(type_) or _check_typeddict_special(get_origin(type_))


 test_type = NewType('test_type', str)


-def is_new_type(type_: Type[Any]) ->bool:
+def is_new_type(type_: Type[Any]) -> bool:
     """
     Check whether type_ was created using typing.NewType
     """
-    pass
+    return isinstance(type_, test_type.__class__) and hasattr(type_, '__supertype__')  # type: ignore


-def _check_finalvar(v: Optional[Type[Any]]) ->bool:
+def new_type_supertype(type_: Type[Any]) -> Type[Any]:
+    while hasattr(type_, '__supertype__'):
+        type_ = type_.__supertype__
+    return type_
+
+
+def _check_classvar(v: Optional[Type[Any]]) -> bool:
+    if v is None:
+        return False
+
+    return v.__class__ == ClassVar.__class__ and getattr(v, '_name', None) == 'ClassVar'
+
+
+def _check_finalvar(v: Optional[Type[Any]]) -> bool:
     """
     Check if a given type is a `typing.Final` type.
     """
-    pass
+    if v is None:
+        return False
+
+    return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, '_name', None) == 'Final')
+
+
+def is_classvar(ann_type: Type[Any]) -> bool:
+    if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)):
+        return True
+
+    # this is an ugly workaround for class vars that contain forward references and are therefore themselves
+    # forward references, see #3679
+    if ann_type.__class__ == ForwardRef and ann_type.__forward_arg__.startswith('ClassVar['):
+        return True
+
+    return False
+
+
+def is_finalvar(ann_type: Type[Any]) -> bool:
+    return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type))


-def update_field_forward_refs(field: 'ModelField', globalns: Any, localns: Any
-    ) ->None:
+def update_field_forward_refs(field: 'ModelField', globalns: Any, localns: Any) -> None:
     """
     Try to update ForwardRefs on fields based on this ModelField, globalns and localns.
     """
-    pass
-
-
-def update_model_forward_refs(model: Type[Any], fields: Iterable[
-    'ModelField'], json_encoders: Dict[Union[Type[Any], str, ForwardRef],
-    AnyCallable], localns: 'DictStrAny', exc_to_suppress: Tuple[Type[
-    BaseException], ...]=()) ->None:
+    prepare = False
+    if field.type_.__class__ == ForwardRef:
+        prepare = True
+        field.type_ = evaluate_forwardref(field.type_, globalns, localns or None)
+    if field.outer_type_.__class__ == ForwardRef:
+        prepare = True
+        field.outer_type_ = evaluate_forwardref(field.outer_type_, globalns, localns or None)
+    if prepare:
+        field.prepare()
+
+    if field.sub_fields:
+        for sub_f in field.sub_fields:
+            update_field_forward_refs(sub_f, globalns=globalns, localns=localns)
+
+    if field.discriminator_key is not None:
+        field.prepare_discriminated_union_sub_fields()
+
+
+def update_model_forward_refs(
+    model: Type[Any],
+    fields: Iterable['ModelField'],
+    json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable],
+    localns: 'DictStrAny',
+    exc_to_suppress: Tuple[Type[BaseException], ...] = (),
+) -> None:
     """
     Try to update model fields ForwardRefs based on model and localns.
     """
-    pass
+    if model.__module__ in sys.modules:
+        globalns = sys.modules[model.__module__].__dict__.copy()
+    else:
+        globalns = {}
+
+    globalns.setdefault(model.__name__, model)
+
+    for f in fields:
+        try:
+            update_field_forward_refs(f, globalns=globalns, localns=localns)
+        except exc_to_suppress:
+            pass
+
+    for key in set(json_encoders.keys()):
+        if isinstance(key, str):
+            fr: ForwardRef = ForwardRef(key)
+        elif isinstance(key, ForwardRef):
+            fr = key
+        else:
+            continue

+        try:
+            new_key = evaluate_forwardref(fr, globalns, localns or None)
+        except exc_to_suppress:  # pragma: no cover
+            continue

-def get_class(type_: Type[Any]) ->Union[None, bool, Type[Any]]:
+        json_encoders[new_key] = json_encoders.pop(key)
+
+
+def get_class(type_: Type[Any]) -> Union[None, bool, Type[Any]]:
     """
     Tries to get the class of a Type[T] annotation. Returns True if Type is used
     without brackets. Otherwise returns None.
     """
-    pass
+    if type_ is type:
+        return True
+
+    if get_origin(type_) is None:
+        return None
+
+    args = get_args(type_)
+    if not args or not isinstance(args[0], type):
+        return True
+    else:
+        return args[0]


-def get_sub_types(tp: Any) ->List[Any]:
+def get_sub_types(tp: Any) -> List[Any]:
     """
     Return all the types that are allowed by type `tp`
     `tp` can be a `Union` of allowed types or an `Annotated` type
     """
-    pass
+    origin = get_origin(tp)
+    if origin is Annotated:
+        return get_sub_types(get_args(tp)[0])
+    elif is_union(origin):
+        return [x for t in get_args(tp) for x in get_sub_types(t)]
+    else:
+        return [tp]
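Not part of the patch: `get_sub_types` flattens unions and looks through `Annotated`; a small sketch.

    from typing import Union
    from typing_extensions import Annotated
    from pydantic.v1.typing import get_sub_types

    print(get_sub_types(Union[int, str]))                  # [int, str]
    print(get_sub_types(Annotated[Union[int, str], ...]))  # [int, str]
    print(get_sub_types(bool))                             # [bool]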
diff --git a/pydantic/v1/utils.py b/pydantic/v1/utils.py
index effb78d4d..a09997bc5 100644
--- a/pydantic/v1/utils.py
+++ b/pydantic/v1/utils.py
@@ -5,107 +5,346 @@ from collections import OrderedDict, defaultdict, deque
 from copy import deepcopy
 from itertools import islice, zip_longest
 from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType
-from typing import TYPE_CHECKING, AbstractSet, Any, Callable, Collection, Dict, Generator, Iterable, Iterator, List, Mapping, NoReturn, Optional, Set, Tuple, Type, TypeVar, Union
+from typing import (
+    TYPE_CHECKING,
+    AbstractSet,
+    Any,
+    Callable,
+    Collection,
+    Dict,
+    Generator,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    NoReturn,
+    Optional,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)
+
 from typing_extensions import Annotated
+
 from pydantic.v1.errors import ConfigError
-from pydantic.v1.typing import NoneType, WithArgsTypes, all_literal_values, display_as_type, get_args, get_origin, is_literal_type, is_union
+from pydantic.v1.typing import (
+    NoneType,
+    WithArgsTypes,
+    all_literal_values,
+    display_as_type,
+    get_args,
+    get_origin,
+    is_literal_type,
+    is_union,
+)
 from pydantic.v1.version import version_info
+
 if TYPE_CHECKING:
     from inspect import Signature
     from pathlib import Path
+
     from pydantic.v1.config import BaseConfig
     from pydantic.v1.dataclasses import Dataclass
     from pydantic.v1.fields import ModelField
     from pydantic.v1.main import BaseModel
     from pydantic.v1.typing import AbstractSetIntStr, DictIntStrAny, IntStr, MappingIntStrAny, ReprArgs
-    RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple
-        [str, Any, Any]]]
-__all__ = ('import_string', 'sequence_like', 'validate_field_name',
-    'lenient_isinstance', 'lenient_issubclass', 'in_ipython',
-    'is_valid_identifier', 'deep_update', 'update_not_none',
-    'almost_equal_floats', 'get_model', 'to_camel', 'is_valid_field',
-    'smart_deepcopy', 'PyObjectStr', 'Representation', 'GetterDict',
-    'ValueItems', 'version_info', 'ClassAttribute', 'path_type', 'ROOT_KEY',
-    'get_unique_discriminator_alias', 'get_discriminator_alias_and_values',
-    'DUNDER_ATTRIBUTES')
-ROOT_KEY = '__root__'
-IMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = {int, float, complex, str,
-    bool, bytes, type, NoneType, FunctionType, BuiltinFunctionType,
-    LambdaType, weakref.ref, CodeType, ModuleType, NotImplemented.__class__,
-    Ellipsis.__class__}
-BUILTIN_COLLECTIONS: Set[Type[Any]] = {list, set, tuple, frozenset, dict,
-    OrderedDict, defaultdict, deque}

+    RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]]
+
+__all__ = (
+    'import_string',
+    'sequence_like',
+    'validate_field_name',
+    'lenient_isinstance',
+    'lenient_issubclass',
+    'in_ipython',
+    'is_valid_identifier',
+    'deep_update',
+    'update_not_none',
+    'almost_equal_floats',
+    'get_model',
+    'to_camel',
+    'is_valid_field',
+    'smart_deepcopy',
+    'PyObjectStr',
+    'Representation',
+    'GetterDict',
+    'ValueItems',
+    'version_info',  # required here to match behaviour in v1.3
+    'ClassAttribute',
+    'path_type',
+    'ROOT_KEY',
+    'get_unique_discriminator_alias',
+    'get_discriminator_alias_and_values',
+    'DUNDER_ATTRIBUTES',
+)

-def import_string(dotted_path: str) ->Any:
+ROOT_KEY = '__root__'
+# these are types that are returned unchanged by deepcopy
+IMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = {
+    int,
+    float,
+    complex,
+    str,
+    bool,
+    bytes,
+    type,
+    NoneType,
+    FunctionType,
+    BuiltinFunctionType,
+    LambdaType,
+    weakref.ref,
+    CodeType,
+    # note: including ModuleType will differ from the behaviour of deepcopy by not producing an error.
+    # It might not be a good idea in general, but considering that this function is used only internally
+    # against default values of fields, it allows a field to actually have a module as its default value
+    ModuleType,
+    NotImplemented.__class__,
+    Ellipsis.__class__,
+}
+
+# these are types that if empty, might be copied with simple copy() instead of deepcopy()
+BUILTIN_COLLECTIONS: Set[Type[Any]] = {
+    list,
+    set,
+    tuple,
+    frozenset,
+    dict,
+    OrderedDict,
+    defaultdict,
+    deque,
+}
+
+
+def import_string(dotted_path: str) -> Any:
     """
     Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the
     last name in the path. Raise ImportError if the import fails.
     """
-    pass
+    from importlib import import_module
+
+    try:
+        module_path, class_name = dotted_path.strip(' ').rsplit('.', 1)
+    except ValueError as e:
+        raise ImportError(f'"{dotted_path}" doesn\'t look like a module path') from e

+    module = import_module(module_path)
+    try:
+        return getattr(module, class_name)
+    except AttributeError as e:
+        raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute') from e
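Not part of the patch: illustrative behaviour of `import_string`.

    from pydantic.v1.utils import import_string

    print(import_string('math.pi'))  # 3.141592653589793
    import_string('no-dots-here')    # raises ImportError: doesn't look like a module path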

-def truncate(v: Union[str], *, max_len: int=80) ->str:
+
+def truncate(v: Union[str], *, max_len: int = 80) -> str:
     """
     Truncate a value and add a unicode ellipsis (three dots) to the end if it was too long
     """
-    pass
+    warnings.warn('`truncate` is no-longer used by pydantic and is deprecated', DeprecationWarning)
+    if isinstance(v, str) and len(v) > (max_len - 2):
+        # -3 so quote + string + … + quote has correct length
+        return (v[: (max_len - 3)] + '…').__repr__()
+    try:
+        v = v.__repr__()
+    except TypeError:
+        v = v.__class__.__repr__(v)  # in case v is a type
+    if len(v) > max_len:
+        v = v[: max_len - 1] + '…'
+    return v
+

+def sequence_like(v: Any) -> bool:
+    return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque))

-def validate_field_name(bases: List[Type['BaseModel']], field_name: str
-    ) ->None:
+
+def validate_field_name(bases: List[Type['BaseModel']], field_name: str) -> None:
     """
     Ensure that the field's name does not shadow an existing attribute of the model.
     """
-    pass
+    for base in bases:
+        if getattr(base, field_name, None):
+            raise NameError(
+                f'Field name "{field_name}" shadows a BaseModel attribute; '
+                f'use a different field name with "alias=\'{field_name}\'".'
+            )
+
+
+def lenient_isinstance(o: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool:
+    try:
+        return isinstance(o, class_or_tuple)  # type: ignore[arg-type]
+    except TypeError:
+        return False
+
+
+def lenient_issubclass(cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool:
+    try:
+        return isinstance(cls, type) and issubclass(cls, class_or_tuple)  # type: ignore[arg-type]
+    except TypeError:
+        if isinstance(cls, WithArgsTypes):
+            return False
+        raise  # pragma: no cover


-def in_ipython() ->bool:
+def in_ipython() -> bool:
     """
     Check whether we're in an ipython environment, including jupyter notebooks.
     """
-    pass
+    try:
+        eval('__IPYTHON__')
+    except NameError:
+        return False
+    else:  # pragma: no cover
+        return True


-def is_valid_identifier(identifier: str) ->bool:
+def is_valid_identifier(identifier: str) -> bool:
     """
     Checks that a string is a valid identifier and not a Python keyword.
     :param identifier: The identifier to test.
     :return: True if the identifier is valid.
     """
-    pass
+    return identifier.isidentifier() and not keyword.iskeyword(identifier)


 KeyType = TypeVar('KeyType')


-def almost_equal_floats(value_1: float, value_2: float, *, delta: float=1e-08
-    ) ->bool:
+def deep_update(mapping: Dict[KeyType, Any], *updating_mappings: Dict[KeyType, Any]) -> Dict[KeyType, Any]:
+    updated_mapping = mapping.copy()
+    for updating_mapping in updating_mappings:
+        for k, v in updating_mapping.items():
+            if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict):
+                updated_mapping[k] = deep_update(updated_mapping[k], v)
+            else:
+                updated_mapping[k] = v
+    return updated_mapping
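Not part of the patch: `deep_update` merges nested dicts recursively and overwrites everything else; a quick sketch.

    from pydantic.v1.utils import deep_update

    base = {'db': {'host': 'localhost', 'port': 5432}, 'debug': False}
    override = {'db': {'port': 5433}, 'debug': True}
    print(deep_update(base, override))
    # {'db': {'host': 'localhost', 'port': 5433}, 'debug': True}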
+
+
+def update_not_none(mapping: Dict[Any, Any], **update: Any) -> None:
+    mapping.update({k: v for k, v in update.items() if v is not None})
+
+
+def almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool:
     """
     Return True if two floats are almost equal
     """
-    pass
+    return abs(value_1 - value_2) <= delta


-def generate_model_signature(init: Callable[..., None], fields: Dict[str,
-    'ModelField'], config: Type['BaseConfig']) ->'Signature':
+def generate_model_signature(
+    init: Callable[..., None], fields: Dict[str, 'ModelField'], config: Type['BaseConfig']
+) -> 'Signature':
     """
     Generate signature for model based on its fields
     """
-    pass
+    from inspect import Parameter, Signature, signature
+
+    from pydantic.v1.config import Extra
+
+    present_params = signature(init).parameters.values()
+    merged_params: Dict[str, Parameter] = {}
+    var_kw = None
+    use_var_kw = False
+
+    for param in islice(present_params, 1, None):  # skip self arg
+        if param.kind is param.VAR_KEYWORD:
+            var_kw = param
+            continue
+        merged_params[param.name] = param
+
+    if var_kw:  # if custom init has no var_kw, fields which are not declared in it cannot be passed through
+        allow_names = config.allow_population_by_field_name
+        for field_name, field in fields.items():
+            param_name = field.alias
+            if field_name in merged_params or param_name in merged_params:
+                continue
+            elif not is_valid_identifier(param_name):
+                if allow_names and is_valid_identifier(field_name):
+                    param_name = field_name
+                else:
+                    use_var_kw = True
+                    continue
+
+            # TODO: replace annotation with actual expected types once #1055 solved
+            kwargs = {'default': field.default} if not field.required else {}
+            merged_params[param_name] = Parameter(
+                param_name, Parameter.KEYWORD_ONLY, annotation=field.annotation, **kwargs
+            )
+
+    if config.extra is Extra.allow:
+        use_var_kw = True
+
+    if var_kw and use_var_kw:
+        # Make sure the parameter for extra kwargs
+        # does not have the same name as a field
+        default_model_signature = [
+            ('__pydantic_self__', Parameter.POSITIONAL_OR_KEYWORD),
+            ('data', Parameter.VAR_KEYWORD),
+        ]
+        if [(p.name, p.kind) for p in present_params] == default_model_signature:
+            # if this is the standard model signature, use extra_data as the extra args name
+            var_kw_name = 'extra_data'
+        else:
+            # else start from var_kw
+            var_kw_name = var_kw.name
+
+        # generate a name that's definitely unique
+        while var_kw_name in fields:
+            var_kw_name += '_'
+        merged_params[var_kw_name] = var_kw.replace(name=var_kw_name)
+
+    return Signature(parameters=list(merged_params.values()), return_annotation=None)
+
+
+def get_model(obj: Union[Type['BaseModel'], Type['Dataclass']]) -> Type['BaseModel']:
+    from pydantic.v1.main import BaseModel
+
+    try:
+        model_cls = obj.__pydantic_model__  # type: ignore
+    except AttributeError:
+        model_cls = obj
+
+    if not issubclass(model_cls, BaseModel):
+        raise TypeError('Unsupported type, must be either BaseModel or dataclass')
+    return model_cls
+
+
+def to_camel(string: str) -> str:
+    return ''.join(word.capitalize() for word in string.split('_'))
+
+
+def to_lower_camel(string: str) -> str:
+    if len(string) >= 1:
+        pascal_string = to_camel(string)
+        return pascal_string[0].lower() + pascal_string[1:]
+    return string.lower()


 T = TypeVar('T')


-def unique_list(input_list: Union[List[T], Tuple[T, ...]], *, name_factory:
-    Callable[[T], str]=str) ->List[T]:
+def unique_list(
+    input_list: Union[List[T], Tuple[T, ...]],
+    *,
+    name_factory: Callable[[T], str] = str,
+) -> List[T]:
     """
     Make a list unique while maintaining order.
     We update the list if another one with the same name is set
     (e.g. root validator overridden in subclass)
     """
-    pass
+    result: List[T] = []
+    result_names: List[str] = []
+    for v in input_list:
+        v_name = name_factory(v)
+        if v_name not in result_names:
+            result_names.append(v_name)
+            result.append(v)
+        else:
+            result[result_names.index(v_name)] = v
+
+    return result
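Not part of the patch: `unique_list` keeps first-seen order but lets a later entry with the same derived name replace the earlier one.

    from pydantic.v1.utils import unique_list

    print(unique_list(['a', 'b', 'a', 'c']))  # ['a', 'b', 'c']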


 class PyObjectStr(str):
@@ -114,7 +353,7 @@ class PyObjectStr(str):
     representation of something that valid (or pseudo-valid) python.
     """

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return str(self)


@@ -125,9 +364,10 @@ class Representation:
     __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations
     of objects.
     """
+
     __slots__: Tuple[str, ...] = tuple()

-    def __repr_args__(self) ->'ReprArgs':
+    def __repr_args__(self) -> 'ReprArgs':
         """
         Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden.

@@ -138,18 +378,16 @@ class Representation:
         attrs = ((s, getattr(self, s)) for s in self.__slots__)
         return [(a, v) for a, v in attrs if v is not None]

-    def __repr_name__(self) ->str:
+    def __repr_name__(self) -> str:
         """
         Name of the instance's class, used in __repr__.
         """
         return self.__class__.__name__

-    def __repr_str__(self, join_str: str) ->str:
-        return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a,
-            v in self.__repr_args__())
+    def __repr_str__(self, join_str: str) -> str:
+        return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__())

-    def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) ->Generator[
-        Any, None, None]:
+    def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]:
         """
         Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representation of objects
         """
@@ -164,13 +402,13 @@ class Representation:
         yield -1
         yield ')'

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return self.__repr_str__(' ')

-    def __repr__(self) ->str:
-        return f"{self.__repr_name__()}({self.__repr_str__(', ')})"
+    def __repr__(self) -> str:
+        return f'{self.__repr_name__()}({self.__repr_str__(", ")})'

-    def __rich_repr__(self) ->'RichReprResult':
+    def __rich_repr__(self) -> 'RichReprResult':
         """Get fields for Rich library"""
         for name, field_repr in self.__repr_args__():
             if name is None:
@@ -185,48 +423,59 @@ class GetterDict(Representation):

     We can't inherit from Mapping[str, Any] because it upsets cython so we have to implement all methods ourselves.
     """
-    __slots__ = '_obj',
+
+    __slots__ = ('_obj',)

     def __init__(self, obj: Any):
         self._obj = obj

-    def __getitem__(self, key: str) ->Any:
+    def __getitem__(self, key: str) -> Any:
         try:
             return getattr(self._obj, key)
         except AttributeError as e:
             raise KeyError(key) from e

-    def extra_keys(self) ->Set[Any]:
+    def get(self, key: Any, default: Any = None) -> Any:
+        return getattr(self._obj, key, default)
+
+    def extra_keys(self) -> Set[Any]:
         """
         We don't want to get any other attributes of obj if the model didn't explicitly ask for them
         """
-        pass
+        return set()

-    def keys(self) ->List[Any]:
+    def keys(self) -> List[Any]:
         """
         Keys of the pseudo dictionary, uses a list not set so order information can be maintained like python
         dictionaries.
         """
-        pass
+        return list(self)
+
+    def values(self) -> List[Any]:
+        return [self[k] for k in self]

-    def __iter__(self) ->Iterator[str]:
+    def items(self) -> Iterator[Tuple[str, Any]]:
+        for k in self:
+            yield k, self.get(k)
+
+    def __iter__(self) -> Iterator[str]:
         for name in dir(self._obj):
             if not name.startswith('_'):
                 yield name

-    def __len__(self) ->int:
+    def __len__(self) -> int:
         return sum(1 for _ in self)

-    def __contains__(self, item: Any) ->bool:
+    def __contains__(self, item: Any) -> bool:
         return item in self.keys()

-    def __eq__(self, other: Any) ->bool:
+    def __eq__(self, other: Any) -> bool:
         return dict(self) == dict(other.items())

-    def __repr_args__(self) ->'ReprArgs':
+    def __repr_args__(self) -> 'ReprArgs':
         return [(None, dict(self))]

-    def __repr_name__(self) ->str:
+    def __repr_name__(self) -> str:
         return f'GetterDict[{display_as_type(self._obj)}]'
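
A sketch of how `GetterDict` wraps an arbitrary object (illustrative only; the `User` class is invented for the example and `GetterDict` is assumed to be imported from `pydantic.v1.utils`):

    class User:
        name = 'anna'
        _secret = 'hidden'

    gd = GetterDict(User())
    assert gd['name'] == 'anna'
    assert list(gd) == ['name']   # only public attribute names are iterated
    assert '_secret' not in gd    # underscore-prefixed attributes are skipped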


@@ -234,41 +483,43 @@ class ValueItems(Representation):
     """
     Class for more convenient calculation of excluded or included fields on values.
     """
-    __slots__ = '_items', '_type'

-    def __init__(self, value: Any, items: Union['AbstractSetIntStr',
-        'MappingIntStrAny']) ->None:
+    __slots__ = ('_items', '_type')
+
+    def __init__(self, value: Any, items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> None:
         items = self._coerce_items(items)
+
         if isinstance(value, (list, tuple)):
             items = self._normalize_indexes(items, len(value))
+
         self._items: 'MappingIntStrAny' = items

-    def is_excluded(self, item: Any) ->bool:
+    def is_excluded(self, item: Any) -> bool:
         """
         Check if item is fully excluded.

         :param item: key or index of a value
         """
-        pass
+        return self.is_true(self._items.get(item))

-    def is_included(self, item: Any) ->bool:
+    def is_included(self, item: Any) -> bool:
         """
         Check if value is contained in self._items

         :param item: key or index of value
         """
-        pass
+        return item in self._items

-    def for_element(self, e: 'IntStr') ->Optional[Union['AbstractSetIntStr',
-        'MappingIntStrAny']]:
+    def for_element(self, e: 'IntStr') -> Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']]:
         """
         :param e: key or index of element on value
         :return: raw values for element if self._items is dict and contain needed element
         """
-        pass

-    def _normalize_indexes(self, items: 'MappingIntStrAny', v_length: int
-        ) ->'DictIntStrAny':
+        item = self._items.get(e)
+        return item if not self.is_true(item) else None
+
+    def _normalize_indexes(self, items: 'MappingIntStrAny', v_length: int) -> 'DictIntStrAny':
         """
         :param items: dict or set of indexes which will be normalized
         :param v_length: length of the sequence whose indexes will be normalized
@@ -278,10 +529,37 @@ class ValueItems(Representation):
         >>> self._normalize_indexes({'__all__': True}, 4)
         {0: True, 1: True, 2: True, 3: True}
         """
-        pass
+
+        normalized_items: 'DictIntStrAny' = {}
+        all_items = None
+        for i, v in items.items():
+            if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or self.is_true(v)):
+                raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}')
+            if i == '__all__':
+                all_items = self._coerce_value(v)
+                continue
+            if not isinstance(i, int):
+                raise TypeError(
+                    'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: '
+                    'expected integer keys or keyword "__all__"'
+                )
+            normalized_i = v_length + i if i < 0 else i
+            normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i))
+
+        if not all_items:
+            return normalized_items
+        if self.is_true(all_items):
+            for i in range(v_length):
+                normalized_items.setdefault(i, ...)
+            return normalized_items
+        for i in range(v_length):
+            normalized_item = normalized_items.setdefault(i, {})
+            if not self.is_true(normalized_item):
+                normalized_items[i] = self.merge(all_items, normalized_item)
+        return normalized_items

     @classmethod
-    def merge(cls, base: Any, override: Any, intersect: bool=False) ->Any:
+    def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any:
         """
         Merge a ``base`` item with an ``override`` item.

@@ -296,9 +574,54 @@ class ValueItems(Representation):
         set to ``False`` (default) and on the intersection of keys if
         ``intersect`` is set to ``True``.
         """
-        pass
+        override = cls._coerce_value(override)
+        base = cls._coerce_value(base)
+        if override is None:
+            return base
+        if cls.is_true(base) or base is None:
+            return override
+        if cls.is_true(override):
+            return base if intersect else override
+
+        # intersection or union of keys while preserving ordering:
+        if intersect:
+            merge_keys = [k for k in base if k in override] + [k for k in override if k in base]
+        else:
+            merge_keys = list(base) + [k for k in override if k not in base]
+
+        merged: 'DictIntStrAny' = {}
+        for k in merge_keys:
+            merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect)
+            if merged_item is not None:
+                merged[k] = merged_item
+
+        return merged
+
+    @staticmethod
+    def _coerce_items(items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> 'MappingIntStrAny':
+        if isinstance(items, Mapping):
+            pass
+        elif isinstance(items, AbstractSet):
+            items = dict.fromkeys(items, ...)
+        else:
+            class_name = getattr(items, '__class__', '???')
+            assert_never(
+                items,
+                f'Unexpected type of exclude value {class_name}',
+            )
+        return items

-    def __repr_args__(self) ->'ReprArgs':
+    @classmethod
+    def _coerce_value(cls, value: Any) -> Any:
+        if value is None or cls.is_true(value):
+            return value
+        return cls._coerce_items(value)
+
+    @staticmethod
+    def is_true(v: Any) -> bool:
+        return v is True or v is ...
+
+    def __repr_args__(self) -> 'ReprArgs':
         return [(None, self._items)]
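
To make the include/exclude handling above concrete, two illustrative calls (not part of the patch; assumes `ValueItems` is imported from `pydantic.v1.utils`). `...` (Ellipsis) stands for "take the whole value":

    >>> ValueItems.merge({'a': {'x'}, 'b': ...}, {'a': {'y'}})
    {'a': {'x': Ellipsis, 'y': Ellipsis}, 'b': Ellipsis}
    >>> ValueItems.merge({'a': ..., 'b': ...}, {'a': ...}, intersect=True)
    {'a': Ellipsis}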


@@ -306,50 +629,95 @@ class ClassAttribute:
     """
     Hide class attribute from its instances
     """
-    __slots__ = 'name', 'value'

-    def __init__(self, name: str, value: Any) ->None:
+    __slots__ = (
+        'name',
+        'value',
+    )
+
+    def __init__(self, name: str, value: Any) -> None:
         self.name = name
         self.value = value

-    def __get__(self, instance: Any, owner: Type[Any]) ->None:
+    def __get__(self, instance: Any, owner: Type[Any]) -> None:
         if instance is None:
             return self.value
-        raise AttributeError(
-            f'{self.name!r} attribute of {owner.__name__!r} is class-only')
+        raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only')


-path_types = {'is_dir': 'directory', 'is_file': 'file', 'is_mount':
-    'mount point', 'is_symlink': 'symlink', 'is_block_device':
-    'block device', 'is_char_device': 'char device', 'is_fifo': 'FIFO',
-    'is_socket': 'socket'}
+path_types = {
+    'is_dir': 'directory',
+    'is_file': 'file',
+    'is_mount': 'mount point',
+    'is_symlink': 'symlink',
+    'is_block_device': 'block device',
+    'is_char_device': 'char device',
+    'is_fifo': 'FIFO',
+    'is_socket': 'socket',
+}


-def path_type(p: 'Path') ->str:
+def path_type(p: 'Path') -> str:
     """
     Find out what sort of thing a path is.
     """
-    pass
+    assert p.exists(), 'path does not exist'
+    for method, name in path_types.items():
+        if getattr(p, method)():
+            return name
+
+    return 'unknown'


 Obj = TypeVar('Obj')


-def smart_deepcopy(obj: Obj) ->Obj:
+def smart_deepcopy(obj: Obj) -> Obj:
     """
     Return type as is for immutable built-in types
     Use obj.copy() for built-in empty collections
     Use copy.deepcopy() for non-empty collections and unknown objects
     """
-    pass
+
+    obj_type = obj.__class__
+    if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES:
+        return obj  # fastest case: obj is immutable and not collection therefore will not be copied anyway
+    try:
+        if not obj and obj_type in BUILTIN_COLLECTIONS:
+            # faster way for empty collections, no need to copy its members
+            return obj if obj_type is tuple else obj.copy()  # type: ignore  # tuple doesn't have copy method
+    except (TypeError, ValueError, RuntimeError):
+        # do we really dare to catch ALL errors? Seems a bit risky
+        pass
+
+    return deepcopy(obj)  # slowest way when we actually might need a deepcopy
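
Roughly, the three branches above behave like this (illustrative; assumes `smart_deepcopy` is imported from `pydantic.v1.utils`, with `IMMUTABLE_NON_COLLECTIONS_TYPES` and `BUILTIN_COLLECTIONS` defined earlier in that module):

    x = 'immutable'
    assert smart_deepcopy(x) is x              # immutable non-collection: returned as-is

    empty = []
    assert smart_deepcopy(empty) is not empty  # empty built-in collection: cheap shallow copy

    nested = [{'k': 1}]
    copied = smart_deepcopy(nested)
    assert copied == nested and copied[0] is not nested[0]  # anything else: full deepcopy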
+
+
+def is_valid_field(name: str) -> bool:
+    if not name.startswith('_'):
+        return True
+    return ROOT_KEY == name
+
+
+DUNDER_ATTRIBUTES = {
+    '__annotations__',
+    '__classcell__',
+    '__doc__',
+    '__module__',
+    '__orig_bases__',
+    '__orig_class__',
+    '__qualname__',
+}
+
+
+def is_valid_private_name(name: str) -> bool:
+    return not is_valid_field(name) and name not in DUNDER_ATTRIBUTES


-DUNDER_ATTRIBUTES = {'__annotations__', '__classcell__', '__doc__',
-    '__module__', '__orig_bases__', '__orig_class__', '__qualname__'}
 _EMPTY = object()


-def all_identical(left: Iterable[Any], right: Iterable[Any]) ->bool:
+def all_identical(left: Iterable[Any], right: Iterable[Any]) -> bool:
     """
     Check that the items of `left` are the same objects as those in `right`.

@@ -359,29 +727,77 @@ def all_identical(left: Iterable[Any], right: Iterable[Any]) ->bool:
     >>> all_identical([a, b, [a]], [a, b, [a]])  # new list object, while "equal" is not "identical"
     False
     """
-    pass
+    for left_item, right_item in zip_longest(left, right, fillvalue=_EMPTY):
+        if left_item is not right_item:
+            return False
+    return True


-def assert_never(obj: NoReturn, msg: str) ->NoReturn:
+def assert_never(obj: NoReturn, msg: str) -> NoReturn:
     """
     Helper to make sure that we have covered all possible types.

     This is mostly useful for ``mypy``, docs:
     https://mypy.readthedocs.io/en/latest/literal_types.html#exhaustive-checks
     """
-    pass
+    raise TypeError(msg)


-def get_unique_discriminator_alias(all_aliases: Collection[str],
-    discriminator_key: str) ->str:
+def get_unique_discriminator_alias(all_aliases: Collection[str], discriminator_key: str) -> str:
     """Validate that all aliases are the same and if that's the case return the alias"""
-    pass
+    unique_aliases = set(all_aliases)
+    if len(unique_aliases) > 1:
+        raise ConfigError(
+            f'Aliases for discriminator {discriminator_key!r} must be the same (got {", ".join(sorted(all_aliases))})'
+        )
+    return unique_aliases.pop()


-def get_discriminator_alias_and_values(tp: Any, discriminator_key: str
-    ) ->Tuple[str, Tuple[str, ...]]:
+def get_discriminator_alias_and_values(tp: Any, discriminator_key: str) -> Tuple[str, Tuple[str, ...]]:
     """
     Get alias and all valid values in the `Literal` type of the discriminator field
     `tp` can be a `BaseModel` class or directly an `Annotated` `Union` of many.
     """
-    pass
+    is_root_model = getattr(tp, '__custom_root_type__', False)
+
+    if get_origin(tp) is Annotated:
+        tp = get_args(tp)[0]
+
+    if hasattr(tp, '__pydantic_model__'):
+        tp = tp.__pydantic_model__
+
+    if is_union(get_origin(tp)):
+        alias, all_values = _get_union_alias_and_all_values(tp, discriminator_key)
+        return alias, tuple(v for values in all_values for v in values)
+    elif is_root_model:
+        union_type = tp.__fields__[ROOT_KEY].type_
+        alias, all_values = _get_union_alias_and_all_values(union_type, discriminator_key)
+
+        if len(set(all_values)) > 1:
+            raise ConfigError(
+                f'Field {discriminator_key!r} is not the same for all submodels of {display_as_type(tp)!r}'
+            )
+
+        return alias, all_values[0]
+
+    else:
+        try:
+            t_discriminator_type = tp.__fields__[discriminator_key].type_
+        except AttributeError as e:
+            raise TypeError(f'Type {tp.__name__!r} is not a valid `BaseModel` or `dataclass`') from e
+        except KeyError as e:
+            raise ConfigError(f'Model {tp.__name__!r} needs a discriminator field for key {discriminator_key!r}') from e
+
+        if not is_literal_type(t_discriminator_type):
+            raise ConfigError(f'Field {discriminator_key!r} of model {tp.__name__!r} needs to be a `Literal`')
+
+        return tp.__fields__[discriminator_key].alias, all_literal_values(t_discriminator_type)
+
+
+def _get_union_alias_and_all_values(
+    union_type: Type[Any], discriminator_key: str
+) -> Tuple[str, Tuple[Tuple[str, ...], ...]]:
+    zipped_aliases_values = [get_discriminator_alias_and_values(t, discriminator_key) for t in get_args(union_type)]
+    # unzip: [('alias_a',('v1', 'v2)), ('alias_b', ('v3',))] => [('alias_a', 'alias_b'), (('v1', 'v2'), ('v3',))]
+    all_aliases, all_values = zip(*zipped_aliases_values)
+    return get_unique_discriminator_alias(all_aliases, discriminator_key), all_values
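
A usage sketch for the discriminator helpers above (illustrative only; the `Cat`/`Dog` models are invented for the example):

    from typing import Literal, Union

    from pydantic.v1 import BaseModel, Field
    from pydantic.v1.utils import get_discriminator_alias_and_values

    class Cat(BaseModel):
        pet_type: Literal['cat'] = Field(alias='petType')

    class Dog(BaseModel):
        pet_type: Literal['dog'] = Field(alias='petType')

    # the shared alias plus every Literal value across the union
    assert get_discriminator_alias_and_values(Union[Cat, Dog], 'pet_type') == ('petType', ('cat', 'dog'))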
diff --git a/pydantic/v1/validators.py b/pydantic/v1/validators.py
index fd53f500c..7c39aa9a7 100644
--- a/pydantic/v1/validators.py
+++ b/pydantic/v1/validators.py
@@ -7,99 +7,759 @@ from decimal import Decimal, DecimalException
 from enum import Enum, IntEnum
 from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Callable, Deque, Dict, ForwardRef, FrozenSet, Generator, Hashable, List, NamedTuple, Pattern, Set, Tuple, Type, TypeVar, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Deque,
+    Dict,
+    ForwardRef,
+    FrozenSet,
+    Generator,
+    Hashable,
+    List,
+    NamedTuple,
+    Pattern,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)
 from uuid import UUID
+
 from pydantic.v1 import errors
 from pydantic.v1.datetime_parse import parse_date, parse_datetime, parse_duration, parse_time
-from pydantic.v1.typing import AnyCallable, all_literal_values, display_as_type, get_class, is_callable_type, is_literal_type, is_namedtuple, is_none_type, is_typeddict
+from pydantic.v1.typing import (
+    AnyCallable,
+    all_literal_values,
+    display_as_type,
+    get_class,
+    is_callable_type,
+    is_literal_type,
+    is_namedtuple,
+    is_none_type,
+    is_typeddict,
+)
 from pydantic.v1.utils import almost_equal_floats, lenient_issubclass, sequence_like
+
 if TYPE_CHECKING:
     from typing_extensions import Literal, TypedDict
+
     from pydantic.v1.config import BaseConfig
     from pydantic.v1.fields import ModelField
     from pydantic.v1.types import ConstrainedDecimal, ConstrainedFloat, ConstrainedInt
-    ConstrainedNumber = Union[ConstrainedDecimal, ConstrainedFloat,
-        ConstrainedInt]
+
+    ConstrainedNumber = Union[ConstrainedDecimal, ConstrainedFloat, ConstrainedInt]
     AnyOrderedDict = OrderedDict[Any, Any]
     Number = Union[int, float, Decimal]
     StrBytes = Union[str, bytes]
+
+
+def str_validator(v: Any) -> Union[str]:
+    if isinstance(v, str):
+        if isinstance(v, Enum):
+            return v.value
+        else:
+            return v
+    elif isinstance(v, (float, int, Decimal)):
+        # is there anything else we want to add here? If you think so, create an issue.
+        return str(v)
+    elif isinstance(v, (bytes, bytearray)):
+        return v.decode()
+    else:
+        raise errors.StrError()
+
+
+def strict_str_validator(v: Any) -> Union[str]:
+    if isinstance(v, str) and not isinstance(v, Enum):
+        return v
+    raise errors.StrError()
+
+
+def bytes_validator(v: Any) -> Union[bytes]:
+    if isinstance(v, bytes):
+        return v
+    elif isinstance(v, bytearray):
+        return bytes(v)
+    elif isinstance(v, str):
+        return v.encode()
+    elif isinstance(v, (float, int, Decimal)):
+        return str(v).encode()
+    else:
+        raise errors.BytesError()
+
+
+def strict_bytes_validator(v: Any) -> Union[bytes]:
+    if isinstance(v, bytes):
+        return v
+    elif isinstance(v, bytearray):
+        return bytes(v)
+    else:
+        raise errors.BytesError()
+
+
 BOOL_FALSE = {0, '0', 'off', 'f', 'false', 'n', 'no'}
 BOOL_TRUE = {1, '1', 'on', 't', 'true', 'y', 'yes'}
-max_str_int = 4300


-def constant_validator(v: 'Any', field: 'ModelField') ->'Any':
+def bool_validator(v: Any) -> bool:
+    if v is True or v is False:
+        return v
+    if isinstance(v, bytes):
+        v = v.decode()
+    if isinstance(v, str):
+        v = v.lower()
+    try:
+        if v in BOOL_TRUE:
+            return True
+        if v in BOOL_FALSE:
+            return False
+    except TypeError:
+        raise errors.BoolError()
+    raise errors.BoolError()
+
+
+# matches the default limit cpython, see https://github.com/python/cpython/pull/96500
+max_str_int = 4_300
+
+
+def int_validator(v: Any) -> int:
+    if isinstance(v, int) and not (v is True or v is False):
+        return v
+
+    # see https://github.com/pydantic/pydantic/issues/1477 and in turn, https://github.com/python/cpython/issues/95778
+    # this check should be unnecessary once patch releases are out for 3.7, 3.8, 3.9 and 3.10
+    # but better to check here until then.
+    # NOTICE: this does not fully protect user from the DOS risk since the standard library JSON implementation
+    # (and other std lib modules like xml) use `int()` and are likely called before this, the best workaround is to
+    # 1. update to the latest patch release of python once released, 2. use a different JSON library like ujson
+    if isinstance(v, (str, bytes, bytearray)) and len(v) > max_str_int:
+        raise errors.IntegerError()
+
+    try:
+        return int(v)
+    except (TypeError, ValueError, OverflowError):
+        raise errors.IntegerError()
+
+
+def strict_int_validator(v: Any) -> int:
+    if isinstance(v, int) and not (v is True or v is False):
+        return v
+    raise errors.IntegerError()
+
+
+def float_validator(v: Any) -> float:
+    if isinstance(v, float):
+        return v
+
+    try:
+        return float(v)
+    except (TypeError, ValueError):
+        raise errors.FloatError()
+
+
+def strict_float_validator(v: Any) -> float:
+    if isinstance(v, float):
+        return v
+    raise errors.FloatError()
+
+
+def float_finite_validator(v: 'Number', field: 'ModelField', config: 'BaseConfig') -> 'Number':
+    allow_inf_nan = getattr(field.type_, 'allow_inf_nan', None)
+    if allow_inf_nan is None:
+        allow_inf_nan = config.allow_inf_nan
+
+    if allow_inf_nan is False and (math.isnan(v) or math.isinf(v)):
+        raise errors.NumberNotFiniteError()
+    return v
+
+
+def number_multiple_validator(v: 'Number', field: 'ModelField') -> 'Number':
+    field_type: ConstrainedNumber = field.type_
+    if field_type.multiple_of is not None:
+        mod = float(v) / float(field_type.multiple_of) % 1
+        if not almost_equal_floats(mod, 0.0) and not almost_equal_floats(mod, 1.0):
+            raise errors.NumberNotMultipleError(multiple_of=field_type.multiple_of)
+    return v
+
+
+def number_size_validator(v: 'Number', field: 'ModelField') -> 'Number':
+    field_type: ConstrainedNumber = field.type_
+    if field_type.gt is not None and not v > field_type.gt:
+        raise errors.NumberNotGtError(limit_value=field_type.gt)
+    elif field_type.ge is not None and not v >= field_type.ge:
+        raise errors.NumberNotGeError(limit_value=field_type.ge)
+
+    if field_type.lt is not None and not v < field_type.lt:
+        raise errors.NumberNotLtError(limit_value=field_type.lt)
+    if field_type.le is not None and not v <= field_type.le:
+        raise errors.NumberNotLeError(limit_value=field_type.le)
+
+    return v
+
+
+def constant_validator(v: 'Any', field: 'ModelField') -> 'Any':
     """Validate ``const`` fields.

     The value provided for a ``const`` field must be equal to the default value
     of the field. This is to support the keyword of the same name in JSON
     Schema.
     """
-    pass
+    if v != field.default:
+        raise errors.WrongConstantError(given=v, permitted=[field.default])
+
+    return v
+
+
+def anystr_length_validator(v: 'StrBytes', config: 'BaseConfig') -> 'StrBytes':
+    v_len = len(v)
+
+    min_length = config.min_anystr_length
+    if v_len < min_length:
+        raise errors.AnyStrMinLengthError(limit_value=min_length)
+
+    max_length = config.max_anystr_length
+    if max_length is not None and v_len > max_length:
+        raise errors.AnyStrMaxLengthError(limit_value=max_length)
+
+    return v
+
+
+def anystr_strip_whitespace(v: 'StrBytes') -> 'StrBytes':
+    return v.strip()
+
+
+def anystr_upper(v: 'StrBytes') -> 'StrBytes':
+    return v.upper()
+
+
+def anystr_lower(v: 'StrBytes') -> 'StrBytes':
+    return v.lower()
+
+
+def ordered_dict_validator(v: Any) -> 'AnyOrderedDict':
+    if isinstance(v, OrderedDict):
+        return v
+
+    try:
+        return OrderedDict(v)
+    except (TypeError, ValueError):
+        raise errors.DictError()
+
+
+def dict_validator(v: Any) -> Dict[Any, Any]:
+    if isinstance(v, dict):
+        return v
+
+    try:
+        return dict(v)
+    except (TypeError, ValueError):
+        raise errors.DictError()
+
+
+def list_validator(v: Any) -> List[Any]:
+    if isinstance(v, list):
+        return v
+    elif sequence_like(v):
+        return list(v)
+    else:
+        raise errors.ListError()
+
+
+def tuple_validator(v: Any) -> Tuple[Any, ...]:
+    if isinstance(v, tuple):
+        return v
+    elif sequence_like(v):
+        return tuple(v)
+    else:
+        raise errors.TupleError()
+
+
+def set_validator(v: Any) -> Set[Any]:
+    if isinstance(v, set):
+        return v
+    elif sequence_like(v):
+        return set(v)
+    else:
+        raise errors.SetError()
+
+
+def frozenset_validator(v: Any) -> FrozenSet[Any]:
+    if isinstance(v, frozenset):
+        return v
+    elif sequence_like(v):
+        return frozenset(v)
+    else:
+        raise errors.FrozenSetError()
+
+
+def deque_validator(v: Any) -> Deque[Any]:
+    if isinstance(v, deque):
+        return v
+    elif sequence_like(v):
+        return deque(v)
+    else:
+        raise errors.DequeError()
+
+
+def enum_member_validator(v: Any, field: 'ModelField', config: 'BaseConfig') -> Enum:
+    try:
+        enum_v = field.type_(v)
+    except ValueError:
+        # field.type_ should be an enum, so will be iterable
+        raise errors.EnumMemberError(enum_values=list(field.type_))
+    return enum_v.value if config.use_enum_values else enum_v
+
+
+def uuid_validator(v: Any, field: 'ModelField') -> UUID:
+    try:
+        if isinstance(v, str):
+            v = UUID(v)
+        elif isinstance(v, (bytes, bytearray)):
+            try:
+                v = UUID(v.decode())
+            except ValueError:
+                # 16 bytes in big-endian order passed as the bytes argument fail
+                # the above check
+                v = UUID(bytes=v)
+    except ValueError:
+        raise errors.UUIDError()
+
+    if not isinstance(v, UUID):
+        raise errors.UUIDError()
+
+    required_version = getattr(field.type_, '_required_version', None)
+    if required_version and v.version != required_version:
+        raise errors.UUIDVersionError(required_version=required_version)
+
+    return v
+
+
+def decimal_validator(v: Any) -> Decimal:
+    if isinstance(v, Decimal):
+        return v
+    elif isinstance(v, (bytes, bytearray)):
+        v = v.decode()
+
+    v = str(v).strip()
+
+    try:
+        v = Decimal(v)
+    except DecimalException:
+        raise errors.DecimalError()
+
+    if not v.is_finite():
+        raise errors.DecimalIsNotFiniteError()
+
+    return v
+
+
+def hashable_validator(v: Any) -> Hashable:
+    if isinstance(v, Hashable):
+        return v
+
+    raise errors.HashableError()
+

+def ip_v4_address_validator(v: Any) -> IPv4Address:
+    if isinstance(v, IPv4Address):
+        return v

-def ip_v4_network_validator(v: Any) ->IPv4Network:
+    try:
+        return IPv4Address(v)
+    except ValueError:
+        raise errors.IPv4AddressError()
+
+
+def ip_v6_address_validator(v: Any) -> IPv6Address:
+    if isinstance(v, IPv6Address):
+        return v
+
+    try:
+        return IPv6Address(v)
+    except ValueError:
+        raise errors.IPv6AddressError()
+
+
+def ip_v4_network_validator(v: Any) -> IPv4Network:
     """
     Assume IPv4Network initialised with a default ``strict`` argument

     See more:
     https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network
     """
-    pass
+    if isinstance(v, IPv4Network):
+        return v
+
+    try:
+        return IPv4Network(v)
+    except ValueError:
+        raise errors.IPv4NetworkError()


-def ip_v6_network_validator(v: Any) ->IPv6Network:
+def ip_v6_network_validator(v: Any) -> IPv6Network:
     """
     Assume IPv6Network initialised with a default ``strict`` argument

     See more:
     https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network
     """
-    pass
+    if isinstance(v, IPv6Network):
+        return v
+
+    try:
+        return IPv6Network(v)
+    except ValueError:
+        raise errors.IPv6NetworkError()
+
+
+def ip_v4_interface_validator(v: Any) -> IPv4Interface:
+    if isinstance(v, IPv4Interface):
+        return v
+
+    try:
+        return IPv4Interface(v)
+    except ValueError:
+        raise errors.IPv4InterfaceError()
+

+def ip_v6_interface_validator(v: Any) -> IPv6Interface:
+    if isinstance(v, IPv6Interface):
+        return v

-def callable_validator(v: Any) ->AnyCallable:
+    try:
+        return IPv6Interface(v)
+    except ValueError:
+        raise errors.IPv6InterfaceError()
+
+
+def path_validator(v: Any) -> Path:
+    if isinstance(v, Path):
+        return v
+
+    try:
+        return Path(v)
+    except TypeError:
+        raise errors.PathError()
+
+
+def path_exists_validator(v: Any) -> Path:
+    if not v.exists():
+        raise errors.PathNotExistsError(path=v)
+
+    return v
+
+
+def callable_validator(v: Any) -> AnyCallable:
     """
     Perform a simple check if the value is callable.

     Note: complete matching of argument type hints and return types is not performed
     """
-    pass
+    if callable(v):
+        return v
+
+    raise errors.CallableError(value=v)
+
+
+def enum_validator(v: Any) -> Enum:
+    if isinstance(v, Enum):
+        return v
+
+    raise errors.EnumError(value=v)
+
+
+def int_enum_validator(v: Any) -> IntEnum:
+    if isinstance(v, IntEnum):
+        return v
+
+    raise errors.IntEnumError(value=v)
+
+
+def make_literal_validator(type_: Any) -> Callable[[Any], Any]:
+    permitted_choices = all_literal_values(type_)
+
+    # To have a O(1) complexity and still return one of the values set inside the `Literal`,
+    # we create a dict with the set values (a set causes some problems with the way intersection works).
+    # In some cases the set value and checked value can indeed be different (see `test_literal_validator_str_enum`)
+    allowed_choices = {v: v for v in permitted_choices}
+
+    def literal_validator(v: Any) -> Any:
+        try:
+            return allowed_choices[v]
+        except (KeyError, TypeError):
+            raise errors.WrongConstantError(given=v, permitted=permitted_choices)
+
+    return literal_validator
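
A quick sketch of what the generated validator does (illustrative, within this module's namespace where `errors` is already imported):

    from typing import Literal

    validate = make_literal_validator(Literal['a', 'b'])
    assert validate('a') == 'a'   # O(1) dict lookup, returns the canonical permitted value
    # validate('c') would raise errors.WrongConstantError(given='c', permitted=('a', 'b'))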
+
+
+def constr_length_validator(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes':
+    v_len = len(v)
+
+    min_length = field.type_.min_length if field.type_.min_length is not None else config.min_anystr_length
+    if v_len < min_length:
+        raise errors.AnyStrMinLengthError(limit_value=min_length)
+
+    max_length = field.type_.max_length if field.type_.max_length is not None else config.max_anystr_length
+    if max_length is not None and v_len > max_length:
+        raise errors.AnyStrMaxLengthError(limit_value=max_length)
+
+    return v
+
+
+def constr_strip_whitespace(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes':
+    strip_whitespace = field.type_.strip_whitespace or config.anystr_strip_whitespace
+    if strip_whitespace:
+        v = v.strip()
+
+    return v
+
+
+def constr_upper(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes':
+    upper = field.type_.to_upper or config.anystr_upper
+    if upper:
+        v = v.upper()
+
+    return v
+
+
+def constr_lower(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes':
+    lower = field.type_.to_lower or config.anystr_lower
+    if lower:
+        v = v.lower()
+    return v
+
+
+def validate_json(v: Any, config: 'BaseConfig') -> Any:
+    if v is None:
+        # pass None through to other validators
+        return v
+    try:
+        return config.json_loads(v)  # type: ignore
+    except ValueError:
+        raise errors.JsonError()
+    except TypeError:
+        raise errors.JsonTypeError()


 T = TypeVar('T')
+
+
+def make_arbitrary_type_validator(type_: Type[T]) -> Callable[[T], T]:
+    def arbitrary_type_validator(v: Any) -> T:
+        if isinstance(v, type_):
+            return v
+        raise errors.ArbitraryTypeError(expected_arbitrary_type=type_)
+
+    return arbitrary_type_validator
+
+
+def make_class_validator(type_: Type[T]) -> Callable[[Any], Type[T]]:
+    def class_validator(v: Any) -> Type[T]:
+        if lenient_issubclass(v, type_):
+            return v
+        raise errors.SubclassError(expected_class=type_)
+
+    return class_validator
+
+
+def any_class_validator(v: Any) -> Type[T]:
+    if isinstance(v, type):
+        return v
+    raise errors.ClassError()
+
+
+def none_validator(v: Any) -> 'Literal[None]':
+    if v is None:
+        return v
+    raise errors.NotNoneError()
+
+
+def pattern_validator(v: Any) -> Pattern[str]:
+    if isinstance(v, Pattern):
+        return v
+
+    str_value = str_validator(v)
+
+    try:
+        return re.compile(str_value)
+    except re.error:
+        raise errors.PatternError()
+
+
 NamedTupleT = TypeVar('NamedTupleT', bound=NamedTuple)


-class IfConfig:
+def make_namedtuple_validator(
+    namedtuple_cls: Type[NamedTupleT], config: Type['BaseConfig']
+) -> Callable[[Tuple[Any, ...]], NamedTupleT]:
+    from pydantic.v1.annotated_types import create_model_from_namedtuple

-    def __init__(self, validator: AnyCallable, *config_attr_names: str,
-        ignored_value: Any=False) ->None:
+    NamedTupleModel = create_model_from_namedtuple(
+        namedtuple_cls,
+        __config__=config,
+        __module__=namedtuple_cls.__module__,
+    )
+    namedtuple_cls.__pydantic_model__ = NamedTupleModel  # type: ignore[attr-defined]
+
+    def namedtuple_validator(values: Tuple[Any, ...]) -> NamedTupleT:
+        annotations = NamedTupleModel.__annotations__
+
+        if len(values) > len(annotations):
+            raise errors.ListMaxLengthError(limit_value=len(annotations))
+
+        dict_values: Dict[str, Any] = dict(zip(annotations, values))
+        validated_dict_values: Dict[str, Any] = dict(NamedTupleModel(**dict_values))
+        return namedtuple_cls(**validated_dict_values)
+
+    return namedtuple_validator
+
+
+def make_typeddict_validator(
+    typeddict_cls: Type['TypedDict'], config: Type['BaseConfig']  # type: ignore[valid-type]
+) -> Callable[[Any], Dict[str, Any]]:
+    from pydantic.v1.annotated_types import create_model_from_typeddict
+
+    TypedDictModel = create_model_from_typeddict(
+        typeddict_cls,
+        __config__=config,
+        __module__=typeddict_cls.__module__,
+    )
+    typeddict_cls.__pydantic_model__ = TypedDictModel  # type: ignore[attr-defined]
+
+    def typeddict_validator(values: 'TypedDict') -> Dict[str, Any]:  # type: ignore[valid-type]
+        return TypedDictModel.parse_obj(values).dict(exclude_unset=True)
+
+    return typeddict_validator
+
+
+class IfConfig:
+    def __init__(self, validator: AnyCallable, *config_attr_names: str, ignored_value: Any = False) -> None:
         self.validator = validator
         self.config_attr_names = config_attr_names
         self.ignored_value = ignored_value

+    def check(self, config: Type['BaseConfig']) -> bool:
+        return any(getattr(config, name) not in {None, self.ignored_value} for name in self.config_attr_names)
+
+
+# order is important here, for example: bool is a subclass of int so has to come first; the same applies to
+# datetime before date, IPv4Interface before IPv4Address, etc.
+_VALIDATORS: List[Tuple[Type[Any], List[Any]]] = [
+    (IntEnum, [int_validator, enum_member_validator]),
+    (Enum, [enum_member_validator]),
+    (
+        str,
+        [
+            str_validator,
+            IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'),
+            IfConfig(anystr_upper, 'anystr_upper'),
+            IfConfig(anystr_lower, 'anystr_lower'),
+            IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'),
+        ],
+    ),
+    (
+        bytes,
+        [
+            bytes_validator,
+            IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'),
+            IfConfig(anystr_upper, 'anystr_upper'),
+            IfConfig(anystr_lower, 'anystr_lower'),
+            IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'),
+        ],
+    ),
+    (bool, [bool_validator]),
+    (int, [int_validator]),
+    (float, [float_validator, IfConfig(float_finite_validator, 'allow_inf_nan', ignored_value=True)]),
+    (Path, [path_validator]),
+    (datetime, [parse_datetime]),
+    (date, [parse_date]),
+    (time, [parse_time]),
+    (timedelta, [parse_duration]),
+    (OrderedDict, [ordered_dict_validator]),
+    (dict, [dict_validator]),
+    (list, [list_validator]),
+    (tuple, [tuple_validator]),
+    (set, [set_validator]),
+    (frozenset, [frozenset_validator]),
+    (deque, [deque_validator]),
+    (UUID, [uuid_validator]),
+    (Decimal, [decimal_validator]),
+    (IPv4Interface, [ip_v4_interface_validator]),
+    (IPv6Interface, [ip_v6_interface_validator]),
+    (IPv4Address, [ip_v4_address_validator]),
+    (IPv6Address, [ip_v6_address_validator]),
+    (IPv4Network, [ip_v4_network_validator]),
+    (IPv6Network, [ip_v6_network_validator]),
+]
+
+
+def find_validators(  # noqa: C901 (ignore complexity)
+    type_: Type[Any], config: Type['BaseConfig']
+) -> Generator[AnyCallable, None, None]:
+    from pydantic.v1.dataclasses import is_builtin_dataclass, make_dataclass_validator
+
+    if type_ is Any or type_ is object:
+        return
+    type_type = type_.__class__
+    if type_type == ForwardRef or type_type == TypeVar:
+        return
+
+    if is_none_type(type_):
+        yield none_validator
+        return
+    if type_ is Pattern or type_ is re.Pattern:
+        yield pattern_validator
+        return
+    if type_ is Hashable or type_ is CollectionsHashable:
+        yield hashable_validator
+        return
+    if is_callable_type(type_):
+        yield callable_validator
+        return
+    if is_literal_type(type_):
+        yield make_literal_validator(type_)
+        return
+    if is_builtin_dataclass(type_):
+        yield from make_dataclass_validator(type_, config)
+        return
+    if type_ is Enum:
+        yield enum_validator
+        return
+    if type_ is IntEnum:
+        yield int_enum_validator
+        return
+    if is_namedtuple(type_):
+        yield tuple_validator
+        yield make_namedtuple_validator(type_, config)
+        return
+    if is_typeddict(type_):
+        yield make_typeddict_validator(type_, config)
+        return
+
+    class_ = get_class(type_)
+    if class_ is not None:
+        if class_ is not Any and isinstance(class_, type):
+            yield make_class_validator(class_)
+        else:
+            yield any_class_validator
+        return
+
+    for val_type, validators in _VALIDATORS:
+        try:
+            if issubclass(type_, val_type):
+                for v in validators:
+                    if isinstance(v, IfConfig):
+                        if v.check(config):
+                            yield v.validator
+                    else:
+                        yield v
+                return
+        except TypeError:
+            raise RuntimeError(f'error checking inheritance of {type_!r} (type: {display_as_type(type_)})')

-_VALIDATORS: List[Tuple[Type[Any], List[Any]]] = [(IntEnum, [int_validator,
-    enum_member_validator]), (Enum, [enum_member_validator]), (str, [
-    str_validator, IfConfig(anystr_strip_whitespace,
-    'anystr_strip_whitespace'), IfConfig(anystr_upper, 'anystr_upper'),
-    IfConfig(anystr_lower, 'anystr_lower'), IfConfig(
-    anystr_length_validator, 'min_anystr_length', 'max_anystr_length')]), (
-    bytes, [bytes_validator, IfConfig(anystr_strip_whitespace,
-    'anystr_strip_whitespace'), IfConfig(anystr_upper, 'anystr_upper'),
-    IfConfig(anystr_lower, 'anystr_lower'), IfConfig(
-    anystr_length_validator, 'min_anystr_length', 'max_anystr_length')]), (
-    bool, [bool_validator]), (int, [int_validator]), (float, [
-    float_validator, IfConfig(float_finite_validator, 'allow_inf_nan',
-    ignored_value=True)]), (Path, [path_validator]), (datetime, [
-    parse_datetime]), (date, [parse_date]), (time, [parse_time]), (
-    timedelta, [parse_duration]), (OrderedDict, [ordered_dict_validator]),
-    (dict, [dict_validator]), (list, [list_validator]), (tuple, [
-    tuple_validator]), (set, [set_validator]), (frozenset, [
-    frozenset_validator]), (deque, [deque_validator]), (UUID, [
-    uuid_validator]), (Decimal, [decimal_validator]), (IPv4Interface, [
-    ip_v4_interface_validator]), (IPv6Interface, [ip_v6_interface_validator
-    ]), (IPv4Address, [ip_v4_address_validator]), (IPv6Address, [
-    ip_v6_address_validator]), (IPv4Network, [ip_v4_network_validator]), (
-    IPv6Network, [ip_v6_network_validator])]
+    if config.arbitrary_types_allowed:
+        yield make_arbitrary_type_validator(type_)
+    else:
+        raise RuntimeError(f'no validator found for {type_}, see `arbitrary_types_allowed` in Config')
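
For orientation, a small sketch of how the lookup above resolves a plain built-in type (illustrative; uses the default v1 `BaseConfig`):

    from pydantic.v1 import BaseConfig
    from pydantic.v1 import validators as v

    found = list(v.find_validators(bool, BaseConfig))
    assert found == [v.bool_validator]   # bool is matched before int thanks to the _VALIDATORS ordering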
diff --git a/pydantic/v1/version.py b/pydantic/v1/version.py
index a33537cac..8127c1139 100644
--- a/pydantic/v1/version.py
+++ b/pydantic/v1/version.py
@@ -1,11 +1,38 @@
 __all__ = 'compiled', 'VERSION', 'version_info'
+
 VERSION = '1.10.17'
+
 try:
-    import cython
+    import cython  # type: ignore
 except ImportError:
     compiled: bool = False
-else:
+else:  # pragma: no cover
     try:
         compiled = cython.compiled
     except AttributeError:
         compiled = False
+
+
+def version_info() -> str:
+    import platform
+    import sys
+    from importlib import import_module
+    from pathlib import Path
+
+    optional_deps = []
+    for p in ('devtools', 'dotenv', 'email-validator', 'typing-extensions'):
+        try:
+            import_module(p.replace('-', '_'))
+        except ImportError:
+            continue
+        optional_deps.append(p)
+
+    info = {
+        'pydantic version': VERSION,
+        'pydantic compiled': compiled,
+        'install path': Path(__file__).resolve().parent,
+        'python version': sys.version,
+        'platform': platform.platform(),
+        'optional deps. installed': optional_deps,
+    }
+    return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items())
diff --git a/pydantic/validate_call_decorator.py b/pydantic/validate_call_decorator.py
index 5eb2596ae..5314c9207 100644
--- a/pydantic/validate_call_decorator.py
+++ b/pydantic/validate_call_decorator.py
@@ -1,17 +1,37 @@
 """Decorator for validating function calls."""
+
 from __future__ import annotations as _annotations
+
 import functools
 from typing import TYPE_CHECKING, Any, Callable, TypeVar, overload
+
 from ._internal import _typing_extra, _validate_call
-__all__ = 'validate_call',
+
+__all__ = ('validate_call',)
+
 if TYPE_CHECKING:
     from .config import ConfigDict
+
     AnyCallableT = TypeVar('AnyCallableT', bound=Callable[..., Any])


-def validate_call(func: (AnyCallableT | None)=None, /, *, config: (
-    ConfigDict | None)=None, validate_return: bool=False) ->(AnyCallableT |
-    Callable[[AnyCallableT], AnyCallableT]):
+@overload
+def validate_call(
+    *, config: ConfigDict | None = None, validate_return: bool = False
+) -> Callable[[AnyCallableT], AnyCallableT]: ...
+
+
+@overload
+def validate_call(func: AnyCallableT, /) -> AnyCallableT: ...
+
+
+def validate_call(
+    func: AnyCallableT | None = None,
+    /,
+    *,
+    config: ConfigDict | None = None,
+    validate_return: bool = False,
+) -> AnyCallableT | Callable[[AnyCallableT], AnyCallableT]:
     """Usage docs: https://docs.pydantic.dev/2.8/concepts/validation_decorator/

     Returns a decorated wrapper around the function that validates the arguments and, optionally, the return value.
@@ -26,4 +46,24 @@ def validate_call(func: (AnyCallableT | None)=None, /, *, config: (
     Returns:
         The decorated function.
     """
-    pass
+    local_ns = _typing_extra.parent_frame_namespace()
+
+    def validate(function: AnyCallableT) -> AnyCallableT:
+        if isinstance(function, (classmethod, staticmethod)):
+            name = type(function).__name__
+            raise TypeError(f'The `@{name}` decorator should be applied after `@validate_call` (put `@{name}` on top)')
+
+        validate_call_wrapper = _validate_call.ValidateCallWrapper(function, config, validate_return, local_ns)
+
+        @functools.wraps(function)
+        def wrapper_function(*args, **kwargs):
+            return validate_call_wrapper(*args, **kwargs)
+
+        wrapper_function.raw_function = function  # type: ignore
+
+        return wrapper_function  # type: ignore
+
+    if func:
+        return validate(func)
+    else:
+        return validate
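
A short usage sketch of the decorator above (illustrative):

    from pydantic import validate_call

    @validate_call
    def repeat(text: str, count: int) -> str:
        return text * count

    assert repeat('ab', '2') == 'abab'             # '2' is validated and coerced to int before the call
    assert repeat.raw_function('ab', 2) == 'abab'  # the undecorated function stays reachable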
diff --git a/pydantic/validators.py b/pydantic/validators.py
index 83d7de7a2..7921b04f0 100644
--- a/pydantic/validators.py
+++ b/pydantic/validators.py
@@ -1,3 +1,5 @@
 """The `validators` module is a backport module from V1."""
+
 from ._migration import getattr_migration
+
 __getattr__ = getattr_migration(__name__)
diff --git a/pydantic/version.py b/pydantic/version.py
index d63da2aeb..fa69a3a8f 100644
--- a/pydantic/version.py
+++ b/pydantic/version.py
@@ -1,24 +1,69 @@
 """The `version` module holds the version information for Pydantic."""
+
 from __future__ import annotations as _annotations
+
 __all__ = 'VERSION', 'version_info'
+
 VERSION = '2.8.2'
 """The version of Pydantic."""


-def version_short() ->str:
+def version_short() -> str:
     """Return the `major.minor` part of Pydantic version.

     It returns '2.1' if Pydantic version is '2.1.1'.
     """
-    pass
+    return '.'.join(VERSION.split('.')[:2])


-def version_info() ->str:
+def version_info() -> str:
     """Return complete version information for Pydantic and its dependencies."""
-    pass
+    import importlib.metadata as importlib_metadata
+    import os
+    import platform
+    import sys
+    from pathlib import Path
+
+    import pydantic_core._pydantic_core as pdc
+
+    from ._internal import _git as git
+
+    # get data about packages that are closely related to pydantic, use pydantic or often conflict with pydantic
+    package_names = {
+        'email-validator',
+        'fastapi',
+        'mypy',
+        'pydantic-extra-types',
+        'pydantic-settings',
+        'pyright',
+        'typing_extensions',
+    }
+    related_packages = []
+
+    for dist in importlib_metadata.distributions():
+        name = dist.metadata['Name']
+        if name in package_names:
+            related_packages.append(f'{name}-{dist.version}')
+
+    pydantic_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+    most_recent_commit = (
+        git.git_revision(pydantic_dir) if git.is_git_repo(pydantic_dir) and git.have_git() else 'unknown'
+    )
+
+    info = {
+        'pydantic version': VERSION,
+        'pydantic-core version': pdc.__version__,
+        'pydantic-core build': getattr(pdc, 'build_info', None) or pdc.build_profile,
+        'install path': Path(__file__).resolve().parent,
+        'python version': sys.version,
+        'platform': platform.platform(),
+        'related packages': ' '.join(related_packages),
+        'commit': most_recent_commit,
+    }
+    return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items())


-def parse_mypy_version(version: str) ->tuple[int, ...]:
+def parse_mypy_version(version: str) -> tuple[int, ...]:
     """Parse mypy string version to tuple of ints.

     It parses normal version like `0.930` and extra info followed by a `+` sign
@@ -30,4 +75,4 @@ def parse_mypy_version(version: str) ->tuple[int, ...]:
     Returns:
         A tuple of ints. e.g. (0, 930).
     """
-    pass
+    return tuple(map(int, version.partition('+')[0].split('.')))
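
Worked examples of the parsing above (illustrative):

    >>> parse_mypy_version('0.930')
    (0, 930)
    >>> parse_mypy_version('1.10.0+dev.abc123')
    (1, 10, 0)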
diff --git a/pydantic/warnings.py b/pydantic/warnings.py
index 0d1b0a6a5..ea9fa6dc9 100644
--- a/pydantic/warnings.py
+++ b/pydantic/warnings.py
@@ -1,8 +1,15 @@
 """Pydantic-specific warnings."""
+
 from __future__ import annotations as _annotations
+
 from .version import version_short
-__all__ = ('PydanticDeprecatedSince20', 'PydanticDeprecationWarning',
-    'PydanticDeprecatedSince26', 'PydanticExperimentalWarning')
+
+__all__ = (
+    'PydanticDeprecatedSince20',
+    'PydanticDeprecationWarning',
+    'PydanticDeprecatedSince26',
+    'PydanticExperimentalWarning',
+)


 class PydanticDeprecationWarning(DeprecationWarning):
@@ -16,40 +23,40 @@ class PydanticDeprecationWarning(DeprecationWarning):
         since: Pydantic version in what the deprecation was introduced.
         expected_removal: Pydantic version in what the corresponding functionality expected to be removed.
     """
+
     message: str
     since: tuple[int, int]
     expected_removal: tuple[int, int]

-    def __init__(self, message: str, *args: object, since: tuple[int, int],
-        expected_removal: (tuple[int, int] | None)=None) ->None:
+    def __init__(
+        self, message: str, *args: object, since: tuple[int, int], expected_removal: tuple[int, int] | None = None
+    ) -> None:
         super().__init__(message, *args)
         self.message = message.rstrip('.')
         self.since = since
-        self.expected_removal = (expected_removal if expected_removal is not
-            None else (since[0] + 1, 0))
+        self.expected_removal = expected_removal if expected_removal is not None else (since[0] + 1, 0)

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         message = (
-            f'{self.message}. Deprecated in Pydantic V{self.since[0]}.{self.since[1]} to be removed in V{self.expected_removal[0]}.{self.expected_removal[1]}.'
-            )
+            f'{self.message}. Deprecated in Pydantic V{self.since[0]}.{self.since[1]}'
+            f' to be removed in V{self.expected_removal[0]}.{self.expected_removal[1]}.'
+        )
         if self.since == (2, 0):
-            message += (
-                f' See Pydantic V2 Migration Guide at https://errors.pydantic.dev/{version_short()}/migration/'
-                )
+            message += f' See Pydantic V2 Migration Guide at https://errors.pydantic.dev/{version_short()}/migration/'
         return message


 class PydanticDeprecatedSince20(PydanticDeprecationWarning):
     """A specific `PydanticDeprecationWarning` subclass defining functionality deprecated since Pydantic 2.0."""

-    def __init__(self, message: str, *args: object) ->None:
+    def __init__(self, message: str, *args: object) -> None:
         super().__init__(message, *args, since=(2, 0), expected_removal=(3, 0))


 class PydanticDeprecatedSince26(PydanticDeprecationWarning):
     """A specific `PydanticDeprecationWarning` subclass defining functionality deprecated since Pydantic 2.6."""

-    def __init__(self, message: str, *args: object) ->None:
+    def __init__(self, message: str, *args: object) -> None:
         super().__init__(message, *args, since=(2, 0), expected_removal=(3, 0))