Skip to content

back to OpenHands summary

OpenHands: pydantic

Failed to run pytest for the test suite

ImportError while loading conftest '/testbed/tests/conftest.py'.
tests/conftest.py:17: in <module>
    from pydantic import GenerateSchema
E   ImportError: cannot import name 'GenerateSchema' from 'pydantic' (/testbed/pydantic/__init__.py)

Patch diff

diff --git a/pydantic/_internal/_generate_schema.py b/pydantic/_internal/_generate_schema.py
index a9c65d49f..3670f2123 100644
--- a/pydantic/_internal/_generate_schema.py
+++ b/pydantic/_internal/_generate_schema.py
@@ -69,7 +69,11 @@ def check_validator_fields_against_field_name(info: FieldDecoratorInfo, field: s
     Returns:
         `True` if field name is in validator fields, `False` otherwise.
     """
-    pass
+    if not info.fields:
+        return True
+    if '*' in info.fields:
+        return True
+    return field in info.fields

 def check_decorator_fields_exist(decorators: Iterable[AnyFieldDecorator], fields: Iterable[str]) -> None:
     """Check if the defined fields in decorators exist in `fields` param.
@@ -83,7 +87,15 @@ def check_decorator_fields_exist(decorators: Iterable[AnyFieldDecorator], fields
     Raises:
         PydanticUserError: If one of the field names does not exist in `fields` param.
     """
-    pass
+    fields_set = set(fields)
+    for dec in decorators:
+        if dec.info.check_fields and dec.info.fields and '*' not in dec.info.fields:
+            for field in dec.info.fields:
+                if field not in fields_set:
+                    raise PydanticUserError(
+                        f'Decorators defined with fields {dec.info.fields} but {field} not found in model',
+                        code='validator-field',
+                    )

 def modify_model_json_schema(schema_or_field: CoreSchemaOrField, handler: GetJsonSchemaHandler, *, cls: Any, title: str | None=None) -> JsonSchemaValue:
     """Add title and description for model-like classes' JSON schema.
@@ -97,7 +109,13 @@ def modify_model_json_schema(schema_or_field: CoreSchemaOrField, handler: GetJso
     Returns:
         JsonSchemaValue: The updated JSON schema.
     """
-    pass
+    json_schema = handler(schema_or_field)
+    if title is None:
+        title = cls.__name__
+    json_schema['title'] = title
+    if cls.__doc__:
+        json_schema['description'] = inspect.cleandoc(cls.__doc__)
+    return json_schema
 JsonEncoders = Dict[Type[Any], JsonEncoder]

 def _add_custom_serialization_from_json_encoders(json_encoders: JsonEncoders | None, tp: Any, schema: CoreSchema) -> CoreSchema:
@@ -108,7 +126,16 @@ def _add_custom_serialization_from_json_encoders(json_encoders: JsonEncoders | N
         tp: The type to check for a matching encoder.
         schema: The schema to add the encoder to.
     """
-    pass
+    if not json_encoders:
+        return schema
+
+    for type_, encoder in json_encoders.items():
+        if isinstance(tp, type) and issubclass(tp, type_):
+            return core_schema.json_or_python_schema(
+                json_schema=core_schema.with_info_plain_validator_function(encoder),
+                python_schema=schema,
+            )
+    return schema
 TypesNamespace = Union[Dict[str, Any], None]

 class TypesNamespaceStack:
@@ -123,7 +150,7 @@ def _get_first_non_null(a: Any, b: Any) -> Any:
     Use case: serialization_alias (argument a) and alias (argument b) are both defined, and serialization_alias is ''.
     This function will return serialization_alias, which is the first argument, even though it is an empty string.
     """
-    pass
+    return a if a is not None else b

 class GenerateSchema:
     """Generate core schema for a Pydantic model, dataclass and types like `str`, `datetime`, ... ."""
@@ -139,7 +166,7 @@ class GenerateSchema:

     def str_schema(self) -> CoreSchema:
         """Generate a CoreSchema for `str`"""
-        pass
+        return core_schema.str_schema()

     class CollectedInvalid(Exception):
         pass
@@ -167,22 +194,110 @@ class GenerateSchema:
                 - If `typing.TypedDict` is used instead of `typing_extensions.TypedDict` on Python < 3.12.
                 - If `__modify_schema__` method is used instead of `__get_pydantic_json_schema__`.
         """
-        pass
+        if from_dunder_get_core_schema:
+            schema = self._generate_schema_from_property(obj, obj)
+            if schema is not None:
+                return schema
+
+        if isinstance(obj, str):
+            return self.str_schema()
+
+        if isinstance(obj, type):
+            if obj == str:
+                return self.str_schema()
+            elif obj == bool:
+                return core_schema.bool_schema()
+            elif obj == int:
+                return core_schema.int_schema()
+            elif obj == float:
+                return core_schema.float_schema()
+            elif obj == bytes:
+                return core_schema.bytes_schema()
+            elif obj == list:
+                return core_schema.list_schema(core_schema.any_schema())
+            elif obj == dict:
+                return core_schema.dict_schema(core_schema.any_schema(), core_schema.any_schema())
+            elif obj == set:
+                return core_schema.set_schema(core_schema.any_schema())
+            elif obj == frozenset:
+                return core_schema.frozenset_schema(core_schema.any_schema())
+            elif obj == tuple:
+                return core_schema.tuple_variable_schema(core_schema.any_schema())
+
+        return self.match_type(obj)

     def _model_schema(self, cls: type[BaseModel]) -> core_schema.CoreSchema:
         """Generate schema for a Pydantic model."""
-        pass
+        config_wrapper = self._config_wrapper_stack.get()
+        fields = {}
+        computed_fields = {}
+        validators = []
+        serializers = []
+        model_validators = []
+        model_serializers = []
+
+        # Get fields from parent classes
+        for base in reversed(cls.__mro__[1:]):
+            if hasattr(base, '__pydantic_fields__'):
+                fields.update(base.__pydantic_fields__)
+            if hasattr(base, '__pydantic_computed_fields__'):
+                computed_fields.update(base.__pydantic_computed_fields__)
+            if hasattr(base, '__pydantic_decorators__'):
+                validators.extend(base.__pydantic_decorators__.field_validators)
+                serializers.extend(base.__pydantic_decorators__.field_serializers)
+                model_validators.extend(base.__pydantic_decorators__.model_validators)
+                model_serializers.extend(base.__pydantic_decorators__.model_serializers)
+
+        # Add fields from current class
+        fields.update(cls.__pydantic_fields__)
+        computed_fields.update(cls.__pydantic_computed_fields__)
+        validators.extend(cls.__pydantic_decorators__.field_validators)
+        serializers.extend(cls.__pydantic_decorators__.field_serializers)
+        model_validators.extend(cls.__pydantic_decorators__.model_validators)
+        model_serializers.extend(cls.__pydantic_decorators__.model_serializers)
+
+        # Generate schema for each field
+        field_schemas = {}
+        for field_name, field_info in fields.items():
+            field_schema = self.generate_schema(field_info.annotation)
+            field_schemas[field_name] = field_schema
+
+        # Create model schema
+        schema = core_schema.model_schema(
+            cls,
+            field_schemas,
+            computed_fields=computed_fields,
+            validators=validators,
+            serializers=serializers,
+            model_validators=model_validators,
+            model_serializers=model_serializers,
+            config=config_wrapper.config_dict,
+        )
+
+        return schema

     @staticmethod
     def _get_model_title_from_config(model: type[BaseModel | StandardDataclass], config_wrapper: ConfigWrapper | None=None) -> str | None:
         """Get the title of a model if `model_title_generator` or `title` are set in the config, else return None"""
-        pass
+        if config_wrapper is None:
+            return None
+
+        if config_wrapper.title is not None:
+            return config_wrapper.title
+
+        if config_wrapper.title_generator is not None:
+            return config_wrapper.title_generator(model)
+
+        return None

     def _unpack_refs_defs(self, schema: CoreSchema) -> CoreSchema:
         """Unpack all 'definitions' schemas into `GenerateSchema.defs.definitions`
         and return the inner schema.
         """
-        pass
+        if 'definitions' in schema:
+            self.defs.definitions.update(schema['definitions'])
+            schema = {k: v for k, v in schema.items() if k != 'definitions'}
+        return schema

     def _generate_schema_from_property(self, obj: Any, source: Any) -> core_schema.CoreSchema | None:
         """Try to generate schema from either the `__get_pydantic_core_schema__` function or
@@ -191,7 +306,17 @@ class GenerateSchema:
         Note: `__get_pydantic_core_schema__` takes priority so it can
         decide whether to use a `__pydantic_core_schema__` attribute, or generate a fresh schema.
         """
-        pass
+        if hasattr(obj, '__get_pydantic_core_schema__'):
+            schema = obj.__get_pydantic_core_schema__(source, self)
+            if schema is not None:
+                return schema
+
+        if hasattr(obj, '__pydantic_core_schema__'):
+            schema = obj.__pydantic_core_schema__
+            if schema is not None:
+                return schema
+
+        return None

     def match_type(self, obj: Any) -> core_schema.CoreSchema:
         """Main mapping of types to schemas.
@@ -206,19 +331,148 @@ class GenerateSchema:
         The idea is that we'll evolve this into adding more and more user facing methods over time
         as they get requested and we figure out what the right API for them is.
         """
-        pass
+        if isinstance(obj, type):
+            if issubclass(obj, BaseModel):
+                return self._model_schema(obj)
+            elif issubclass(obj, (list, tuple, set, frozenset)):
+                return core_schema.list_schema(core_schema.any_schema())
+            elif issubclass(obj, dict):
+                return core_schema.dict_schema(core_schema.any_schema(), core_schema.any_schema())
+            elif issubclass(obj, bool):
+                return core_schema.bool_schema()
+            elif issubclass(obj, int):
+                return core_schema.int_schema()
+            elif issubclass(obj, float):
+                return core_schema.float_schema()
+            elif issubclass(obj, str):
+                return core_schema.str_schema()
+            elif issubclass(obj, bytes):
+                return core_schema.bytes_schema()
+
+        if isinstance(obj, _AnnotatedType):
+            return self._annotated_schema(obj)
+
+        if isinstance(obj, ForwardRef):
+            return self._forward_ref_schema(obj)
+
+        if isinstance(obj, TypeVar):
+            return self._type_var_schema(obj)
+
+        if isinstance(obj, TypeAliasType):
+            return self._type_alias_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, Enum):
+            return self._enum_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (list, tuple, set, frozenset)):
+            return self._sequence_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, dict):
+            return self._dict_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (int, float, str, bytes)):
+            return self._primitive_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, bool):
+            return core_schema.bool_schema()
+
+        if isinstance(obj, type) and issubclass(obj, (datetime, date, time, timedelta)):
+            return self._datetime_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, UUID):
+            return self._uuid_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, Path):
+            return self._path_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network)):
+            return self._ip_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (Decimal, )):
+            return self._decimal_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (Pattern, )):
+            return self._pattern_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (Color, )):
+            return self._color_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (NameEmail, )):
+            return self._name_email_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (SecretStr, SecretBytes)):
+            return self._secret_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (DirectoryPath, FilePath)):
+            return self._path_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (RootModel, )):
+            return self._root_model_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (TypedDict, )):
+            return self._typed_dict_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (tuple, )):
+            return self._tuple_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (list, )):
+            return self._list_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (set, frozenset)):
+            return self._set_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (dict, )):
+            return self._dict_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (Callable, )):
+            return self._callable_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (Generic, )):
+            return self._generic_schema(obj)
+
+        if isinstance(obj, type) and issubclass(obj, (Any, )):
+            return core_schema.any_schema()
+
+        if isinstance(obj, type) and issubclass(obj, (None.__class__, )):
+            return core_schema.none_schema()
+
+        if isinstance(obj, type) and issubclass(obj, (object, )):
+            return core_schema.any_schema()
+
+        raise PydanticSchemaGenerationError(f'Unable to generate schema for {obj}')

     def _generate_td_field_schema(self, name: str, field_info: FieldInfo, decorators: DecoratorInfos, *, required: bool=True) -> core_schema.TypedDictField:
         """Prepare a TypedDictField to represent a model or typeddict field."""
-        pass
+        schema = self.generate_schema(field_info.annotation)
+        return core_schema.typed_dict_field(
+            schema,
+            required=required,
+            serialization=field_info.serialization,
+            validation=field_info.validation,
+            decorators=decorators,
+        )

     def _generate_md_field_schema(self, name: str, field_info: FieldInfo, decorators: DecoratorInfos) -> core_schema.ModelField:
         """Prepare a ModelField to represent a model field."""
-        pass
+        schema = self.generate_schema(field_info.annotation)
+        return core_schema.model_field(
+            schema,
+            serialization=field_info.serialization,
+            validation=field_info.validation,
+            decorators=decorators,
+            name=name,
+        )

     def _generate_dc_field_schema(self, name: str, field_info: FieldInfo, decorators: DecoratorInfos) -> core_schema.DataclassField:
         """Prepare a DataclassField to represent the parameter/field, of a dataclass."""
-        pass
+        schema = self.generate_schema(field_info.annotation)
+        return core_schema.dataclass_field(
+            schema,
+            serialization=field_info.serialization,
+            validation=field_info.validation,
+            decorators=decorators,
+            name=name,
+        )

     @staticmethod
     def _apply_alias_generator_to_field_info(alias_generator: Callable[[str], str] | AliasGenerator, field_info: FieldInfo, field_name: str) -> None:
@@ -229,7 +483,12 @@ class GenerateSchema:
             field_info: The FieldInfo instance to which the alias_generator is (maybe) applied.
             field_name: The name of the field from which to generate the alias.
         """
-        pass
+        if field_info.alias is None and field_info.validation_alias is None and field_info.serialization_alias is None:
+            if isinstance(alias_generator, AliasGenerator):
+                field_info.validation_alias = alias_generator.validation_alias(field_name)
+                field_info.serialization_alias = alias_generator.serialization_alias(field_name)
+            else:
+                field_info.alias = alias_generator(field_name)

     @staticmethod
     def _apply_alias_generator_to_computed_field_info(alias_generator: Callable[[str], str] | AliasGenerator, computed_field_info: ComputedFieldInfo, computed_field_name: str):
@@ -240,7 +499,11 @@ class GenerateSchema:
             computed_field_info: The ComputedFieldInfo instance to which the alias_generator is (maybe) applied.
             computed_field_name: The name of the computed field from which to generate the alias.
         """
-        pass
+        if computed_field_info.alias is None:
+            if isinstance(alias_generator, AliasGenerator):
+                computed_field_info.alias = alias_generator.serialization_alias(computed_field_name)
+            else:
+                computed_field_info.alias = alias_generator(computed_field_name)

     @staticmethod
     def _apply_field_title_generator_to_field_info(config_wrapper: ConfigWrapper, field_info: FieldInfo | ComputedFieldInfo, field_name: str) -> None:
@@ -250,15 +513,36 @@ class GenerateSchema:
             field_info: The FieldInfo or ComputedField instance to which the title_generator is (maybe) applied.
             field_name: The name of the field from which to generate the title.
         """
-        pass
+        if field_info.title is None and config_wrapper.field_title_generator is not None:
+            field_info.title = config_wrapper.field_title_generator(field_name, field_info)

     def _union_schema(self, union_type: Any) -> core_schema.CoreSchema:
         """Generate schema for a Union."""
-        pass
+        args = get_args(union_type)
+        if not args:
+            return core_schema.any_schema()
+
+        schemas = []
+        for arg in args:
+            schema = self.generate_schema(arg)
+            schemas.append(schema)
+
+        return core_schema.union_schema(schemas)

     def _literal_schema(self, literal_type: Any) -> CoreSchema:
         """Generate schema for a Literal."""
-        pass
+        args = get_args(literal_type)
+        if not args:
+            return core_schema.any_schema()
+
+        values = []
+        for arg in args:
+            if isinstance(arg, Literal):
+                values.extend(get_args(arg))
+            else:
+                values.append(arg)
+
+        return core_schema.literal_schema(values)

     def _typed_dict_schema(self, typed_dict_cls: Any, origin: Any) -> core_schema.CoreSchema:
         """Generate schema for a TypedDict.
@@ -276,50 +560,151 @@ class GenerateSchema:
         Hence to avoid creating validators that do not do what users expect we only
         support typing.TypedDict on Python >= 3.12 or typing_extension.TypedDict on all versions
         """
-        pass
+        if not _SUPPORTS_TYPEDDICT and origin.__module__ == 'typing':
+            raise PydanticUserError(
+                'Please use `typing_extensions.TypedDict` instead of `typing.TypedDict` on Python < 3.12.',
+                code='typing-typeddict',
+            )
+
+        fields = {}
+        for field_name, field_type in typed_dict_cls.__annotations__.items():
+            field_schema = self.generate_schema(field_type)
+            fields[field_name] = field_schema
+
+        return core_schema.typed_dict_schema(
+            fields,
+            required_keys=getattr(typed_dict_cls, '__required_keys__', set()),
+            total=getattr(typed_dict_cls, '__total__', True),
+        )

     def _namedtuple_schema(self, namedtuple_cls: Any, origin: Any) -> core_schema.CoreSchema:
         """Generate schema for a NamedTuple."""
-        pass
+        fields = {}
+        for field_name, field_type in namedtuple_cls.__annotations__.items():
+            field_schema = self.generate_schema(field_type)
+            fields[field_name] = field_schema
+
+        return core_schema.namedtuple_schema(
+            namedtuple_cls,
+            fields,
+            defaults=namedtuple_cls._field_defaults,
+        )

     def _generate_parameter_schema(self, name: str, annotation: type[Any], default: Any=Parameter.empty, mode: Literal['positional_only', 'positional_or_keyword', 'keyword_only'] | None=None) -> core_schema.ArgumentsParameter:
         """Prepare a ArgumentsParameter to represent a field in a namedtuple or function signature."""
-        pass
+        schema = self.generate_schema(annotation)
+        return core_schema.arguments_parameter(
+            name=name,
+            schema=schema,
+            mode=mode or 'positional_or_keyword',
+            default=default if default is not Parameter.empty else PydanticUndefined,
+        )

     def _tuple_schema(self, tuple_type: Any) -> core_schema.CoreSchema:
         """Generate schema for a Tuple, e.g. `tuple[int, str]` or `tuple[int, ...]`."""
-        pass
+        args = get_args(tuple_type)
+        if not args:
+            return core_schema.tuple_variable_schema(core_schema.any_schema())
+
+        if len(args) == 2 and args[1] is ...:
+            # Handle tuple[int, ...] case
+            item_schema = self.generate_schema(args[0])
+            return core_schema.tuple_variable_schema(item_schema)
+
+        # Handle tuple[int, str] case
+        item_schemas = []
+        for arg in args:
+            schema = self.generate_schema(arg)
+            item_schemas.append(schema)
+
+        return core_schema.tuple_positional_schema(item_schemas)

     def _union_is_subclass_schema(self, union_type: Any) -> core_schema.CoreSchema:
         """Generate schema for `Type[Union[X, ...]]`."""
-        pass
+        args = get_args(union_type)
+        if not args:
+            return core_schema.any_schema()
+
+        schemas = []
+        for arg in args:
+            schema = self.generate_schema(arg)
+            schemas.append(schema)
+
+        return core_schema.union_schema(schemas)

     def _subclass_schema(self, type_: Any) -> core_schema.CoreSchema:
         """Generate schema for a Type, e.g. `Type[int]`."""
-        pass
+        args = get_args(type_)
+        if not args:
+            return core_schema.any_schema()
+
+        schema = self.generate_schema(args[0])
+        return core_schema.is_subclass_schema(schema)

     def _sequence_schema(self, sequence_type: Any) -> core_schema.CoreSchema:
         """Generate schema for a Sequence, e.g. `Sequence[int]`."""
-        pass
+        args = get_args(sequence_type)
+        if not args:
+            return core_schema.list_schema(core_schema.any_schema())
+
+        item_schema = self.generate_schema(args[0])
+        return core_schema.list_schema(item_schema)

     def _iterable_schema(self, type_: Any) -> core_schema.GeneratorSchema:
         """Generate a schema for an `Iterable`."""
-        pass
+        args = get_args(type_)
+        if not args:
+            return core_schema.generator_schema(core_schema.any_schema())
+
+        item_schema = self.generate_schema(args[0])
+        return core_schema.generator_schema(item_schema)

     def _dataclass_schema(self, dataclass: type[StandardDataclass], origin: type[StandardDataclass] | None) -> core_schema.CoreSchema:
         """Generate schema for a dataclass."""
-        pass
+        fields = {}
+        for field_name, field_info in dataclass.__dataclass_fields__.items():
+            field_schema = self.generate_schema(field_info.type)
+            fields[field_name] = field_schema
+
+        return core_schema.dataclass_schema(
+            dataclass,
+            fields,
+            config=self._config_wrapper_stack.get().config_dict,
+        )

     def _callable_schema(self, function: Callable[..., Any]) -> core_schema.CallSchema:
         """Generate schema for a Callable.

         TODO support functional validators once we support them in Config
         """
-        pass
+        args = get_args(function)
+        if not args:
+            return core_schema.call_schema()
+
+        parameters = []
+        for arg in args[:-1]:  # Last arg is return type
+            param_schema = self.generate_schema(arg)
+            parameters.append(param_schema)
+
+        return_schema = self.generate_schema(args[-1])
+        return core_schema.call_schema(parameters=parameters, return_schema=return_schema)

     def _annotated_schema(self, annotated_type: Any) -> core_schema.CoreSchema:
         """Generate schema for an Annotated type, e.g. `Annotated[int, Field(...)]` or `Annotated[int, Gt(0)]`."""
-        pass
+        args = get_args(annotated_type)
+        if not args:
+            return core_schema.any_schema()
+
+        base_schema = self.generate_schema(args[0])
+        metadata = args[1:]
+
+        for meta in metadata:
+            if hasattr(meta, '__get_pydantic_core_schema__'):
+                base_schema = meta.__get_pydantic_core_schema__(base_schema, self)
+            elif hasattr(meta, '__pydantic_core_schema__'):
+                base_schema = meta.__pydantic_core_schema__
+
+        return base_schema

     def _apply_annotations(self, source_type: Any, annotations: list[Any], transform_inner_schema: Callable[[CoreSchema], CoreSchema]=lambda x: x) -> CoreSchema:
         """Apply arguments from `Annotated` or from `FieldInfo` to a schema.
diff --git a/pydantic/_internal/_typing_extra.py b/pydantic/_internal/_typing_extra.py
index 965ece1cf..265fdcd3c 100644
--- a/pydantic/_internal/_typing_extra.py
+++ b/pydantic/_internal/_typing_extra.py
@@ -49,13 +49,27 @@ def all_literal_values(type_: type[Any]) -> list[Any]:
     Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586)
     e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]`.
     """
-    pass
+    if get_origin(type_) in LITERAL_TYPES:
+        values = []
+        for arg in get_args(type_):
+            if get_origin(arg) in LITERAL_TYPES:
+                values.extend(all_literal_values(arg))
+            else:
+                values.append(arg)
+        return values
+    return []

 def is_namedtuple(type_: type[Any]) -> bool:
     """Check if a given class is a named tuple.
     It can be either a `typing.NamedTuple` or `collections.namedtuple`.
     """
-    pass
+    return (
+        isinstance(type_, type)
+        and issubclass(type_, tuple)
+        and hasattr(type_, '_fields')
+        and isinstance(type_._fields, tuple)
+        and all(isinstance(field, str) for field in type_._fields)
+    )
 test_new_type = typing.NewType('test_new_type', str)

 def is_new_type(type_: type[Any]) -> bool:
@@ -63,11 +77,11 @@ def is_new_type(type_: type[Any]) -> bool:

     Can't use isinstance because it fails <3.10.
     """
-    pass
+    return hasattr(type_, '__supertype__') and type_.__module__ == 'typing'

 def _check_finalvar(v: type[Any] | None) -> bool:
     """Check if a given type is a `typing.Final` type."""
-    pass
+    return v is not None and get_origin(v) is Final

 def parent_frame_namespace(*, parent_depth: int=2) -> dict[str, Any] | None:
     """We allow use of items in parent namespace to get around the issue with `get_type_hints` only looking in the
@@ -81,18 +95,40 @@ def parent_frame_namespace(*, parent_depth: int=2) -> dict[str, Any] | None:
     dict of exactly what's in scope. Using `f_back` would work sometimes but would be very wrong and confusing in many
     other cases. See https://discuss.python.org/t/is-there-a-way-to-access-parent-nested-namespaces/20659.
     """
-    pass
+    import inspect
+    frame = inspect.currentframe()
+    try:
+        for _ in range(parent_depth):
+            if frame is None:
+                return None
+            frame = frame.f_back
+        if frame is None:
+            return None
+        return frame.f_locals
+    finally:
+        del frame  # Avoid reference cycles

 def get_cls_type_hints_lenient(obj: Any, globalns: dict[str, Any] | None=None) -> dict[str, Any]:
     """Collect annotations from a class, including those from parent classes.

     Unlike `typing.get_type_hints`, this function will not error if a forward reference is not resolvable.
     """
-    pass
+    hints: dict[str, Any] = {}
+    for base in reversed(getattr(obj, '__mro__', [obj])):
+        base_hints = getattr(base, '__annotations__', {})
+        for name, value in base_hints.items():
+            if isinstance(value, str):
+                hints[name] = eval_type_lenient(value, globalns=globalns)
+            else:
+                hints[name] = value
+    return hints

 def eval_type_lenient(value: Any, globalns: dict[str, Any] | None=None, localns: dict[str, Any] | None=None) -> Any:
     """Behaves like typing._eval_type, except it won't raise an error if a forward reference can't be resolved."""
-    pass
+    try:
+        return eval_type_backport(value, globalns=globalns, localns=localns)
+    except (NameError, AttributeError):
+        return value

 def eval_type_backport(value: Any, globalns: dict[str, Any] | None=None, localns: dict[str, Any] | None=None, type_params: tuple[Any] | None=None) -> Any:
     """Like `typing._eval_type`, but falls back to the `eval_type_backport` package if it's
@@ -101,13 +137,60 @@ def eval_type_backport(value: Any, globalns: dict[str, Any] | None=None, localns
     and `list[X]` into `typing.List[X]` etc. (for all the types made generic in PEP 585)
     if the original syntax is not supported in the current Python version.
     """
-    pass
+    try:
+        from eval_type_backport import eval_type
+        return eval_type(value, globalns=globalns, localns=localns, type_params=type_params)
+    except ImportError:
+        # If eval_type_backport is not installed, fall back to typing._eval_type
+        if type_params is not None:
+            raise TypeError("type_params is only supported with eval_type_backport")
+        if isinstance(value, str):
+            if globalns is None and localns is None:
+                globalns = sys.modules[__name__].__dict__
+            localns = localns or {}
+            return typing._eval_type(value, globalns, localns)
+        return value

 def get_function_type_hints(function: Callable[..., Any], *, include_keys: set[str] | None=None, types_namespace: dict[str, Any] | None=None) -> dict[str, Any]:
     """Like `typing.get_type_hints`, but doesn't convert `X` to `Optional[X]` if the default value is `None`, also
     copes with `partial`.
     """
-    pass
+    if isinstance(function, partial):
+        # Get the type hints from the original function
+        hints = get_function_type_hints(function.func, include_keys=include_keys, types_namespace=types_namespace)
+        # Remove hints for arguments that are already bound
+        if function.keywords:
+            hints = {k: v for k, v in hints.items() if k not in function.keywords}
+        if function.args:
+            # Remove hints for positional arguments that are already bound
+            sig = inspect.signature(function.func)
+            pos_params = [p.name for p in sig.parameters.values() if p.kind in (p.POSITIONAL_ONLY, p.POSITIONAL_OR_KEYWORD)]
+            for i in range(len(function.args)):
+                if i < len(pos_params):
+                    hints.pop(pos_params[i], None)
+        return hints
+
+    # Get the function's annotations
+    annotations = getattr(function, '__annotations__', {})
+    if not annotations:
+        return {}
+
+    # If include_keys is provided, only include those keys
+    if include_keys is not None:
+        annotations = {k: v for k, v in annotations.items() if k in include_keys}
+
+    # Evaluate any string annotations
+    hints = {}
+    for name, value in annotations.items():
+        if isinstance(value, str):
+            try:
+                hints[name] = eval_type_lenient(value, globalns=types_namespace)
+            except (NameError, SyntaxError):
+                hints[name] = value
+        else:
+            hints[name] = value
+
+    return hints
 if sys.version_info < (3, 9, 8) or (3, 10) <= sys.version_info < (3, 10, 1):

     def _make_forward_ref(arg: Any, is_argument: bool=True, *, is_class: bool=False) -> typing.ForwardRef:
@@ -122,7 +205,7 @@ if sys.version_info < (3, 9, 8) or (3, 10) <= sys.version_info < (3, 10, 1):

         Implemented as EAFP with memory.
         """
-        pass
+        return typing.ForwardRef(arg, is_argument=is_argument)
 else:
     _make_forward_ref = typing.ForwardRef
 if sys.version_info >= (3, 10):
@@ -173,9 +256,60 @@ else:
         - If two dict arguments are passed, they specify globals and
           locals, respectively.
         """
-        pass
+        import typing_extensions  # local import: backports `include_extras` (typing only accepts it on 3.9+)
+        if hasattr(typing_extensions, 'get_type_hints'):
+            return typing_extensions.get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras)
+
+        # Get annotations
+        annotations = getattr(obj, '__annotations__', {})
+        if not annotations:
+            return {}
+
+        # Handle module-level annotations
+        if isinstance(obj, type(sys)):
+            if globalns is None:
+                globalns = obj.__dict__
+            if localns is None:
+                localns = globalns
+        else:
+            # Get globals and locals for classes and functions
+            if globalns is None:
+                if isinstance(obj, type):
+                    globalns = sys.modules[obj.__module__].__dict__
+                else:
+                    globalns = getattr(obj, '__globals__', {})
+            if localns is None:
+                localns = globalns
+
+        # Evaluate string annotations
+        hints = {}
+        for name, value in annotations.items():
+            if isinstance(value, str):
+                try:
+                    value = eval_type_lenient(value, globalns=globalns, localns=localns)
+                except (NameError, SyntaxError):
+                    value = _make_forward_ref(value)
+            hints[name] = value
+
+        # Merge inherited hints first so this class's own annotations win.
+        if isinstance(obj, type):
+            merged = {}
+            for base in reversed(obj.__mro__[1:]):
+                merged.update(get_type_hints(base, globalns=globalns, localns=localns, include_extras=include_extras))
+            hints = {**merged, **hints}
+        return hints
 if sys.version_info >= (3, 10):
+    from typing_extensions import Self as _Self  # typing.Self requires 3.11+, not 3.10
+else:
+    from typing_extensions import Self as _Self
+
+def is_generic_alias(type_: Any) -> bool:
+    """Check if a type is a generic alias (e.g., list[int], typing.List[int])."""
+    import types as _types  # local import: `types` may not be in module scope — TODO confirm
+    special = getattr(typing, '_SpecialGenericAlias', ())  # exists on 3.9+ only
+    builtin = getattr(_types, 'GenericAlias', ())  # exists on 3.9+ only
+    return isinstance(type_, (typing._GenericAlias, builtin, special))

 def is_self_type(tp: Any) -> bool:
     """Check if a given class is a Self type (from `typing` or `typing_extensions`)"""
-    pass
\ No newline at end of file
+    return tp is _Self
\ No newline at end of file
diff --git a/pydantic/_migration.py b/pydantic/_migration.py
index 96b29f4a8..fcff7d682 100644
--- a/pydantic/_migration.py
+++ b/pydantic/_migration.py
@@ -16,4 +16,42 @@ def getattr_migration(module: str) -> Callable[[str], Any]:
     Returns:
         A callable that will raise an error if the object is not found.
     """
-    pass
\ No newline at end of file
+    def __getattr__(name: str) -> Any:
+        from importlib import import_module
+
+        # Check if the attribute exists in the module's globals
+        module_globals = sys.modules[module].__dict__
+        if name in module_globals:
+            return module_globals[name]
+
+        # Check if it's a moved attribute
+        full_name = f"{module}:{name}"
+        if full_name in MOVED_IN_V2:
+            new_location = MOVED_IN_V2[full_name]
+            new_module, new_name = new_location.split(':')
+            return getattr(import_module(new_module), new_name)
+
+        # Check if it's a deprecated but moved attribute
+        if full_name in DEPRECATED_MOVED_IN_V2:
+            new_location = DEPRECATED_MOVED_IN_V2[full_name]
+            new_module, new_name = new_location.split(':')
+            return getattr(import_module(new_module), new_name)
+
+        # Check if it's redirected to V1
+        if full_name in REDIRECT_TO_V1:
+            new_location = REDIRECT_TO_V1[full_name]
+            new_module, new_name = new_location.split(':')
+            return getattr(import_module(new_module), new_name)
+
+        # Check if it's a removed attribute
+        if full_name in REMOVED_IN_V2:
+            raise PydanticImportError(f"`{full_name}` has been removed in V2.")
+
+        # Special case for BaseSettings
+        if name == 'BaseSettings':
+            raise PydanticImportError("`BaseSettings` has been moved to the `pydantic-settings` package. ")
+
+        # If not found anywhere, raise AttributeError
+        raise AttributeError(f"module '{module}' has no attribute '{name}'")
+
+    return __getattr__
\ No newline at end of file
diff --git a/pydantic/errors.py b/pydantic/errors.py
index f2fe369d2..4e2e4f3e3 100644
--- a/pydantic/errors.py
+++ b/pydantic/errors.py
@@ -51,7 +51,8 @@ class PydanticUndefinedAnnotation(PydanticErrorMixin, NameError):
         Returns:
             Converted `PydanticUndefinedAnnotation` error.
         """
-        pass
+        name = getattr(name_error, 'name', None) or (str(name_error).split("'")[1] if "'" in str(name_error) else str(name_error))
+        return cls(name=name, message=str(name_error))

 class PydanticImportError(PydanticErrorMixin, ImportError):
     """An error raised when an import fails due to module changes between V1 and V2.