

Reference (Gold): attrs

Pytest Summary for the test suite

status count
passed 1414
skipped 4
xfailed 1
total 1419
collected 1419

Failed pytests:

test_setattr.py::TestSetAttr::test_slotted_confused

test_setattr.py::TestSetAttr::test_slotted_confused
[gw4] linux -- Python 3.12.6 /testbed/.venv/bin/python3

Patch diff

diff --git a/src/attr/_cmp.py b/src/attr/_cmp.py
index 875bde6..f367bb3 100644
--- a/src/attr/_cmp.py
+++ b/src/attr/_cmp.py
@@ -1,11 +1,24 @@
+# SPDX-License-Identifier: MIT
+
+
 import functools
 import types
+
 from ._make import _make_ne
-_operation_names = {'eq': '==', 'lt': '<', 'le': '<=', 'gt': '>', 'ge': '>='}


-def cmp_using(eq=None, lt=None, le=None, gt=None, ge=None,
-    require_same_type=True, class_name='Comparable'):
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
+def cmp_using(
+    eq=None,
+    lt=None,
+    le=None,
+    gt=None,
+    ge=None,
+    require_same_type=True,
+    class_name="Comparable",
+):
     """
     Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
     and ``cmp`` arguments to customize field comparison.
@@ -43,32 +56,105 @@ def cmp_using(eq=None, lt=None, le=None, gt=None, ge=None,

     .. versionadded:: 21.1.0
     """
-    pass
+
+    body = {
+        "__slots__": ["value"],
+        "__init__": _make_init(),
+        "_requirements": [],
+        "_is_comparable_to": _is_comparable_to,
+    }
+
+    # Add operations.
+    num_order_functions = 0
+    has_eq_function = False
+
+    if eq is not None:
+        has_eq_function = True
+        body["__eq__"] = _make_operator("eq", eq)
+        body["__ne__"] = _make_ne()
+
+    if lt is not None:
+        num_order_functions += 1
+        body["__lt__"] = _make_operator("lt", lt)
+
+    if le is not None:
+        num_order_functions += 1
+        body["__le__"] = _make_operator("le", le)
+
+    if gt is not None:
+        num_order_functions += 1
+        body["__gt__"] = _make_operator("gt", gt)
+
+    if ge is not None:
+        num_order_functions += 1
+        body["__ge__"] = _make_operator("ge", ge)
+
+    type_ = types.new_class(
+        class_name, (object,), {}, lambda ns: ns.update(body)
+    )
+
+    # Add same type requirement.
+    if require_same_type:
+        type_._requirements.append(_check_same_type)
+
+    # Add total ordering if at least one operation was defined.
+    if 0 < num_order_functions < 4:
+        if not has_eq_function:
+            # functools.total_ordering requires __eq__ to be defined,
+            # so raise an early error here to keep a nice stack trace.
+            msg = "eq must be defined in order to complete ordering from lt, le, gt, ge."
+            raise ValueError(msg)
+        type_ = functools.total_ordering(type_)
+
+    return type_


 def _make_init():
     """
     Create __init__ method.
     """
-    pass
+
+    def __init__(self, value):
+        """
+        Initialize object with *value*.
+        """
+        self.value = value
+
+    return __init__


 def _make_operator(name, func):
     """
     Create operator method.
     """
-    pass
+
+    def method(self, other):
+        if not self._is_comparable_to(other):
+            return NotImplemented
+
+        result = func(self.value, other.value)
+        if result is NotImplemented:
+            return NotImplemented
+
+        return result
+
+    method.__name__ = f"__{name}__"
+    method.__doc__ = (
+        f"Return a {_operation_names[name]} b.  Computed by attrs."
+    )
+
+    return method


 def _is_comparable_to(self, other):
     """
     Check whether `other` is comparable to `self`.
     """
-    pass
+    return all(func(self, other) for func in self._requirements)


 def _check_same_type(self, other):
     """
     Return True if *self* and *other* are of the same type, False otherwise.
     """
-    pass
+    return other.value.__class__ is self.value.__class__
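
For context on how the restored cmp_using factory above is consumed, here is a minimal sketch of the documented usage pattern (the Position class and the 1e-9 tolerance are illustrative only, not part of the patch):

import attrs

# cmp_using wraps the lambda in a small class with __eq__/__ne__ (illustrative tolerance).
float_eq = attrs.cmp_using(eq=lambda a, b: abs(a - b) < 1e-9, class_name="FloatEq")

@attrs.define
class Position:  # hypothetical example class
    x: float = attrs.field(eq=float_eq)
    y: float = attrs.field(eq=float_eq)

assert Position(1.0, 2.0) == Position(1.0 + 1e-12, 2.0)
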
diff --git a/src/attr/_compat.py b/src/attr/_compat.py
index 1afacff..104eeb0 100644
--- a/src/attr/_compat.py
+++ b/src/attr/_compat.py
@@ -1,10 +1,15 @@
+# SPDX-License-Identifier: MIT
+
 import inspect
 import platform
 import sys
 import threading
-from collections.abc import Mapping, Sequence
+
+from collections.abc import Mapping, Sequence  # noqa: F401
 from typing import _GenericAlias
-PYPY = platform.python_implementation() == 'PyPy'
+
+
+PYPY = platform.python_implementation() == "PyPy"
 PY_3_8_PLUS = sys.version_info[:2] >= (3, 8)
 PY_3_9_PLUS = sys.version_info[:2] >= (3, 9)
 PY_3_10_PLUS = sys.version_info[:2] >= (3, 10)
@@ -12,23 +17,28 @@ PY_3_11_PLUS = sys.version_info[:2] >= (3, 11)
 PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
 PY_3_13_PLUS = sys.version_info[:2] >= (3, 13)
 PY_3_14_PLUS = sys.version_info[:2] >= (3, 14)
+
+
 if sys.version_info < (3, 8):
     try:
         from typing_extensions import Protocol
-    except ImportError:
+    except ImportError:  # pragma: no cover
         Protocol = object
 else:
-    from typing import Protocol
-if PY_3_14_PLUS:
+    from typing import Protocol  # noqa: F401
+
+if PY_3_14_PLUS:  # pragma: no cover
     import annotationlib
+
     _get_annotations = annotationlib.get_annotations
+
 else:

     def _get_annotations(cls):
         """
         Get annotations for *cls*.
         """
-        pass
+        return cls.__dict__.get("__annotations__", {})


 class _AnnotationExtractor:
@@ -36,30 +46,58 @@ class _AnnotationExtractor:
     Extract type annotations from a callable, returning None whenever there
     is none.
     """
-    __slots__ = ['sig']
+
+    __slots__ = ["sig"]

     def __init__(self, callable):
         try:
             self.sig = inspect.signature(callable)
-        except (ValueError, TypeError):
+        except (ValueError, TypeError):  # inspect failed
             self.sig = None

     def get_first_param_type(self):
         """
         Return the type annotation of the first argument if it's not empty.
         """
-        pass
+        if not self.sig:
+            return None
+
+        params = list(self.sig.parameters.values())
+        if params and params[0].annotation is not inspect.Parameter.empty:
+            return params[0].annotation
+
+        return None

     def get_return_type(self):
         """
         Return the return type if it's not empty.
         """
-        pass
+        if (
+            self.sig
+            and self.sig.return_annotation is not inspect.Signature.empty
+        ):
+            return self.sig.return_annotation
+
+        return None


+# Thread-local global to track attrs instances which are already being repr'd.
+# This is needed because there is no other (thread-safe) way to pass info
+# about the instances that are already being repr'd through the call stack
+# in order to ensure we don't perform infinite recursion.
+#
+# For instance, if an instance contains a dict which contains that instance,
+# we need to know that we're already repr'ing the outside instance from within
+# the dict's repr() call.
+#
+# This lives here rather than in _make.py so that the functions in _make.py
+# don't have a direct reference to the thread-local in their globals dict.
+# If they have such a reference, it breaks cloudpickle.
 repr_context = threading.local()


 def get_generic_base(cl):
     """If this is a generic class (A[str]), return the generic base for it."""
-    pass
+    if cl.__class__ is _GenericAlias:
+        return cl.__origin__
+    return None
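
As a rough sanity check of the _AnnotationExtractor helper restored above (a private helper, shown here only to illustrate its behaviour), an annotated callable yields its first-parameter and return annotations, while callables without annotations yield None:

from attr._compat import _AnnotationExtractor

def to_int(value: str) -> int:  # illustrative converter
    return int(value)

ext = _AnnotationExtractor(to_int)
print(ext.get_first_param_type())  # <class 'str'>
print(ext.get_return_type())       # <class 'int'>

# Builtins without annotations (or callables inspect.signature() rejects) simply yield None.
print(_AnnotationExtractor(len).get_first_param_type())  # None
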
diff --git a/src/attr/_config.py b/src/attr/_config.py
index cfa2d9b..9c245b1 100644
--- a/src/attr/_config.py
+++ b/src/attr/_config.py
@@ -1,4 +1,7 @@
-__all__ = ['set_run_validators', 'get_run_validators']
+# SPDX-License-Identifier: MIT
+
+__all__ = ["set_run_validators", "get_run_validators"]
+
 _run_validators = True


@@ -10,7 +13,11 @@ def set_run_validators(run):
         moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
         instead.
     """
-    pass
+    if not isinstance(run, bool):
+        msg = "'run' must be bool."
+        raise TypeError(msg)
+    global _run_validators
+    _run_validators = run


 def get_run_validators():
@@ -21,4 +28,4 @@ def get_run_validators():
         moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
         instead.
     """
-    pass
+    return _run_validators
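
The _config flag restored above backs the long-deprecated attr.set_run_validators()/attr.get_run_validators() pair; a short sketch of how the toggle behaves (the modern attrs.validators.set_disabled()/get_disabled() helpers wrap the same flag):

import attr
import attrs

assert attr.get_run_validators() is True

attr.set_run_validators(False)          # deprecated spelling; globally disables validators
assert attr.get_run_validators() is False

attrs.validators.set_disabled(False)    # modern spelling; re-enables validators
assert attrs.validators.get_disabled() is False

# Per the patch, non-bool arguments raise TypeError: attr.set_run_validators("yes")
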
diff --git a/src/attr/_funcs.py b/src/attr/_funcs.py
index 4c9728a..355cef4 100644
--- a/src/attr/_funcs.py
+++ b/src/attr/_funcs.py
@@ -1,11 +1,21 @@
+# SPDX-License-Identifier: MIT
+
+
 import copy
+
 from ._compat import PY_3_9_PLUS, get_generic_base
 from ._make import _OBJ_SETATTR, NOTHING, fields
 from .exceptions import AttrsAttributeNotFoundError


-def asdict(inst, recurse=True, filter=None, dict_factory=dict,
-    retain_collection_types=False, value_serializer=None):
+def asdict(
+    inst,
+    recurse=True,
+    filter=None,
+    dict_factory=dict,
+    retain_collection_types=False,
+    value_serializer=None,
+):
     """
     Return the *attrs* attribute values of *inst* as a dict.

@@ -50,19 +60,157 @@ def asdict(inst, recurse=True, filter=None, dict_factory=dict,
     ..  versionadded:: 21.3.0
         If a dict has a collection for a key, it is serialized as a tuple.
     """
-    pass
-
-
-def _asdict_anything(val, is_key, filter, dict_factory,
-    retain_collection_types, value_serializer):
+    attrs = fields(inst.__class__)
+    rv = dict_factory()
+    for a in attrs:
+        v = getattr(inst, a.name)
+        if filter is not None and not filter(a, v):
+            continue
+
+        if value_serializer is not None:
+            v = value_serializer(inst, a, v)
+
+        if recurse is True:
+            if has(v.__class__):
+                rv[a.name] = asdict(
+                    v,
+                    recurse=True,
+                    filter=filter,
+                    dict_factory=dict_factory,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
+                cf = v.__class__ if retain_collection_types is True else list
+                items = [
+                    _asdict_anything(
+                        i,
+                        is_key=False,
+                        filter=filter,
+                        dict_factory=dict_factory,
+                        retain_collection_types=retain_collection_types,
+                        value_serializer=value_serializer,
+                    )
+                    for i in v
+                ]
+                try:
+                    rv[a.name] = cf(items)
+                except TypeError:
+                    if not issubclass(cf, tuple):
+                        raise
+                    # Workaround for TypeError: cf.__new__() missing 1 required
+                    # positional argument (which appears for a namedtuple)
+                    rv[a.name] = cf(*items)
+            elif isinstance(v, dict):
+                df = dict_factory
+                rv[a.name] = df(
+                    (
+                        _asdict_anything(
+                            kk,
+                            is_key=True,
+                            filter=filter,
+                            dict_factory=df,
+                            retain_collection_types=retain_collection_types,
+                            value_serializer=value_serializer,
+                        ),
+                        _asdict_anything(
+                            vv,
+                            is_key=False,
+                            filter=filter,
+                            dict_factory=df,
+                            retain_collection_types=retain_collection_types,
+                            value_serializer=value_serializer,
+                        ),
+                    )
+                    for kk, vv in v.items()
+                )
+            else:
+                rv[a.name] = v
+        else:
+            rv[a.name] = v
+    return rv
+
+
+def _asdict_anything(
+    val,
+    is_key,
+    filter,
+    dict_factory,
+    retain_collection_types,
+    value_serializer,
+):
     """
     ``asdict`` only works on attrs instances, this works on anything.
     """
-    pass
-
-
-def astuple(inst, recurse=True, filter=None, tuple_factory=tuple,
-    retain_collection_types=False):
+    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+        # Attrs class.
+        rv = asdict(
+            val,
+            recurse=True,
+            filter=filter,
+            dict_factory=dict_factory,
+            retain_collection_types=retain_collection_types,
+            value_serializer=value_serializer,
+        )
+    elif isinstance(val, (tuple, list, set, frozenset)):
+        if retain_collection_types is True:
+            cf = val.__class__
+        elif is_key:
+            cf = tuple
+        else:
+            cf = list
+
+        rv = cf(
+            [
+                _asdict_anything(
+                    i,
+                    is_key=False,
+                    filter=filter,
+                    dict_factory=dict_factory,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                )
+                for i in val
+            ]
+        )
+    elif isinstance(val, dict):
+        df = dict_factory
+        rv = df(
+            (
+                _asdict_anything(
+                    kk,
+                    is_key=True,
+                    filter=filter,
+                    dict_factory=df,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                ),
+                _asdict_anything(
+                    vv,
+                    is_key=False,
+                    filter=filter,
+                    dict_factory=df,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                ),
+            )
+            for kk, vv in val.items()
+        )
+    else:
+        rv = val
+        if value_serializer is not None:
+            rv = value_serializer(None, None, rv)
+
+    return rv
+
+
+def astuple(
+    inst,
+    recurse=True,
+    filter=None,
+    tuple_factory=tuple,
+    retain_collection_types=False,
+):
     """
     Return the *attrs* attribute values of *inst* as a tuple.

@@ -98,7 +246,81 @@ def astuple(inst, recurse=True, filter=None, tuple_factory=tuple,

     ..  versionadded:: 16.2.0
     """
-    pass
+    attrs = fields(inst.__class__)
+    rv = []
+    retain = retain_collection_types  # Very long. :/
+    for a in attrs:
+        v = getattr(inst, a.name)
+        if filter is not None and not filter(a, v):
+            continue
+        if recurse is True:
+            if has(v.__class__):
+                rv.append(
+                    astuple(
+                        v,
+                        recurse=True,
+                        filter=filter,
+                        tuple_factory=tuple_factory,
+                        retain_collection_types=retain,
+                    )
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
+                cf = v.__class__ if retain is True else list
+                items = [
+                    (
+                        astuple(
+                            j,
+                            recurse=True,
+                            filter=filter,
+                            tuple_factory=tuple_factory,
+                            retain_collection_types=retain,
+                        )
+                        if has(j.__class__)
+                        else j
+                    )
+                    for j in v
+                ]
+                try:
+                    rv.append(cf(items))
+                except TypeError:
+                    if not issubclass(cf, tuple):
+                        raise
+                    # Workaround for TypeError: cf.__new__() missing 1 required
+                    # positional argument (which appears for a namedtuple)
+                    rv.append(cf(*items))
+            elif isinstance(v, dict):
+                df = v.__class__ if retain is True else dict
+                rv.append(
+                    df(
+                        (
+                            (
+                                astuple(
+                                    kk,
+                                    tuple_factory=tuple_factory,
+                                    retain_collection_types=retain,
+                                )
+                                if has(kk.__class__)
+                                else kk
+                            ),
+                            (
+                                astuple(
+                                    vv,
+                                    tuple_factory=tuple_factory,
+                                    retain_collection_types=retain,
+                                )
+                                if has(vv.__class__)
+                                else vv
+                            ),
+                        )
+                        for kk, vv in v.items()
+                    )
+                )
+            else:
+                rv.append(v)
+        else:
+            rv.append(v)
+
+    return rv if tuple_factory is list else tuple_factory(rv)


 def has(cls):
@@ -114,7 +336,19 @@ def has(cls):
     Returns:
         bool:
     """
-    pass
+    attrs = getattr(cls, "__attrs_attrs__", None)
+    if attrs is not None:
+        return True
+
+    # No attrs, maybe it's a specialized generic (A[str])?
+    generic_base = get_generic_base(cls)
+    if generic_base is not None:
+        generic_attrs = getattr(generic_base, "__attrs_attrs__", None)
+        if generic_attrs is not None:
+            # Stick it on here for speed next time.
+            cls.__attrs_attrs__ = generic_attrs
+        return generic_attrs is not None
+    return False


 def assoc(inst, **changes):
@@ -149,7 +383,15 @@ def assoc(inst, **changes):
         removed due to the slightly different approach compared to
         `attrs.evolve`, though.
     """
-    pass
+    new = copy.copy(inst)
+    attrs = fields(inst.__class__)
+    for k, v in changes.items():
+        a = getattr(attrs, k, NOTHING)
+        if a is NOTHING:
+            msg = f"{k} is not an attrs attribute on {new.__class__}."
+            raise AttrsAttributeNotFoundError(msg)
+        _OBJ_SETATTR(new, k, v)
+    return new


 def evolve(*args, **changes):
@@ -185,11 +427,30 @@ def evolve(*args, **changes):
     .. versionchanged:: 24.1.0
        *inst* can't be passed as a keyword argument anymore.
     """
-    pass
-
-
-def resolve_types(cls, globalns=None, localns=None, attribs=None,
-    include_extras=True):
+    try:
+        (inst,) = args
+    except ValueError:
+        msg = (
+            f"evolve() takes 1 positional argument, but {len(args)} were given"
+        )
+        raise TypeError(msg) from None
+
+    cls = inst.__class__
+    attrs = fields(cls)
+    for a in attrs:
+        if not a.init:
+            continue
+        attr_name = a.name  # To deal with private attributes.
+        init_name = a.alias
+        if init_name not in changes:
+            changes[init_name] = getattr(inst, attr_name)
+
+    return cls(**changes)
+
+
+def resolve_types(
+    cls, globalns=None, localns=None, attribs=None, include_extras=True
+):
     """
     Resolve any strings and forward annotations in type annotations.

@@ -238,4 +499,24 @@ def resolve_types(cls, globalns=None, localns=None, attribs=None,
     ..  versionadded:: 21.1.0 *attribs*
     ..  versionadded:: 23.1.0 *include_extras*
     """
-    pass
+    # Since calling get_type_hints is expensive we cache whether we've
+    # done it already.
+    if getattr(cls, "__attrs_types_resolved__", None) != cls:
+        import typing
+
+        kwargs = {"globalns": globalns, "localns": localns}
+
+        if PY_3_9_PLUS:
+            kwargs["include_extras"] = include_extras
+
+        hints = typing.get_type_hints(cls, **kwargs)
+        for field in fields(cls) if attribs is None else attribs:
+            if field.name in hints:
+                # Since fields have been frozen we must work around it.
+                _OBJ_SETATTR(field, "type", hints[field.name])
+        # We store the class we resolved so that subclasses know they haven't
+        # been resolved.
+        cls.__attrs_types_resolved__ = cls
+
+    # Return the class so you can use it as a decorator too.
+    return cls
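
To show how the asdict/astuple/evolve implementations above fit together, a small sketch (Point and Line are illustrative classes, not taken from the test suite):

import attrs

@attrs.define
class Point:  # hypothetical example class
    x: int
    y: int

@attrs.define
class Line:  # hypothetical example class
    start: Point
    end: Point

line = Line(Point(0, 0), Point(1, 2))

# asdict/astuple recurse into nested attrs classes, collections, and dicts.
assert attrs.asdict(line) == {"start": {"x": 0, "y": 0}, "end": {"x": 1, "y": 2}}
assert attrs.astuple(line) == ((0, 0), (1, 2))

# evolve() builds a new instance through __init__, so converters and validators run again.
assert attrs.evolve(line, end=Point(5, 5)) == Line(Point(0, 0), Point(5, 5))
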
diff --git a/src/attr/_make.py b/src/attr/_make.py
index f3ed380..bf00c5f 100644
--- a/src/attr/_make.py
+++ b/src/attr/_make.py
@@ -1,4 +1,7 @@
+# SPDX-License-Identifier: MIT
+
 from __future__ import annotations
+
 import abc
 import contextlib
 import copy
@@ -10,17 +13,47 @@ import linecache
 import sys
 import types
 import typing
+
 from operator import itemgetter
+
+# We need to import _compat itself in addition to the _compat members to avoid
+# having the thread-local in the globals here.
 from . import _compat, _config, setters
-from ._compat import PY_3_8_PLUS, PY_3_10_PLUS, PY_3_11_PLUS, _AnnotationExtractor, _get_annotations, get_generic_base
-from .exceptions import DefaultAlreadySetError, FrozenInstanceError, NotAnAttrsClassError, UnannotatedAttributeError
+from ._compat import (
+    PY_3_8_PLUS,
+    PY_3_10_PLUS,
+    PY_3_11_PLUS,
+    _AnnotationExtractor,
+    _get_annotations,
+    get_generic_base,
+)
+from .exceptions import (
+    DefaultAlreadySetError,
+    FrozenInstanceError,
+    NotAnAttrsClassError,
+    UnannotatedAttributeError,
+)
+
+
+# This is used at least twice, so cache it here.
 _OBJ_SETATTR = object.__setattr__
-_INIT_FACTORY_PAT = '__attr_factory_%s'
-_CLASSVAR_PREFIXES = ('typing.ClassVar', 't.ClassVar', 'ClassVar',
-    'typing_extensions.ClassVar')
-_HASH_CACHE_FIELD = '_attrs_cached_hash'
+_INIT_FACTORY_PAT = "__attr_factory_%s"
+_CLASSVAR_PREFIXES = (
+    "typing.ClassVar",
+    "t.ClassVar",
+    "ClassVar",
+    "typing_extensions.ClassVar",
+)
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_HASH_CACHE_FIELD = "_attrs_cached_hash"
+
 _EMPTY_METADATA_SINGLETON = types.MappingProxyType({})
+
+# Unique object for unequivocal getattr() defaults.
 _SENTINEL = object()
+
 _DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate)


@@ -34,10 +67,11 @@ class _Nothing(enum.Enum):
     .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
     .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant.
     """
+
     NOTHING = enum.auto()

     def __repr__(self):
-        return 'NOTHING'
+        return "NOTHING"

     def __bool__(self):
         return False
@@ -61,13 +95,27 @@ class _CacheHashWrapper(int):
     See GH #613 for more details.
     """

-    def __reduce__(self, _none_constructor=type(None), _args=()):
+    def __reduce__(self, _none_constructor=type(None), _args=()):  # noqa: B008
         return _none_constructor, _args


-def attrib(default=NOTHING, validator=None, repr=True, cmp=None, hash=None,
-    init=True, metadata=None, type=None, converter=None, factory=None,
-    kw_only=False, eq=None, order=None, on_setattr=None, alias=None):
+def attrib(
+    default=NOTHING,
+    validator=None,
+    repr=True,
+    cmp=None,
+    hash=None,
+    init=True,
+    metadata=None,
+    type=None,
+    converter=None,
+    factory=None,
+    kw_only=False,
+    eq=None,
+    order=None,
+    on_setattr=None,
+    alias=None,
+):
     """
     Create a new field / attribute on a class.

@@ -106,22 +154,94 @@ def attrib(default=NOTHING, validator=None, repr=True, cmp=None, hash=None,
     .. versionchanged:: 21.1.0 *cmp* undeprecated
     .. versionadded:: 22.2.0 *alias*
     """
-    pass
-
-
-def _compile_and_eval(script, globs, locs=None, filename=''):
+    eq, eq_key, order, order_key = _determine_attrib_eq_order(
+        cmp, eq, order, True
+    )
+
+    if hash is not None and hash is not True and hash is not False:
+        msg = "Invalid value for hash.  Must be True, False, or None."
+        raise TypeError(msg)
+
+    if factory is not None:
+        if default is not NOTHING:
+            msg = (
+                "The `default` and `factory` arguments are mutually exclusive."
+            )
+            raise ValueError(msg)
+        if not callable(factory):
+            msg = "The `factory` argument must be a callable."
+            raise ValueError(msg)
+        default = Factory(factory)
+
+    if metadata is None:
+        metadata = {}
+
+    # Apply syntactic sugar by auto-wrapping.
+    if isinstance(on_setattr, (list, tuple)):
+        on_setattr = setters.pipe(*on_setattr)
+
+    if validator and isinstance(validator, (list, tuple)):
+        validator = and_(*validator)
+
+    if converter and isinstance(converter, (list, tuple)):
+        converter = pipe(*converter)
+
+    return _CountingAttr(
+        default=default,
+        validator=validator,
+        repr=repr,
+        cmp=None,
+        hash=hash,
+        init=init,
+        converter=converter,
+        metadata=metadata,
+        type=type,
+        kw_only=kw_only,
+        eq=eq,
+        eq_key=eq_key,
+        order=order,
+        order_key=order_key,
+        on_setattr=on_setattr,
+        alias=alias,
+    )
+
+
+def _compile_and_eval(script, globs, locs=None, filename=""):
     """
     Evaluate the script with the given global (globs) and local (locs)
     variables.
     """
-    pass
+    bytecode = compile(script, filename, "exec")
+    eval(bytecode, globs, locs)


 def _make_method(name, script, filename, globs, locals=None):
     """
     Create the method with the script given and return the method object.
     """
-    pass
+    locs = {} if locals is None else locals
+
+    # In order for debuggers like PDB to be able to step through the code,
+    # we add a fake linecache entry.
+    count = 1
+    base_filename = filename
+    while True:
+        linecache_tuple = (
+            len(script),
+            None,
+            script.splitlines(True),
+            filename,
+        )
+        old_val = linecache.cache.setdefault(filename, linecache_tuple)
+        if old_val == linecache_tuple:
+            break
+
+        filename = f"{base_filename[:-1]}-{count}>"
+        count += 1
+
+    _compile_and_eval(script, globs, locs, filename)
+
+    return locs[name]


 def _make_attr_tuple_class(cls_name, attr_names):
@@ -134,11 +254,36 @@ def _make_attr_tuple_class(cls_name, attr_names):
         __slots__ = ()
         x = property(itemgetter(0))
     """
-    pass
-
-
-_Attributes = _make_attr_tuple_class('_Attributes', ['attrs', 'base_attrs',
-    'base_attrs_map'])
+    attr_class_name = f"{cls_name}Attributes"
+    attr_class_template = [
+        f"class {attr_class_name}(tuple):",
+        "    __slots__ = ()",
+    ]
+    if attr_names:
+        for i, attr_name in enumerate(attr_names):
+            attr_class_template.append(
+                f"    {attr_name} = _attrs_property(_attrs_itemgetter({i}))"
+            )
+    else:
+        attr_class_template.append("    pass")
+    globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
+    _compile_and_eval("\n".join(attr_class_template), globs)
+    return globs[attr_class_name]
+
+
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+_Attributes = _make_attr_tuple_class(
+    "_Attributes",
+    [
+        # all attributes to build dunder methods for
+        "attrs",
+        # attributes that have been inherited
+        "base_attrs",
+        # map inherited attributes to their originating classes
+        "base_attrs_map",
+    ],
+)


 def _is_class_var(annot):
@@ -149,21 +294,51 @@ def _is_class_var(annot):
     annotations which would put attrs-based classes at a performance
     disadvantage compared to plain old classes.
     """
-    pass
+    annot = str(annot)
+
+    # Annotation can be quoted.
+    if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
+        annot = annot[1:-1]
+
+    return annot.startswith(_CLASSVAR_PREFIXES)


 def _has_own_attribute(cls, attrib_name):
     """
     Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
     """
-    pass
+    return attrib_name in cls.__dict__


 def _collect_base_attrs(cls, taken_attr_names):
     """
     Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
     """
-    pass
+    base_attrs = []
+    base_attr_map = {}  # A dictionary of base attrs to their classes.
+
+    # Traverse the MRO and collect attributes.
+    for base_cls in reversed(cls.__mro__[1:-1]):
+        for a in getattr(base_cls, "__attrs_attrs__", []):
+            if a.inherited or a.name in taken_attr_names:
+                continue
+
+            a = a.evolve(inherited=True)  # noqa: PLW2901
+            base_attrs.append(a)
+            base_attr_map[a.name] = base_cls
+
+    # For each name, only keep the freshest definition i.e. the furthest at the
+    # back.  base_attr_map is fine because it gets overwritten with every new
+    # instance.
+    filtered = []
+    seen = set()
+    for a in reversed(base_attrs):
+        if a.name in seen:
+            continue
+        filtered.insert(0, a)
+        seen.add(a.name)
+
+    return filtered, base_attr_map


 def _collect_base_attrs_broken(cls, taken_attr_names):
@@ -177,11 +352,26 @@ def _collect_base_attrs_broken(cls, taken_attr_names):
     Notably it collects from the front and considers inherited attributes which
     leads to the buggy behavior reported in #428.
     """
-    pass
+    base_attrs = []
+    base_attr_map = {}  # A dictionary of base attrs to their classes.
+
+    # Traverse the MRO and collect attributes.
+    for base_cls in cls.__mro__[1:-1]:
+        for a in getattr(base_cls, "__attrs_attrs__", []):
+            if a.name in taken_attr_names:
+                continue
+
+            a = a.evolve(inherited=True)  # noqa: PLW2901
+            taken_attr_names.add(a.name)
+            base_attrs.append(a)
+            base_attr_map[a.name] = base_cls

+    return base_attrs, base_attr_map

-def _transform_attrs(cls, these, auto_attribs, kw_only, collect_by_mro,
-    field_transformer):
+
+def _transform_attrs(
+    cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
+):
     """
     Transform all `_CountingAttr`s on a class into `Attribute`s.

@@ -192,38 +382,234 @@ def _transform_attrs(cls, these, auto_attribs, kw_only, collect_by_mro,

     Return an `_Attributes`.
     """
-    pass
+    cd = cls.__dict__
+    anns = _get_annotations(cls)
+
+    if these is not None:
+        ca_list = list(these.items())
+    elif auto_attribs is True:
+        ca_names = {
+            name
+            for name, attr in cd.items()
+            if isinstance(attr, _CountingAttr)
+        }
+        ca_list = []
+        annot_names = set()
+        for attr_name, type in anns.items():
+            if _is_class_var(type):
+                continue
+            annot_names.add(attr_name)
+            a = cd.get(attr_name, NOTHING)
+
+            if not isinstance(a, _CountingAttr):
+                a = attrib() if a is NOTHING else attrib(default=a)
+            ca_list.append((attr_name, a))
+
+        unannotated = ca_names - annot_names
+        if len(unannotated) > 0:
+            raise UnannotatedAttributeError(
+                "The following `attr.ib`s lack a type annotation: "
+                + ", ".join(
+                    sorted(unannotated, key=lambda n: cd.get(n).counter)
+                )
+                + "."
+            )
+    else:
+        ca_list = sorted(
+            (
+                (name, attr)
+                for name, attr in cd.items()
+                if isinstance(attr, _CountingAttr)
+            ),
+            key=lambda e: e[1].counter,
+        )
+
+    own_attrs = [
+        Attribute.from_counting_attr(
+            name=attr_name, ca=ca, type=anns.get(attr_name)
+        )
+        for attr_name, ca in ca_list
+    ]
+
+    if collect_by_mro:
+        base_attrs, base_attr_map = _collect_base_attrs(
+            cls, {a.name for a in own_attrs}
+        )
+    else:
+        base_attrs, base_attr_map = _collect_base_attrs_broken(
+            cls, {a.name for a in own_attrs}
+        )
+
+    if kw_only:
+        own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
+        base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
+
+    attrs = base_attrs + own_attrs
+
+    # Mandatory vs non-mandatory attr order only matters when they are part of
+    # the __init__ signature and when they aren't kw_only (which are moved to
+    # the end and can be mandatory or non-mandatory in any order, as they will
+    # be specified as keyword args anyway). Check the order of those attrs:
+    had_default = False
+    for a in (a for a in attrs if a.init is not False and a.kw_only is False):
+        if had_default is True and a.default is NOTHING:
+            msg = f"No mandatory attributes allowed after an attribute with a default value or factory.  Attribute in question: {a!r}"
+            raise ValueError(msg)
+
+        if had_default is False and a.default is not NOTHING:
+            had_default = True
+
+    if field_transformer is not None:
+        attrs = field_transformer(cls, attrs)
+
+    # Resolve default field alias after executing field_transformer.
+    # This allows field_transformer to differentiate between explicit vs
+    # default aliases and supply their own defaults.
+    attrs = [
+        a.evolve(alias=_default_init_alias_for(a.name)) if not a.alias else a
+        for a in attrs
+    ]
+
+    # Create AttrsClass *after* applying the field_transformer since it may
+    # add or remove attributes!
+    attr_names = [a.name for a in attrs]
+    AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
+
+    return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map))
+
+
+def _make_cached_property_getattr(cached_properties, original_getattr, cls):
+    lines = [
+        # Wrapped to get `__class__` into closure cell for super()
+        # (It will be replaced with the newly constructed class after construction).
+        "def wrapper(_cls):",
+        "    __class__ = _cls",
+        "    def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):",
+        "         func = cached_properties.get(item)",
+        "         if func is not None:",
+        "              result = func(self)",
+        "              _setter = _cached_setattr_get(self)",
+        "              _setter(item, result)",
+        "              return result",
+    ]
+    if original_getattr is not None:
+        lines.append(
+            "         return original_getattr(self, item)",
+        )
+    else:
+        lines.extend(
+            [
+                "         try:",
+                "             return super().__getattribute__(item)",
+                "         except AttributeError:",
+                "             if not hasattr(super(), '__getattr__'):",
+                "                 raise",
+                "             return super().__getattr__(item)",
+                "         original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"",
+                "         raise AttributeError(original_error)",
+            ]
+        )
+
+    lines.extend(
+        [
+            "    return __getattr__",
+            "__getattr__ = wrapper(_cls)",
+        ]
+    )
+
+    unique_filename = _generate_unique_filename(cls, "getattr")
+
+    glob = {
+        "cached_properties": cached_properties,
+        "_cached_setattr_get": _OBJ_SETATTR.__get__,
+        "original_getattr": original_getattr,
+    }
+
+    return _make_method(
+        "__getattr__",
+        "\n".join(lines),
+        unique_filename,
+        glob,
+        locals={
+            "_cls": cls,
+        },
+    )


 def _frozen_setattrs(self, name, value):
     """
     Attached to frozen classes as __setattr__.
     """
-    pass
+    if isinstance(self, BaseException) and name in (
+        "__cause__",
+        "__context__",
+        "__traceback__",
+    ):
+        BaseException.__setattr__(self, name, value)
+        return
+
+    raise FrozenInstanceError()


 def _frozen_delattrs(self, name):
     """
     Attached to frozen classes as __delattr__.
     """
-    pass
+    raise FrozenInstanceError()


 class _ClassBuilder:
     """
     Iteratively build *one* class.
     """
-    __slots__ = ('_attr_names', '_attrs', '_base_attr_map', '_base_names',
-        '_cache_hash', '_cls', '_cls_dict', '_delete_attribs', '_frozen',
-        '_has_pre_init', '_pre_init_has_args', '_has_post_init', '_is_exc',
-        '_on_setattr', '_slots', '_weakref_slot', '_wrote_own_setattr',
-        '_has_custom_setattr')
-
-    def __init__(self, cls, these, slots, frozen, weakref_slot,
-        getstate_setstate, auto_attribs, kw_only, cache_hash, is_exc,
-        collect_by_mro, on_setattr, has_custom_setattr, field_transformer):
-        attrs, base_attrs, base_map = _transform_attrs(cls, these,
-            auto_attribs, kw_only, collect_by_mro, field_transformer)
+
+    __slots__ = (
+        "_attr_names",
+        "_attrs",
+        "_base_attr_map",
+        "_base_names",
+        "_cache_hash",
+        "_cls",
+        "_cls_dict",
+        "_delete_attribs",
+        "_frozen",
+        "_has_pre_init",
+        "_pre_init_has_args",
+        "_has_post_init",
+        "_is_exc",
+        "_on_setattr",
+        "_slots",
+        "_weakref_slot",
+        "_wrote_own_setattr",
+        "_has_custom_setattr",
+    )
+
+    def __init__(
+        self,
+        cls,
+        these,
+        slots,
+        frozen,
+        weakref_slot,
+        getstate_setstate,
+        auto_attribs,
+        kw_only,
+        cache_hash,
+        is_exc,
+        collect_by_mro,
+        on_setattr,
+        has_custom_setattr,
+        field_transformer,
+    ):
+        attrs, base_attrs, base_map = _transform_attrs(
+            cls,
+            these,
+            auto_attribs,
+            kw_only,
+            collect_by_mro,
+            field_transformer,
+        )
+
         self._cls = cls
         self._cls_dict = dict(cls.__dict__) if slots else {}
         self._attrs = attrs
@@ -234,44 +620,64 @@ class _ClassBuilder:
         self._frozen = frozen
         self._weakref_slot = weakref_slot
         self._cache_hash = cache_hash
-        self._has_pre_init = bool(getattr(cls, '__attrs_pre_init__', False))
+        self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
         self._pre_init_has_args = False
         if self._has_pre_init:
+            # Check if the pre init method has more arguments than just `self`
+            # We want to pass arguments if pre init expects arguments
             pre_init_func = cls.__attrs_pre_init__
             pre_init_signature = inspect.signature(pre_init_func)
             self._pre_init_has_args = len(pre_init_signature.parameters) > 1
-        self._has_post_init = bool(getattr(cls, '__attrs_post_init__', False))
+        self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
         self._delete_attribs = not bool(these)
         self._is_exc = is_exc
         self._on_setattr = on_setattr
+
         self._has_custom_setattr = has_custom_setattr
         self._wrote_own_setattr = False
-        self._cls_dict['__attrs_attrs__'] = self._attrs
+
+        self._cls_dict["__attrs_attrs__"] = self._attrs
+
         if frozen:
-            self._cls_dict['__setattr__'] = _frozen_setattrs
-            self._cls_dict['__delattr__'] = _frozen_delattrs
+            self._cls_dict["__setattr__"] = _frozen_setattrs
+            self._cls_dict["__delattr__"] = _frozen_delattrs
+
             self._wrote_own_setattr = True
-        elif on_setattr in (_DEFAULT_ON_SETATTR, setters.validate, setters.
-            convert):
+        elif on_setattr in (
+            _DEFAULT_ON_SETATTR,
+            setters.validate,
+            setters.convert,
+        ):
             has_validator = has_converter = False
             for a in attrs:
                 if a.validator is not None:
                     has_validator = True
                 if a.converter is not None:
                     has_converter = True
+
                 if has_validator and has_converter:
                     break
-            if (on_setattr == _DEFAULT_ON_SETATTR and not (has_validator or
-                has_converter) or on_setattr == setters.validate and not
-                has_validator or on_setattr == setters.convert and not
-                has_converter):
+            if (
+                (
+                    on_setattr == _DEFAULT_ON_SETATTR
+                    and not (has_validator or has_converter)
+                )
+                or (on_setattr == setters.validate and not has_validator)
+                or (on_setattr == setters.convert and not has_converter)
+            ):
+                # If class-level on_setattr is set to convert + validate, but
+                # there's no field to convert or validate, pretend like there's
+                # no on_setattr.
                 self._on_setattr = None
+
         if getstate_setstate:
-            self._cls_dict['__getstate__'], self._cls_dict['__setstate__'
-                ] = self._make_getstate_setstate()
+            (
+                self._cls_dict["__getstate__"],
+                self._cls_dict["__setstate__"],
+            ) = self._make_getstate_setstate()

     def __repr__(self):
-        return f'<_ClassBuilder(cls={self._cls.__name__})>'
+        return f"<_ClassBuilder(cls={self._cls.__name__})>"

     def build_class(self):
         """
@@ -279,31 +685,397 @@ class _ClassBuilder:

         Builder cannot be used after calling this method.
         """
-        pass
+        if self._slots is True:
+            cls = self._create_slots_class()
+        else:
+            cls = self._patch_original_class()
+            if PY_3_10_PLUS:
+                cls = abc.update_abstractmethods(cls)
+
+        # The method gets only called if it's not inherited from a base class.
+        # _has_own_attribute does NOT work properly for classmethods.
+        if (
+            getattr(cls, "__attrs_init_subclass__", None)
+            and "__attrs_init_subclass__" not in cls.__dict__
+        ):
+            cls.__attrs_init_subclass__()
+
+        return cls

     def _patch_original_class(self):
         """
         Apply accumulated methods and return the class.
         """
-        pass
+        cls = self._cls
+        base_names = self._base_names
+
+        # Clean class of attribute definitions (`attr.ib()`s).
+        if self._delete_attribs:
+            for name in self._attr_names:
+                if (
+                    name not in base_names
+                    and getattr(cls, name, _SENTINEL) is not _SENTINEL
+                ):
+                    # An AttributeError can happen if a base class defines a
+                    # class variable and we want to set an attribute with the
+                    # same name by using only a type annotation.
+                    with contextlib.suppress(AttributeError):
+                        delattr(cls, name)
+
+        # Attach our dunder methods.
+        for name, value in self._cls_dict.items():
+            setattr(cls, name, value)
+
+        # If we've inherited an attrs __setattr__ and don't write our own,
+        # reset it to object's.
+        if not self._wrote_own_setattr and getattr(
+            cls, "__attrs_own_setattr__", False
+        ):
+            cls.__attrs_own_setattr__ = False
+
+            if not self._has_custom_setattr:
+                cls.__setattr__ = _OBJ_SETATTR
+
+        return cls

     def _create_slots_class(self):
         """
         Build and return a new class with a `__slots__` attribute.
         """
-        pass
+        cd = {
+            k: v
+            for k, v in self._cls_dict.items()
+            if k not in (*tuple(self._attr_names), "__dict__", "__weakref__")
+        }
+
+        # If our class doesn't have its own implementation of __setattr__
+        # (either from the user or by us), check the bases, if one of them has
+        # an attrs-made __setattr__, that needs to be reset. We don't walk the
+        # MRO because we only care about our immediate base classes.
+        # XXX: This can be confused by subclassing a slotted attrs class with
+        # XXX: a non-attrs class and subclass the resulting class with an attrs
+        # XXX: class.  See `test_slotted_confused` for details.  For now that's
+        # XXX: OK with us.
+        if not self._wrote_own_setattr:
+            cd["__attrs_own_setattr__"] = False
+
+            if not self._has_custom_setattr:
+                for base_cls in self._cls.__bases__:
+                    if base_cls.__dict__.get("__attrs_own_setattr__", False):
+                        cd["__setattr__"] = _OBJ_SETATTR
+                        break
+
+        # Traverse the MRO to collect existing slots
+        # and check for an existing __weakref__.
+        existing_slots = {}
+        weakref_inherited = False
+        for base_cls in self._cls.__mro__[1:-1]:
+            if base_cls.__dict__.get("__weakref__", None) is not None:
+                weakref_inherited = True
+            existing_slots.update(
+                {
+                    name: getattr(base_cls, name)
+                    for name in getattr(base_cls, "__slots__", [])
+                }
+            )
+
+        base_names = set(self._base_names)
+
+        names = self._attr_names
+        if (
+            self._weakref_slot
+            and "__weakref__" not in getattr(self._cls, "__slots__", ())
+            and "__weakref__" not in names
+            and not weakref_inherited
+        ):
+            names += ("__weakref__",)
+
+        if PY_3_8_PLUS:
+            cached_properties = {
+                name: cached_property.func
+                for name, cached_property in cd.items()
+                if isinstance(cached_property, functools.cached_property)
+            }
+        else:
+            # `functools.cached_property` was introduced in 3.8.
+            # So can't be used before this.
+            cached_properties = {}
+
+        # Collect methods with a `__class__` reference that are shadowed in the
+        # new class, so that we know to update them.
+        additional_closure_functions_to_update = []
+        if cached_properties:
+            class_annotations = _get_annotations(self._cls)
+            for name, func in cached_properties.items():
+                # Add cached properties to names for slotting.
+                names += (name,)
+                # Clear out function from class to avoid clashing.
+                del cd[name]
+                additional_closure_functions_to_update.append(func)
+                annotation = inspect.signature(func).return_annotation
+                if annotation is not inspect.Parameter.empty:
+                    class_annotations[name] = annotation
+
+            original_getattr = cd.get("__getattr__")
+            if original_getattr is not None:
+                additional_closure_functions_to_update.append(original_getattr)
+
+            cd["__getattr__"] = _make_cached_property_getattr(
+                cached_properties, original_getattr, self._cls
+            )
+
+        # We only add the names of attributes that aren't inherited.
+        # Setting __slots__ to inherited attributes wastes memory.
+        slot_names = [name for name in names if name not in base_names]
+
+        # There are slots for attributes from current class
+        # that are defined in parent classes.
+        # As their descriptors may be overridden by a child class,
+        # we collect them here and update the class dict
+        reused_slots = {
+            slot: slot_descriptor
+            for slot, slot_descriptor in existing_slots.items()
+            if slot in slot_names
+        }
+        slot_names = [name for name in slot_names if name not in reused_slots]
+        cd.update(reused_slots)
+        if self._cache_hash:
+            slot_names.append(_HASH_CACHE_FIELD)
+
+        cd["__slots__"] = tuple(slot_names)
+
+        cd["__qualname__"] = self._cls.__qualname__
+
+        # Create new class based on old class and our methods.
+        cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+
+        # The following is a fix for
+        # <https://github.com/python-attrs/attrs/issues/102>.
+        # If a method mentions `__class__` or uses the no-arg super(), the
+        # compiler will bake a reference to the class in the method itself
+        # as `method.__closure__`.  Since we replace the class with a
+        # clone, we rewrite these references so it keeps working.
+        for item in itertools.chain(
+            cls.__dict__.values(), additional_closure_functions_to_update
+        ):
+            if isinstance(item, (classmethod, staticmethod)):
+                # Class- and staticmethods hide their functions inside.
+                # These might need to be rewritten as well.
+                closure_cells = getattr(item.__func__, "__closure__", None)
+            elif isinstance(item, property):
+                # Workaround for property `super()` shortcut (PY3-only).
+                # There is no universal way for other descriptors.
+                closure_cells = getattr(item.fget, "__closure__", None)
+            else:
+                closure_cells = getattr(item, "__closure__", None)
+
+            if not closure_cells:  # Catch None or the empty list.
+                continue
+            for cell in closure_cells:
+                try:
+                    match = cell.cell_contents is self._cls
+                except ValueError:  # noqa: PERF203
+                    # ValueError: Cell is empty
+                    pass
+                else:
+                    if match:
+                        cell.cell_contents = cls
+        return cls
+
+    def add_repr(self, ns):
+        self._cls_dict["__repr__"] = self._add_method_dunders(
+            _make_repr(self._attrs, ns, self._cls)
+        )
+        return self
+
+    def add_str(self):
+        repr = self._cls_dict.get("__repr__")
+        if repr is None:
+            msg = "__str__ can only be generated if a __repr__ exists."
+            raise ValueError(msg)
+
+        def __str__(self):
+            return self.__repr__()
+
+        self._cls_dict["__str__"] = self._add_method_dunders(__str__)
+        return self

     def _make_getstate_setstate(self):
         """
         Create custom __setstate__ and __getstate__ methods.
         """
-        pass
+        # __weakref__ is not writable.
+        state_attr_names = tuple(
+            an for an in self._attr_names if an != "__weakref__"
+        )
+
+        def slots_getstate(self):
+            """
+            Automatically created by attrs.
+            """
+            return {name: getattr(self, name) for name in state_attr_names}
+
+        hash_caching_enabled = self._cache_hash
+
+        def slots_setstate(self, state):
+            """
+            Automatically created by attrs.
+            """
+            __bound_setattr = _OBJ_SETATTR.__get__(self)
+            if isinstance(state, tuple):
+                # Backward compatibility with attrs instances pickled with
+                # attrs versions before v22.2.0 which stored tuples.
+                for name, value in zip(state_attr_names, state):
+                    __bound_setattr(name, value)
+            else:
+                for name in state_attr_names:
+                    if name in state:
+                        __bound_setattr(name, state[name])
+
+            # The hash code cache is not included when the object is
+            # serialized, but it still needs to be initialized to None to
+            # indicate that the first call to __hash__ should be a cache
+            # miss.
+            if hash_caching_enabled:
+                __bound_setattr(_HASH_CACHE_FIELD, None)
+
+        return slots_getstate, slots_setstate
+
+    def make_unhashable(self):
+        self._cls_dict["__hash__"] = None
+        return self
+
+    def add_hash(self):
+        self._cls_dict["__hash__"] = self._add_method_dunders(
+            _make_hash(
+                self._cls,
+                self._attrs,
+                frozen=self._frozen,
+                cache_hash=self._cache_hash,
+            )
+        )
+
+        return self
+
+    def add_init(self):
+        self._cls_dict["__init__"] = self._add_method_dunders(
+            _make_init(
+                self._cls,
+                self._attrs,
+                self._has_pre_init,
+                self._pre_init_has_args,
+                self._has_post_init,
+                self._frozen,
+                self._slots,
+                self._cache_hash,
+                self._base_attr_map,
+                self._is_exc,
+                self._on_setattr,
+                attrs_init=False,
+            )
+        )
+
+        return self
+
+    def add_match_args(self):
+        self._cls_dict["__match_args__"] = tuple(
+            field.name
+            for field in self._attrs
+            if field.init and not field.kw_only
+        )
+
+    def add_attrs_init(self):
+        self._cls_dict["__attrs_init__"] = self._add_method_dunders(
+            _make_init(
+                self._cls,
+                self._attrs,
+                self._has_pre_init,
+                self._pre_init_has_args,
+                self._has_post_init,
+                self._frozen,
+                self._slots,
+                self._cache_hash,
+                self._base_attr_map,
+                self._is_exc,
+                self._on_setattr,
+                attrs_init=True,
+            )
+        )
+
+        return self
+
+    def add_eq(self):
+        cd = self._cls_dict
+
+        cd["__eq__"] = self._add_method_dunders(
+            _make_eq(self._cls, self._attrs)
+        )
+        cd["__ne__"] = self._add_method_dunders(_make_ne())
+
+        return self
+
+    def add_order(self):
+        cd = self._cls_dict
+
+        cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
+            self._add_method_dunders(meth)
+            for meth in _make_order(self._cls, self._attrs)
+        )
+
+        return self
+
+    def add_setattr(self):
+        if self._frozen:
+            return self
+
+        sa_attrs = {}
+        for a in self._attrs:
+            on_setattr = a.on_setattr or self._on_setattr
+            if on_setattr and on_setattr is not setters.NO_OP:
+                sa_attrs[a.name] = a, on_setattr
+
+        if not sa_attrs:
+            return self
+
+        if self._has_custom_setattr:
+            # We need to write a __setattr__ but there already is one!
+            msg = "Can't combine custom __setattr__ with on_setattr hooks."
+            raise ValueError(msg)
+
+        # docstring comes from _add_method_dunders
+        def __setattr__(self, name, val):
+            try:
+                a, hook = sa_attrs[name]
+            except KeyError:
+                nval = val
+            else:
+                nval = hook(self, a, val)
+
+            _OBJ_SETATTR(self, name, nval)
+
+        self._cls_dict["__attrs_own_setattr__"] = True
+        self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
+        self._wrote_own_setattr = True
+
+        return self

     def _add_method_dunders(self, method):
         """
         Add __module__ and __qualname__ to a *method* if possible.
         """
-        pass
+        with contextlib.suppress(AttributeError):
+            method.__module__ = self._cls.__module__
+
+        with contextlib.suppress(AttributeError):
+            method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"
+
+        with contextlib.suppress(AttributeError):
+            method.__doc__ = (
+                "Method generated by attrs for class "
+                f"{self._cls.__qualname__}."
+            )
+
+        return method


 def _determine_attrs_eq_order(cmp, eq, order, default_eq):
@@ -311,7 +1083,27 @@ def _determine_attrs_eq_order(cmp, eq, order, default_eq):
     Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
     values of eq and order.  If *eq* is None, set it to *default_eq*.
     """
-    pass
+    if cmp is not None and any((eq is not None, order is not None)):
+        msg = "Don't mix `cmp` with `eq' and `order`."
+        raise ValueError(msg)
+
+    # cmp takes precedence due to bw-compatibility.
+    if cmp is not None:
+        return cmp, cmp
+
+    # If left None, equality is set to the specified default and ordering
+    # mirrors equality.
+    if eq is None:
+        eq = default_eq
+
+    if order is None:
+        order = eq
+
+    if eq is False and order is True:
+        msg = "`order` can only be True if `eq` is True too."
+        raise ValueError(msg)
+
+    return eq, order
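
# Editor's note -- illustrative sketch, not part of the patch. How the
# eq/order resolution above behaves, assuming the helper is imported from
# attr._make:
#
#     >>> _determine_attrs_eq_order(None, None, None, True)
#     (True, True)
#     >>> _determine_attrs_eq_order(False, None, None, True)  # cmp wins
#     (False, False)
#     >>> _determine_attrs_eq_order(None, False, True, True)
#     Traceback (most recent call last):
#       ...
#     ValueError: `order` can only be True if `eq` is True too.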


 def _determine_attrib_eq_order(cmp, eq, order, default_eq):
@@ -319,11 +1111,47 @@ def _determine_attrib_eq_order(cmp, eq, order, default_eq):
     Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
     values of eq and order.  If *eq* is None, set it to *default_eq*.
     """
-    pass
+    if cmp is not None and any((eq is not None, order is not None)):
+        msg = "Don't mix `cmp` with `eq' and `order`."
+        raise ValueError(msg)

+    def decide_callable_or_boolean(value):
+        """
+        Decide whether a key function is used.
+        """
+        if callable(value):
+            value, key = True, value
+        else:
+            key = None
+        return value, key
+
+    # cmp takes precedence due to bw-compatibility.
+    if cmp is not None:
+        cmp, cmp_key = decide_callable_or_boolean(cmp)
+        return cmp, cmp_key, cmp, cmp_key
+
+    # If left None, equality is set to the specified default and ordering
+    # mirrors equality.
+    if eq is None:
+        eq, eq_key = default_eq, None
+    else:
+        eq, eq_key = decide_callable_or_boolean(eq)
+
+    if order is None:
+        order, order_key = eq, eq_key
+    else:
+        order, order_key = decide_callable_or_boolean(order)
+
+    if eq is False and order is True:
+        msg = "`order` can only be True if `eq` is True too."
+        raise ValueError(msg)

-def _determine_whether_to_implement(cls, flag, auto_detect, dunders,
-    default=True):
+    return eq, eq_key, order, order_key
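
# Editor's note -- illustrative sketch, not part of the patch. The per-field
# variant also accepts a key callable instead of a bool:
#
#     >>> eq, eq_key, order, order_key = _determine_attrib_eq_order(
#     ...     None, str.lower, None, True
#     ... )
#     >>> (eq, order, eq_key is str.lower, order_key is str.lower)
#     (True, True, True, True)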
+
+
+def _determine_whether_to_implement(
+    cls, flag, auto_detect, dunders, default=True
+):
     """
     Check whether we should implement a set of methods for *cls*.

@@ -333,16 +1161,47 @@ def _determine_whether_to_implement(cls, flag, auto_detect, dunders,

     Return *default* if no reason for either for or against is found.
     """
-    pass
-
-
-def attrs(maybe_cls=None, these=None, repr_ns=None, repr=None, cmp=None,
-    hash=None, init=None, slots=False, frozen=False, weakref_slot=True, str
-    =False, auto_attribs=False, kw_only=False, cache_hash=False, auto_exc=
-    False, eq=None, order=None, auto_detect=False, collect_by_mro=False,
-    getstate_setstate=None, on_setattr=None, field_transformer=None,
-    match_args=True, unsafe_hash=None):
-    """
+    if flag is True or flag is False:
+        return flag
+
+    if flag is None and auto_detect is False:
+        return default
+
+    # Logically, flag is None and auto_detect is True here.
+    for dunder in dunders:
+        if _has_own_attribute(cls, dunder):
+            return False
+
+    return default
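
# Editor's note -- illustrative sketch, not part of the patch. With
# auto-detection enabled, a class that already defines one of the listed
# dunders opts out of the generated methods:
#
#     >>> class C:
#     ...     def __repr__(self):
#     ...         return "custom"
#     >>> _determine_whether_to_implement(C, None, True, ("__repr__",))
#     False
#     >>> _determine_whether_to_implement(C, True, True, ("__repr__",))
#     True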
+
+
+def attrs(
+    maybe_cls=None,
+    these=None,
+    repr_ns=None,
+    repr=None,
+    cmp=None,
+    hash=None,
+    init=None,
+    slots=False,
+    frozen=False,
+    weakref_slot=True,
+    str=False,
+    auto_attribs=False,
+    kw_only=False,
+    cache_hash=False,
+    auto_exc=False,
+    eq=None,
+    order=None,
+    auto_detect=False,
+    collect_by_mro=False,
+    getstate_setstate=None,
+    on_setattr=None,
+    field_transformer=None,
+    match_args=True,
+    unsafe_hash=None,
+):
+    r"""
     A class decorator that adds :term:`dunder methods` according to the
     specified attributes using `attr.ib` or the *these* argument.

@@ -408,7 +1267,133 @@ def attrs(maybe_cls=None, these=None, repr_ns=None, repr=None, cmp=None,
        ``__attrs_init_subclass__``, it is executed after the class is created.
     .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
     """
-    pass
+    if repr_ns is not None:
+        import warnings
+
+        warnings.warn(
+            DeprecationWarning(
+                "The `repr_ns` argument is deprecated and will be removed in or after August 2025."
+            ),
+            stacklevel=2,
+        )
+
+    eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
+
+    #  unsafe_hash takes precedence due to PEP 681.
+    if unsafe_hash is not None:
+        hash = unsafe_hash
+
+    if isinstance(on_setattr, (list, tuple)):
+        on_setattr = setters.pipe(*on_setattr)
+
+    def wrap(cls):
+        is_frozen = frozen or _has_frozen_base_class(cls)
+        is_exc = auto_exc is True and issubclass(cls, BaseException)
+        has_own_setattr = auto_detect and _has_own_attribute(
+            cls, "__setattr__"
+        )
+
+        if has_own_setattr and is_frozen:
+            msg = "Can't freeze a class with a custom __setattr__."
+            raise ValueError(msg)
+
+        builder = _ClassBuilder(
+            cls,
+            these,
+            slots,
+            is_frozen,
+            weakref_slot,
+            _determine_whether_to_implement(
+                cls,
+                getstate_setstate,
+                auto_detect,
+                ("__getstate__", "__setstate__"),
+                default=slots,
+            ),
+            auto_attribs,
+            kw_only,
+            cache_hash,
+            is_exc,
+            collect_by_mro,
+            on_setattr,
+            has_own_setattr,
+            field_transformer,
+        )
+        if _determine_whether_to_implement(
+            cls, repr, auto_detect, ("__repr__",)
+        ):
+            builder.add_repr(repr_ns)
+        if str is True:
+            builder.add_str()
+
+        eq = _determine_whether_to_implement(
+            cls, eq_, auto_detect, ("__eq__", "__ne__")
+        )
+        if not is_exc and eq is True:
+            builder.add_eq()
+        if not is_exc and _determine_whether_to_implement(
+            cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
+        ):
+            builder.add_order()
+
+        builder.add_setattr()
+
+        nonlocal hash
+        if (
+            hash is None
+            and auto_detect is True
+            and _has_own_attribute(cls, "__hash__")
+        ):
+            hash = False
+
+        if hash is not True and hash is not False and hash is not None:
+            # Can't use `hash in (True, False, None)` because e.g. 1 == True.
+            msg = "Invalid value for hash.  Must be True, False, or None."
+            raise TypeError(msg)
+
+        if hash is False or (hash is None and eq is False) or is_exc:
+            # Don't do anything. Should fall back to object's default
+            # __hash__, which hashes by id.
+            if cache_hash:
+                msg = "Invalid value for cache_hash.  To use hash caching, hashing must be either explicitly or implicitly enabled."
+                raise TypeError(msg)
+        elif hash is True or (
+            hash is None and eq is True and is_frozen is True
+        ):
+            # Build a __hash__ if told so, or if it's safe.
+            builder.add_hash()
+        else:
+            # Raise TypeError on attempts to hash.
+            if cache_hash:
+                msg = "Invalid value for cache_hash.  To use hash caching, hashing must be either explicitly or implicitly enabled."
+                raise TypeError(msg)
+            builder.make_unhashable()
+
+        if _determine_whether_to_implement(
+            cls, init, auto_detect, ("__init__",)
+        ):
+            builder.add_init()
+        else:
+            builder.add_attrs_init()
+            if cache_hash:
+                msg = "Invalid value for cache_hash.  To use hash caching, init must be True."
+                raise TypeError(msg)
+
+        if (
+            PY_3_10_PLUS
+            and match_args
+            and not _has_own_attribute(cls, "__match_args__")
+        ):
+            builder.add_match_args()
+
+        return builder.build_class()
+
+    # maybe_cls's type depends on the usage of the decorator.  It's a class
+    # if it's used as `@attrs` but `None` if used as `@attrs()`.
+    if maybe_cls is None:
+        return wrap
+
+    return wrap(maybe_cls)
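
# Editor's usage sketch for the decorator above (public API, not part of the
# patch):
#
#     >>> import attr
#     >>> @attr.s(frozen=True, slots=True)
#     ... class Point:
#     ...     x = attr.ib()
#     ...     y = attr.ib(default=0)
#     >>> Point(1)
#     Point(x=1, y=0)
#     >>> Point(1, 2) == Point(1, 2)
#     True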


 _attrs = attrs
@@ -423,56 +1408,292 @@ def _has_frozen_base_class(cls):
     Check whether *cls* has a frozen ancestor by looking at its
     __setattr__.
     """
-    pass
+    return cls.__setattr__ is _frozen_setattrs


 def _generate_unique_filename(cls, func_name):
     """
     Create a "filename" suitable for a function being generated.
     """
-    pass
+    return (
+        f"<attrs generated {func_name} {cls.__module__}."
+        f"{getattr(cls, '__qualname__', cls.__name__)}>"
+    )
+
+
+def _make_hash(cls, attrs, frozen, cache_hash):
+    attrs = tuple(
+        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
+    )
+
+    tab = "        "
+
+    unique_filename = _generate_unique_filename(cls, "hash")
+    type_hash = hash(unique_filename)
+    # If eq is custom generated, we need to include the functions in globs
+    globs = {}
+
+    hash_def = "def __hash__(self"
+    hash_func = "hash(("
+    closing_braces = "))"
+    if not cache_hash:
+        hash_def += "):"
+    else:
+        hash_def += ", *"
+
+        hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):"
+        hash_func = "_cache_wrapper(" + hash_func
+        closing_braces += ")"
+
+    method_lines = [hash_def]
+
+    def append_hash_computation_lines(prefix, indent):
+        """
+        Generate the code for actually computing the hash code.
+        Below this will either be returned directly or used to compute
+        a value which is then cached, depending on the value of cache_hash
+        """
+
+        method_lines.extend(
+            [
+                indent + prefix + hash_func,
+                indent + f"        {type_hash},",
+            ]
+        )
+
+        for a in attrs:
+            if a.eq_key:
+                cmp_name = f"_{a.name}_key"
+                globs[cmp_name] = a.eq_key
+                method_lines.append(
+                    indent + f"        {cmp_name}(self.{a.name}),"
+                )
+            else:
+                method_lines.append(indent + f"        self.{a.name},")
+
+        method_lines.append(indent + "    " + closing_braces)
+
+    if cache_hash:
+        method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:")
+        if frozen:
+            append_hash_computation_lines(
+                f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2
+            )
+            method_lines.append(tab * 2 + ")")  # close __setattr__
+        else:
+            append_hash_computation_lines(
+                f"self.{_HASH_CACHE_FIELD} = ", tab * 2
+            )
+        method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}")
+    else:
+        append_hash_computation_lines("return ", tab)
+
+    script = "\n".join(method_lines)
+    return _make_method("__hash__", script, unique_filename, globs)


 def _add_hash(cls, attrs):
     """
     Add a hash method to *cls*.
     """
-    pass
+    cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
+    return cls


 def _make_ne():
     """
     Create __ne__ method.
     """
-    pass
+
+    def __ne__(self, other):
+        """
+        Check equality and either forward a NotImplemented or
+        return the result negated.
+        """
+        result = self.__eq__(other)
+        if result is NotImplemented:
+            return NotImplemented
+
+        return not result
+
+    return __ne__


 def _make_eq(cls, attrs):
     """
     Create __eq__ method for *cls* with *attrs*.
     """
-    pass
+    attrs = [a for a in attrs if a.eq]
+
+    unique_filename = _generate_unique_filename(cls, "eq")
+    lines = [
+        "def __eq__(self, other):",
+        "    if other.__class__ is not self.__class__:",
+        "        return NotImplemented",
+    ]
+
+    # We can't just do a big self.x = other.x and... clause due to
+    # irregularities like nan == nan is false but (nan,) == (nan,) is true.
+    globs = {}
+    if attrs:
+        lines.append("    return  (")
+        for a in attrs:
+            if a.eq_key:
+                cmp_name = f"_{a.name}_key"
+                # Add the key function to the global namespace
+                # of the evaluated function.
+                globs[cmp_name] = a.eq_key
+                lines.append(
+                    f"        {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})"
+                )
+            else:
+                lines.append(f"        self.{a.name} == other.{a.name}")
+            if a is not attrs[-1]:
+                lines[-1] = f"{lines[-1]} and"
+        lines.append("    )")
+    else:
+        lines.append("    return True")
+
+    script = "\n".join(lines)
+
+    return _make_method("__eq__", script, unique_filename, globs)


 def _make_order(cls, attrs):
     """
     Create ordering methods for *cls* with *attrs*.
     """
-    pass
+    attrs = [a for a in attrs if a.order]
+
+    def attrs_to_tuple(obj):
+        """
+        Save us some typing.
+        """
+        return tuple(
+            key(value) if key else value
+            for value, key in (
+                (getattr(obj, a.name), a.order_key) for a in attrs
+            )
+        )
+
+    def __lt__(self, other):
+        """
+        Automatically created by attrs.
+        """
+        if other.__class__ is self.__class__:
+            return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+        return NotImplemented
+
+    def __le__(self, other):
+        """
+        Automatically created by attrs.
+        """
+        if other.__class__ is self.__class__:
+            return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+        return NotImplemented
+
+    def __gt__(self, other):
+        """
+        Automatically created by attrs.
+        """
+        if other.__class__ is self.__class__:
+            return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+        return NotImplemented
+
+    def __ge__(self, other):
+        """
+        Automatically created by attrs.
+        """
+        if other.__class__ is self.__class__:
+            return attrs_to_tuple(self) >= attrs_to_tuple(other)
+
+        return NotImplemented
+
+    return __lt__, __le__, __gt__, __ge__


 def _add_eq(cls, attrs=None):
     """
     Add equality methods to *cls* with *attrs*.
     """
-    pass
+    if attrs is None:
+        attrs = cls.__attrs_attrs__
+
+    cls.__eq__ = _make_eq(cls, attrs)
+    cls.__ne__ = _make_ne()
+
+    return cls
+
+
+def _make_repr(attrs, ns, cls):
+    unique_filename = _generate_unique_filename(cls, "repr")
+    # Figure out which attributes to include, and which function to use to
+    # format them. The a.repr value can be either bool or a custom
+    # callable.
+    attr_names_with_reprs = tuple(
+        (a.name, (repr if a.repr is True else a.repr), a.init)
+        for a in attrs
+        if a.repr is not False
+    )
+    globs = {
+        name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr
+    }
+    globs["_compat"] = _compat
+    globs["AttributeError"] = AttributeError
+    globs["NOTHING"] = NOTHING
+    attribute_fragments = []
+    for name, r, i in attr_names_with_reprs:
+        accessor = (
+            "self." + name if i else 'getattr(self, "' + name + '", NOTHING)'
+        )
+        fragment = (
+            "%s={%s!r}" % (name, accessor)
+            if r == repr
+            else "%s={%s_repr(%s)}" % (name, name, accessor)
+        )
+        attribute_fragments.append(fragment)
+    repr_fragment = ", ".join(attribute_fragments)
+
+    if ns is None:
+        cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
+    else:
+        cls_name_fragment = ns + ".{self.__class__.__name__}"
+
+    lines = [
+        "def __repr__(self):",
+        "  try:",
+        "    already_repring = _compat.repr_context.already_repring",
+        "  except AttributeError:",
+        "    already_repring = {id(self),}",
+        "    _compat.repr_context.already_repring = already_repring",
+        "  else:",
+        "    if id(self) in already_repring:",
+        "      return '...'",
+        "    else:",
+        "      already_repring.add(id(self))",
+        "  try:",
+        f"    return f'{cls_name_fragment}({repr_fragment})'",
+        "  finally:",
+        "    already_repring.remove(id(self))",
+    ]
+
+    return _make_method(
+        "__repr__", "\n".join(lines), unique_filename, globs=globs
+    )
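
# Editor's note -- illustrative example, not part of the patch. The
# ``already_repring`` bookkeeping above is what keeps self-referential
# instances from recursing forever:
#
#     >>> import attr
#     >>> @attr.s
#     ... class Node:
#     ...     child = attr.ib(default=None)
#     >>> n = Node()
#     >>> n.child = n
#     >>> repr(n)
#     'Node(child=...)'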


 def _add_repr(cls, ns=None, attrs=None):
     """
     Add a repr method to *cls*.
     """
-    pass
+    if attrs is None:
+        attrs = cls.__attrs_attrs__
+
+    cls.__repr__ = _make_repr(attrs, ns, cls)
+    return cls


 def fields(cls):
@@ -498,7 +1719,27 @@ def fields(cls):
        by name.
     .. versionchanged:: 23.1.0 Add support for generic classes.
     """
-    pass
+    generic_base = get_generic_base(cls)
+
+    if generic_base is None and not isinstance(cls, type):
+        msg = "Passed object must be a class."
+        raise TypeError(msg)
+
+    attrs = getattr(cls, "__attrs_attrs__", None)
+
+    if attrs is None:
+        if generic_base is not None:
+            attrs = getattr(generic_base, "__attrs_attrs__", None)
+            if attrs is not None:
+                # Even though this is global state, stick it on here to speed
+                # it up. We rely on `cls` being cached for this to be
+                # efficient.
+                cls.__attrs_attrs__ = attrs
+                return attrs
+        msg = f"{cls!r} is not an attrs-decorated class."
+        raise NotAnAttrsClassError(msg)
+
+    return attrs
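
# Editor's usage sketch (public API, not part of the patch):
#
#     >>> import attr
#     >>> @attr.s
#     ... class C:
#     ...     x = attr.ib()
#     ...     y = attr.ib()
#     >>> [a.name for a in attr.fields(C)]
#     ['x', 'y']
#     >>> attr.fields(C)[0].name
#     'x'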


 def fields_dict(cls):
@@ -520,7 +1761,14 @@ def fields_dict(cls):

     .. versionadded:: 18.1.0
     """
-    pass
+    if not isinstance(cls, type):
+        msg = "Passed object must be a class."
+        raise TypeError(msg)
+    attrs = getattr(cls, "__attrs_attrs__", None)
+    if attrs is None:
+        msg = f"{cls!r} is not an attrs-decorated class."
+        raise NotAnAttrsClassError(msg)
+    return {a.name: a for a in attrs}
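
# Editor's usage sketch (public API, not part of the patch), reusing the
# class from the previous example:
#
#     >>> sorted(attr.fields_dict(C))
#     ['x', 'y']
#     >>> attr.fields_dict(C)["y"] is attr.fields(C)[1]
#     True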


 def validate(inst):
@@ -532,80 +1780,459 @@ def validate(inst):
     Args:
         inst: Instance of a class with *attrs* attributes.
     """
-    pass
+    if _config._run_validators is False:
+        return
+
+    for a in fields(inst.__class__):
+        v = a.validator
+        if v is not None:
+            v(inst, a, getattr(inst, a.name))
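
# Editor's usage sketch (public API, not part of the patch). Plain ``attr.s``
# classes don't validate on assignment, so ``validate()`` is how you re-check
# an instance:
#
#     >>> import attr
#     >>> @attr.s
#     ... class C:
#     ...     x = attr.ib(validator=attr.validators.instance_of(int))
#     >>> c = C(1)
#     >>> c.x = "nope"        # no validation happens here
#     >>> attr.validate(c)
#     Traceback (most recent call last):
#       ...
#     TypeError: ...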


 def _is_slot_attr(a_name, base_attr_map):
     """
     Check if the attribute name comes from a slot class.
     """
-    pass
-
-
-def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) ->str:
+    cls = base_attr_map.get(a_name)
+    return cls and "__slots__" in cls.__dict__
+
+
+def _make_init(
+    cls,
+    attrs,
+    pre_init,
+    pre_init_has_args,
+    post_init,
+    frozen,
+    slots,
+    cache_hash,
+    base_attr_map,
+    is_exc,
+    cls_on_setattr,
+    attrs_init,
+):
+    has_cls_on_setattr = (
+        cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
+    )
+
+    if frozen and has_cls_on_setattr:
+        msg = "Frozen classes can't use on_setattr."
+        raise ValueError(msg)
+
+    needs_cached_setattr = cache_hash or frozen
+    filtered_attrs = []
+    attr_dict = {}
+    for a in attrs:
+        if not a.init and a.default is NOTHING:
+            continue
+
+        filtered_attrs.append(a)
+        attr_dict[a.name] = a
+
+        if a.on_setattr is not None:
+            if frozen is True:
+                msg = "Frozen classes can't use on_setattr."
+                raise ValueError(msg)
+
+            needs_cached_setattr = True
+        elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
+            needs_cached_setattr = True
+
+    unique_filename = _generate_unique_filename(cls, "init")
+
+    script, globs, annotations = _attrs_to_init_script(
+        filtered_attrs,
+        frozen,
+        slots,
+        pre_init,
+        pre_init_has_args,
+        post_init,
+        cache_hash,
+        base_attr_map,
+        is_exc,
+        needs_cached_setattr,
+        has_cls_on_setattr,
+        "__attrs_init__" if attrs_init else "__init__",
+    )
+    if cls.__module__ in sys.modules:
+        # This makes typing.get_type_hints(CLS.__init__) resolve string types.
+        globs.update(sys.modules[cls.__module__].__dict__)
+
+    globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
+
+    if needs_cached_setattr:
+        # Save the lookup overhead in __init__ if we need to circumvent
+        # setattr hooks.
+        globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__
+
+    init = _make_method(
+        "__attrs_init__" if attrs_init else "__init__",
+        script,
+        unique_filename,
+        globs,
+    )
+    init.__annotations__ = annotations
+
+    return init
+
+
+def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str:
     """
     Use the cached object.setattr to set *attr_name* to *value_var*.
     """
-    pass
+    return f"_setattr('{attr_name}', {value_var})"


-def _setattr_with_converter(attr_name: str, value_var: str, has_on_setattr:
-    bool, converter: Converter) ->str:
+def _setattr_with_converter(
+    attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
+) -> str:
     """
     Use the cached object.setattr to set *attr_name* to *value_var*, but run
     its converter first.
     """
-    pass
+    return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})"


-def _assign(attr_name: str, value: str, has_on_setattr: bool) ->str:
+def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str:
     """
     Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise
     delegate to _setattr.
     """
-    pass
+    if has_on_setattr:
+        return _setattr(attr_name, value, True)
+
+    return f"self.{attr_name} = {value}"


-def _assign_with_converter(attr_name: str, value_var: str, has_on_setattr:
-    bool, converter: Converter) ->str:
+def _assign_with_converter(
+    attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
+) -> str:
     """
     Unless *attr_name* has an on_setattr hook, use normal assignment after
     conversion. Otherwise delegate to _setattr_with_converter.
     """
-    pass
+    if has_on_setattr:
+        return _setattr_with_converter(attr_name, value_var, True, converter)

+    return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}"

-def _determine_setters(frozen: bool, slots: bool, base_attr_map: dict[str,
-    type]):
+
+def _determine_setters(
+    frozen: bool, slots: bool, base_attr_map: dict[str, type]
+):
     """
     Determine the correct setter functions based on whether a class is frozen
     and/or slotted.
     """
-    pass
-
-
-def _attrs_to_init_script(attrs: list[Attribute], is_frozen: bool,
-    is_slotted: bool, call_pre_init: bool, pre_init_has_args: bool,
-    call_post_init: bool, does_cache_hash: bool, base_attr_map: dict[str,
-    type], is_exc: bool, needs_cached_setattr: bool, has_cls_on_setattr:
-    bool, method_name: str) ->tuple[str, dict, dict]:
+    if frozen is True:
+        if slots is True:
+            return (), _setattr, _setattr_with_converter
+
+        # Dict frozen classes assign directly to __dict__.
+        # But only if the attribute doesn't come from an ancestor slot
+        # class.
+        # Note _inst_dict will be used again below if cache_hash is True
+
+        def fmt_setter(
+            attr_name: str, value_var: str, has_on_setattr: bool
+        ) -> str:
+            if _is_slot_attr(attr_name, base_attr_map):
+                return _setattr(attr_name, value_var, has_on_setattr)
+
+            return f"_inst_dict['{attr_name}'] = {value_var}"
+
+        def fmt_setter_with_converter(
+            attr_name: str,
+            value_var: str,
+            has_on_setattr: bool,
+            converter: Converter,
+        ) -> str:
+            if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
+                return _setattr_with_converter(
+                    attr_name, value_var, has_on_setattr, converter
+                )
+
+            return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}"
+
+        return (
+            ("_inst_dict = self.__dict__",),
+            fmt_setter,
+            fmt_setter_with_converter,
+        )
+
+    # Not frozen -- we can just assign directly.
+    return (), _assign, _assign_with_converter
+
+
+def _attrs_to_init_script(
+    attrs: list[Attribute],
+    is_frozen: bool,
+    is_slotted: bool,
+    call_pre_init: bool,
+    pre_init_has_args: bool,
+    call_post_init: bool,
+    does_cache_hash: bool,
+    base_attr_map: dict[str, type],
+    is_exc: bool,
+    needs_cached_setattr: bool,
+    has_cls_on_setattr: bool,
+    method_name: str,
+) -> tuple[str, dict, dict]:
     """
     Return a script of an initializer for *attrs*, a dict of globals, and
     annotations for the initializer.

     The globals are required by the generated script.
     """
-    pass
-
-
-def _default_init_alias_for(name: str) ->str:
+    lines = ["self.__attrs_pre_init__()"] if call_pre_init else []
+
+    if needs_cached_setattr:
+        lines.append(
+            # Circumvent the __setattr__ descriptor to save one lookup per
+            # assignment. Note _setattr will be used again below if
+            # does_cache_hash is True.
+            "_setattr = _cached_setattr_get(self)"
+        )
+
+    extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters(
+        is_frozen, is_slotted, base_attr_map
+    )
+    lines.extend(extra_lines)
+
+    args = []
+    kw_only_args = []
+    attrs_to_validate = []
+
+    # This is a dictionary of names to validator and converter callables.
+    # Injecting this into __init__ globals lets us avoid lookups.
+    names_for_globals = {}
+    annotations = {"return": None}
+
+    for a in attrs:
+        if a.validator:
+            attrs_to_validate.append(a)
+
+        attr_name = a.name
+        has_on_setattr = a.on_setattr is not None or (
+            a.on_setattr is not setters.NO_OP and has_cls_on_setattr
+        )
+        # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not
+        # explicitly provided
+        arg_name = a.alias
+
+        has_factory = isinstance(a.default, Factory)
+        maybe_self = "self" if has_factory and a.default.takes_self else ""
+
+        if a.converter and not isinstance(a.converter, Converter):
+            converter = Converter(a.converter)
+        else:
+            converter = a.converter
+
+        if a.init is False:
+            if has_factory:
+                init_factory_name = _INIT_FACTORY_PAT % (a.name,)
+                if converter is not None:
+                    lines.append(
+                        fmt_setter_with_converter(
+                            attr_name,
+                            init_factory_name + f"({maybe_self})",
+                            has_on_setattr,
+                            converter,
+                        )
+                    )
+                    names_for_globals[converter._get_global_name(a.name)] = (
+                        converter.converter
+                    )
+                else:
+                    lines.append(
+                        fmt_setter(
+                            attr_name,
+                            init_factory_name + f"({maybe_self})",
+                            has_on_setattr,
+                        )
+                    )
+                names_for_globals[init_factory_name] = a.default.factory
+            elif converter is not None:
+                lines.append(
+                    fmt_setter_with_converter(
+                        attr_name,
+                        f"attr_dict['{attr_name}'].default",
+                        has_on_setattr,
+                        converter,
+                    )
+                )
+                names_for_globals[converter._get_global_name(a.name)] = (
+                    converter.converter
+                )
+            else:
+                lines.append(
+                    fmt_setter(
+                        attr_name,
+                        f"attr_dict['{attr_name}'].default",
+                        has_on_setattr,
+                    )
+                )
+        elif a.default is not NOTHING and not has_factory:
+            arg = f"{arg_name}=attr_dict['{attr_name}'].default"
+            if a.kw_only:
+                kw_only_args.append(arg)
+            else:
+                args.append(arg)
+
+            if converter is not None:
+                lines.append(
+                    fmt_setter_with_converter(
+                        attr_name, arg_name, has_on_setattr, converter
+                    )
+                )
+                names_for_globals[converter._get_global_name(a.name)] = (
+                    converter.converter
+                )
+            else:
+                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+        elif has_factory:
+            arg = f"{arg_name}=NOTHING"
+            if a.kw_only:
+                kw_only_args.append(arg)
+            else:
+                args.append(arg)
+            lines.append(f"if {arg_name} is not NOTHING:")
+
+            init_factory_name = _INIT_FACTORY_PAT % (a.name,)
+            if converter is not None:
+                lines.append(
+                    "    "
+                    + fmt_setter_with_converter(
+                        attr_name, arg_name, has_on_setattr, converter
+                    )
+                )
+                lines.append("else:")
+                lines.append(
+                    "    "
+                    + fmt_setter_with_converter(
+                        attr_name,
+                        init_factory_name + "(" + maybe_self + ")",
+                        has_on_setattr,
+                        converter,
+                    )
+                )
+                names_for_globals[converter._get_global_name(a.name)] = (
+                    converter.converter
+                )
+            else:
+                lines.append(
+                    "    " + fmt_setter(attr_name, arg_name, has_on_setattr)
+                )
+                lines.append("else:")
+                lines.append(
+                    "    "
+                    + fmt_setter(
+                        attr_name,
+                        init_factory_name + "(" + maybe_self + ")",
+                        has_on_setattr,
+                    )
+                )
+            names_for_globals[init_factory_name] = a.default.factory
+        else:
+            if a.kw_only:
+                kw_only_args.append(arg_name)
+            else:
+                args.append(arg_name)
+
+            if converter is not None:
+                lines.append(
+                    fmt_setter_with_converter(
+                        attr_name, arg_name, has_on_setattr, converter
+                    )
+                )
+                names_for_globals[converter._get_global_name(a.name)] = (
+                    converter.converter
+                )
+            else:
+                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+        if a.init is True:
+            if a.type is not None and converter is None:
+                annotations[arg_name] = a.type
+            elif converter is not None and converter._first_param_type:
+                # Use the type from the converter if present.
+                annotations[arg_name] = converter._first_param_type
+
+    if attrs_to_validate:  # we can skip this if there are no validators.
+        names_for_globals["_config"] = _config
+        lines.append("if _config._run_validators is True:")
+        for a in attrs_to_validate:
+            val_name = "__attr_validator_" + a.name
+            attr_name = "__attr_" + a.name
+            lines.append(f"    {val_name}(self, {attr_name}, self.{a.name})")
+            names_for_globals[val_name] = a.validator
+            names_for_globals[attr_name] = a
+
+    if call_post_init:
+        lines.append("self.__attrs_post_init__()")
+
+    # Because this is set only after __attrs_post_init__ is called, a crash
+    # will result if post-init tries to access the hash code.  This seemed
+    # preferable to setting this beforehand, in which case alteration to field
+    # values during post-init combined with post-init accessing the hash code
+    # would result in silent bugs.
+    if does_cache_hash:
+        if is_frozen:
+            if is_slotted:
+                init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)"
+            else:
+                init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None"
+        else:
+            init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None"
+        lines.append(init_hash_cache)
+
+    # For exceptions we rely on BaseException.__init__ for proper
+    # initialization.
+    if is_exc:
+        vals = ",".join(f"self.{a.name}" for a in attrs if a.init)
+
+        lines.append(f"BaseException.__init__(self, {vals})")
+
+    args = ", ".join(args)
+    pre_init_args = args
+    if kw_only_args:
+        # leading comma & kw_only args
+        args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}"
+        pre_init_kw_only_args = ", ".join(
+            [
+                f"{kw_arg_name}={kw_arg_name}"
+                # We need to remove the defaults from the kw_only_args.
+                for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args)
+            ]
+        )
+        pre_init_args += ", " if pre_init_args else ""
+        pre_init_args += pre_init_kw_only_args
+
+    if call_pre_init and pre_init_has_args:
+        # If pre init method has arguments, pass same arguments as `__init__`.
+        lines[0] = f"self.__attrs_pre_init__({pre_init_args})"
+
+    # Backslashes aren't allowed inside f-string expressions before Python
+    # 3.12, so build the newline-plus-indent separator outside the f-string.
+    NL = "\n    "
+    return (
+        f"""def {method_name}(self, {args}):
+    {NL.join(lines) if lines else 'pass'}
+""",
+        names_for_globals,
+        annotations,
+    )
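
# Editor's note -- not part of the patch. For an unfrozen dict class with a
# mandatory ``x`` and a defaulted ``y``, the script returned above is roughly:
#
#     def __init__(self, x, y=attr_dict['y'].default):
#         self.x = x
#         self.y = y
#
# where ``attr_dict`` is injected into the method's globals by ``_make_init``.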
+
+
+def _default_init_alias_for(name: str) -> str:
     """
     The default __init__ parameter name for a field.

     This performs private-name adjustment via leading-underscore stripping,
     and is the default value of Attribute.alias if not provided.
     """
-    pass
+
+    return name.lstrip("_")
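
# Editor's note -- illustrative, not part of the patch:
#
#     >>> _default_init_alias_for("_private")
#     'private'
#     >>> _default_init_alias_for("__dunderish")
#     'dunderish'
#     >>> _default_init_alias_for("public")
#     'public'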


 class Attribute:
@@ -650,39 +2277,116 @@ class Attribute:

     For the full version history of the fields, see `attr.ib`.
     """
-    __slots__ = ('name', 'default', 'validator', 'repr', 'eq', 'eq_key',
-        'order', 'order_key', 'hash', 'init', 'metadata', 'type',
-        'converter', 'kw_only', 'inherited', 'on_setattr', 'alias')

-    def __init__(self, name, default, validator, repr, cmp, hash, init,
-        inherited, metadata=None, type=None, converter=None, kw_only=False,
-        eq=None, eq_key=None, order=None, order_key=None, on_setattr=None,
-        alias=None):
-        eq, eq_key, order, order_key = _determine_attrib_eq_order(cmp, 
-            eq_key or eq, order_key or order, True)
+    __slots__ = (
+        "name",
+        "default",
+        "validator",
+        "repr",
+        "eq",
+        "eq_key",
+        "order",
+        "order_key",
+        "hash",
+        "init",
+        "metadata",
+        "type",
+        "converter",
+        "kw_only",
+        "inherited",
+        "on_setattr",
+        "alias",
+    )
+
+    def __init__(
+        self,
+        name,
+        default,
+        validator,
+        repr,
+        cmp,  # XXX: unused, remove along with other cmp code.
+        hash,
+        init,
+        inherited,
+        metadata=None,
+        type=None,
+        converter=None,
+        kw_only=False,
+        eq=None,
+        eq_key=None,
+        order=None,
+        order_key=None,
+        on_setattr=None,
+        alias=None,
+    ):
+        eq, eq_key, order, order_key = _determine_attrib_eq_order(
+            cmp, eq_key or eq, order_key or order, True
+        )
+
+        # Cache this descriptor here to speed things up later.
         bound_setattr = _OBJ_SETATTR.__get__(self)
-        bound_setattr('name', name)
-        bound_setattr('default', default)
-        bound_setattr('validator', validator)
-        bound_setattr('repr', repr)
-        bound_setattr('eq', eq)
-        bound_setattr('eq_key', eq_key)
-        bound_setattr('order', order)
-        bound_setattr('order_key', order_key)
-        bound_setattr('hash', hash)
-        bound_setattr('init', init)
-        bound_setattr('converter', converter)
-        bound_setattr('metadata', types.MappingProxyType(dict(metadata)) if
-            metadata else _EMPTY_METADATA_SINGLETON)
-        bound_setattr('type', type)
-        bound_setattr('kw_only', kw_only)
-        bound_setattr('inherited', inherited)
-        bound_setattr('on_setattr', on_setattr)
-        bound_setattr('alias', alias)
+
+        # Despite the big red warning, people *do* instantiate `Attribute`
+        # themselves.
+        bound_setattr("name", name)
+        bound_setattr("default", default)
+        bound_setattr("validator", validator)
+        bound_setattr("repr", repr)
+        bound_setattr("eq", eq)
+        bound_setattr("eq_key", eq_key)
+        bound_setattr("order", order)
+        bound_setattr("order_key", order_key)
+        bound_setattr("hash", hash)
+        bound_setattr("init", init)
+        bound_setattr("converter", converter)
+        bound_setattr(
+            "metadata",
+            (
+                types.MappingProxyType(dict(metadata))  # Shallow copy
+                if metadata
+                else _EMPTY_METADATA_SINGLETON
+            ),
+        )
+        bound_setattr("type", type)
+        bound_setattr("kw_only", kw_only)
+        bound_setattr("inherited", inherited)
+        bound_setattr("on_setattr", on_setattr)
+        bound_setattr("alias", alias)

     def __setattr__(self, name, value):
         raise FrozenInstanceError()

+    @classmethod
+    def from_counting_attr(cls, name, ca, type=None):
+        # type holds the annotated value. Deal with conflicts:
+        if type is None:
+            type = ca.type
+        elif ca.type is not None:
+            msg = "Type annotation and type argument cannot both be present"
+            raise ValueError(msg)
+        inst_dict = {
+            k: getattr(ca, k)
+            for k in Attribute.__slots__
+            if k
+            not in (
+                "name",
+                "validator",
+                "default",
+                "type",
+                "inherited",
+            )  # exclude methods and deprecated alias
+        }
+        return cls(
+            name=name,
+            validator=ca._validator,
+            default=ca._default,
+            type=type,
+            cmp=None,
+            inherited=False,
+            **inst_dict,
+        )
+
+    # Don't use attrs.evolve since fields(Attribute) doesn't work
     def evolve(self, **changes):
         """
         Copy *self* and apply *changes*.
@@ -694,14 +2398,21 @@ class Attribute:

         .. versionadded:: 20.3.0
         """
-        pass
+        new = copy.copy(self)
+
+        new._setattrs(changes.items())
+
+        return new
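
    # Editor's usage sketch (not part of the patch): evolve() copies the
    # frozen Attribute instead of mutating it.
    #
    #     >>> import attr
    #     >>> @attr.s
    #     ... class C:
    #     ...     x = attr.ib()
    #     >>> a = attr.fields(C)[0]
    #     >>> a.evolve(repr=False).repr
    #     False
    #     >>> a.repr
    #     True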

+    # Don't use _add_pickle since fields(Attribute) doesn't work
     def __getstate__(self):
         """
         Play nice with pickle.
         """
-        return tuple(getattr(self, name) if name != 'metadata' else dict(
-            self.metadata) for name in self.__slots__)
+        return tuple(
+            getattr(self, name) if name != "metadata" else dict(self.metadata)
+            for name in self.__slots__
+        )

     def __setstate__(self, state):
         """
@@ -709,14 +2420,46 @@ class Attribute:
         """
         self._setattrs(zip(self.__slots__, state))

-
-_a = [Attribute(name=name, default=NOTHING, validator=None, repr=True, cmp=
-    None, eq=True, order=False, hash=name != 'metadata', init=True,
-    inherited=False, alias=_default_init_alias_for(name)) for name in
-    Attribute.__slots__]
-Attribute = _add_hash(_add_eq(_add_repr(Attribute, attrs=_a), attrs=[a for
-    a in _a if a.name != 'inherited']), attrs=[a for a in _a if a.hash and 
-    a.name != 'inherited'])
+    def _setattrs(self, name_values_pairs):
+        bound_setattr = _OBJ_SETATTR.__get__(self)
+        for name, value in name_values_pairs:
+            if name != "metadata":
+                bound_setattr(name, value)
+            else:
+                bound_setattr(
+                    name,
+                    (
+                        types.MappingProxyType(dict(value))
+                        if value
+                        else _EMPTY_METADATA_SINGLETON
+                    ),
+                )
+
+
+_a = [
+    Attribute(
+        name=name,
+        default=NOTHING,
+        validator=None,
+        repr=True,
+        cmp=None,
+        eq=True,
+        order=False,
+        hash=(name != "metadata"),
+        init=True,
+        inherited=False,
+        alias=_default_init_alias_for(name),
+    )
+    for name in Attribute.__slots__
+]
+
+Attribute = _add_hash(
+    _add_eq(
+        _add_repr(Attribute, attrs=_a),
+        attrs=[a for a in _a if a.name != "inherited"],
+    ),
+    attrs=[a for a in _a if a.hash and a.name != "inherited"],
+)


 class _CountingAttr:
@@ -727,24 +2470,95 @@ class _CountingAttr:
     *Internal* data structure of the attrs library.  Running into it is most
     likely the result of a bug, like a forgotten `@attr.s` decorator.
     """
-    __slots__ = ('counter', '_default', 'repr', 'eq', 'eq_key', 'order',
-        'order_key', 'hash', 'init', 'metadata', '_validator', 'converter',
-        'type', 'kw_only', 'on_setattr', 'alias')
-    __attrs_attrs__ = *tuple(Attribute(name=name, alias=
-        _default_init_alias_for(name), default=NOTHING, validator=None,
-        repr=True, cmp=None, hash=True, init=True, kw_only=False, eq=True,
-        eq_key=None, order=False, order_key=None, inherited=False,
-        on_setattr=None) for name in ('counter', '_default', 'repr', 'eq',
-        'order', 'hash', 'init', 'on_setattr', 'alias')), Attribute(name=
-        'metadata', alias='metadata', default=None, validator=None, repr=
-        True, cmp=None, hash=False, init=True, kw_only=False, eq=True,
-        eq_key=None, order=False, order_key=None, inherited=False,
-        on_setattr=None)
+
+    __slots__ = (
+        "counter",
+        "_default",
+        "repr",
+        "eq",
+        "eq_key",
+        "order",
+        "order_key",
+        "hash",
+        "init",
+        "metadata",
+        "_validator",
+        "converter",
+        "type",
+        "kw_only",
+        "on_setattr",
+        "alias",
+    )
+    __attrs_attrs__ = (
+        *tuple(
+            Attribute(
+                name=name,
+                alias=_default_init_alias_for(name),
+                default=NOTHING,
+                validator=None,
+                repr=True,
+                cmp=None,
+                hash=True,
+                init=True,
+                kw_only=False,
+                eq=True,
+                eq_key=None,
+                order=False,
+                order_key=None,
+                inherited=False,
+                on_setattr=None,
+            )
+            for name in (
+                "counter",
+                "_default",
+                "repr",
+                "eq",
+                "order",
+                "hash",
+                "init",
+                "on_setattr",
+                "alias",
+            )
+        ),
+        Attribute(
+            name="metadata",
+            alias="metadata",
+            default=None,
+            validator=None,
+            repr=True,
+            cmp=None,
+            hash=False,
+            init=True,
+            kw_only=False,
+            eq=True,
+            eq_key=None,
+            order=False,
+            order_key=None,
+            inherited=False,
+            on_setattr=None,
+        ),
+    )
     cls_counter = 0

-    def __init__(self, default, validator, repr, cmp, hash, init, converter,
-        metadata, type, kw_only, eq, eq_key, order, order_key, on_setattr,
-        alias):
+    def __init__(
+        self,
+        default,
+        validator,
+        repr,
+        cmp,
+        hash,
+        init,
+        converter,
+        metadata,
+        type,
+        kw_only,
+        eq,
+        eq_key,
+        order,
+        order_key,
+        on_setattr,
+        alias,
+    ):
         _CountingAttr.cls_counter += 1
         self.counter = _CountingAttr.cls_counter
         self._default = default
@@ -771,7 +2585,11 @@ class _CountingAttr:

         .. versionadded:: 17.1.0
         """
-        pass
+        if self._validator is None:
+            self._validator = meth
+        else:
+            self._validator = and_(self._validator, meth)
+        return meth

     def default(self, meth):
         """
@@ -784,7 +2602,12 @@ class _CountingAttr:

         .. versionadded:: 17.1.0
         """
-        pass
+        if self._default is not NOTHING:
+            raise DefaultAlreadySetError()
+
+        self._default = Factory(meth, takes_self=True)
+
+        return meth
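
    # Editor's usage sketch (public API, not part of the patch) for the two
    # decorators above:
    #
    #     >>> import attr
    #     >>> @attr.s
    #     ... class C:
    #     ...     x = attr.ib()
    #     ...     y = attr.ib()
    #     ...     @x.validator
    #     ...     def _check_x(self, attribute, value):
    #     ...         if value < 0:
    #     ...             raise ValueError("x must be non-negative")
    #     ...     @y.default
    #     ...     def _y_default(self):
    #     ...         return self.x * 2
    #     >>> C(3)
    #     C(x=3, y=6)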


 _CountingAttr = _add_eq(_add_repr(_CountingAttr))
@@ -808,7 +2631,8 @@ class Factory:

     .. versionadded:: 17.1.0  *takes_self*
     """
-    __slots__ = 'factory', 'takes_self'
+
+    __slots__ = ("factory", "takes_self")

     def __init__(self, factory, takes_self=False):
         self.factory = factory
@@ -828,9 +2652,22 @@ class Factory:
             setattr(self, name, value)


-_f = [Attribute(name=name, default=NOTHING, validator=None, repr=True, cmp=
-    None, eq=True, order=False, hash=True, init=True, inherited=False) for
-    name in Factory.__slots__]
+_f = [
+    Attribute(
+        name=name,
+        default=NOTHING,
+        validator=None,
+        repr=True,
+        cmp=None,
+        eq=True,
+        order=False,
+        hash=True,
+        init=True,
+        inherited=False,
+    )
+    for name in Factory.__slots__
+]
+
 Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
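
# Editor's usage sketch (public API, not part of the patch):
#
#     >>> import attr
#     >>> @attr.s
#     ... class C:
#     ...     items = attr.ib(default=attr.Factory(list))
#     ...     size = attr.ib(
#     ...         default=attr.Factory(lambda self: len(self.items), takes_self=True)
#     ...     )
#     >>> C()
#     C(items=[], size=0)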


@@ -854,53 +2691,78 @@ class Converter:

     .. versionadded:: 24.1.0
     """
-    __slots__ = ('converter', 'takes_self', 'takes_field',
-        '_first_param_type', '_global_name', '__call__')
+
+    __slots__ = (
+        "converter",
+        "takes_self",
+        "takes_field",
+        "_first_param_type",
+        "_global_name",
+        "__call__",
+    )

     def __init__(self, converter, *, takes_self=False, takes_field=False):
         self.converter = converter
         self.takes_self = takes_self
         self.takes_field = takes_field
+
         ex = _AnnotationExtractor(converter)
         self._first_param_type = ex.get_first_param_type()
+
         if not (self.takes_self or self.takes_field):
             self.__call__ = lambda value, _, __: self.converter(value)
         elif self.takes_self and not self.takes_field:
-            self.__call__ = lambda value, instance, __: self.converter(value,
-                instance)
+            self.__call__ = lambda value, instance, __: self.converter(
+                value, instance
+            )
         elif not self.takes_self and self.takes_field:
-            self.__call__ = lambda value, __, field: self.converter(value,
-                field)
+            self.__call__ = lambda value, __, field: self.converter(
+                value, field
+            )
         else:
-            self.__call__ = lambda value, instance, field: self.converter(value
-                , instance, field)
+            self.__call__ = lambda value, instance, field: self.converter(
+                value, instance, field
+            )
+
         rt = ex.get_return_type()
         if rt is not None:
-            self.__call__.__annotations__['return'] = rt
+            self.__call__.__annotations__["return"] = rt

     @staticmethod
-    def _get_global_name(attr_name: str) ->str:
+    def _get_global_name(attr_name: str) -> str:
         """
         Return the name that a converter for an attribute name *attr_name*
         would have.
         """
-        pass
+        return f"__attr_converter_{attr_name}"

-    def _fmt_converter_call(self, attr_name: str, value_var: str) ->str:
+    def _fmt_converter_call(self, attr_name: str, value_var: str) -> str:
         """
         Return a string that calls the converter for an attribute name
         *attr_name* and the value in variable named *value_var* according to
         `self.takes_self` and `self.takes_field`.
         """
-        pass
+        if not (self.takes_self or self.takes_field):
+            return f"{self._get_global_name(attr_name)}({value_var})"
+
+        if self.takes_self and self.takes_field:
+            return f"{self._get_global_name(attr_name)}({value_var}, self, attr_dict['{attr_name}'])"
+
+        if self.takes_self:
+            return f"{self._get_global_name(attr_name)}({value_var}, self)"
+
+        return f"{self._get_global_name(attr_name)}({value_var}, attr_dict['{attr_name}'])"

     def __getstate__(self):
         """
         Return a dict containing only converter and takes_self -- the rest gets
         computed when loading.
         """
-        return {'converter': self.converter, 'takes_self': self.takes_self,
-            'takes_field': self.takes_field}
+        return {
+            "converter": self.converter,
+            "takes_self": self.takes_self,
+            "takes_field": self.takes_field,
+        }

     def __setstate__(self, state):
         """
@@ -909,24 +2771,39 @@ class Converter:
         self.__init__(**state)


-_f = [Attribute(name=name, default=NOTHING, validator=None, repr=True, cmp=
-    None, eq=True, order=False, hash=True, init=True, inherited=False) for
-    name in ('converter', 'takes_self', 'takes_field')]
-Converter = _add_hash(_add_eq(_add_repr(Converter, attrs=_f), attrs=_f),
-    attrs=_f)
-
-
-def make_class(name, attrs, bases=(object,), class_body=None, **
-    attributes_arguments):
-    """
+_f = [
+    Attribute(
+        name=name,
+        default=NOTHING,
+        validator=None,
+        repr=True,
+        cmp=None,
+        eq=True,
+        order=False,
+        hash=True,
+        init=True,
+        inherited=False,
+    )
+    for name in ("converter", "takes_self", "takes_field")
+]
+
+Converter = _add_hash(
+    _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f
+)
+
+
+def make_class(
+    name, attrs, bases=(object,), class_body=None, **attributes_arguments
+):
+    r"""
     A quick way to create a new class called *name* with *attrs*.

     Args:
         name (str): The name for the new class.

         attrs( list | dict):
-            A list of names or a dictionary of mappings of names to `attr.ib`\\
-            s / `attrs.field`\\ s.
+            A list of names or a dictionary of mappings of names to `attr.ib`\
+            s / `attrs.field`\ s.

             The order is deduced from the order of the names or attributes
             inside *attrs*.  Otherwise the order of the definition of the
@@ -946,7 +2823,61 @@ def make_class(name, attrs, bases=(object,), class_body=None, **
     .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
     .. versionchanged:: 23.2.0 *class_body*
     """
-    pass
+    if isinstance(attrs, dict):
+        cls_dict = attrs
+    elif isinstance(attrs, (list, tuple)):
+        cls_dict = {a: attrib() for a in attrs}
+    else:
+        msg = "attrs argument must be a dict or a list."
+        raise TypeError(msg)
+
+    pre_init = cls_dict.pop("__attrs_pre_init__", None)
+    post_init = cls_dict.pop("__attrs_post_init__", None)
+    user_init = cls_dict.pop("__init__", None)
+
+    body = {}
+    if class_body is not None:
+        body.update(class_body)
+    if pre_init is not None:
+        body["__attrs_pre_init__"] = pre_init
+    if post_init is not None:
+        body["__attrs_post_init__"] = post_init
+    if user_init is not None:
+        body["__init__"] = user_init
+
+    type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body))
+
+    # For pickling to work, the __module__ variable needs to be set to the
+    # frame where the class is created.  Bypass this step in environments where
+    # sys._getframe is not defined (Jython for example) or sys._getframe is not
+    # defined for arguments greater than 0 (IronPython).
+    with contextlib.suppress(AttributeError, ValueError):
+        type_.__module__ = sys._getframe(1).f_globals.get(
+            "__name__", "__main__"
+        )
+
+    # We do it here for proper warnings with meaningful stacklevel.
+    cmp = attributes_arguments.pop("cmp", None)
+    (
+        attributes_arguments["eq"],
+        attributes_arguments["order"],
+    ) = _determine_attrs_eq_order(
+        cmp,
+        attributes_arguments.get("eq"),
+        attributes_arguments.get("order"),
+        True,
+    )
+
+    cls = _attrs(these=cls_dict, **attributes_arguments)(type_)
+    # Only add type annotations now or "_attrs()" will complain:
+    cls.__annotations__ = {
+        k: v.type for k, v in cls_dict.items() if v.type is not None
+    }
+    return cls
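
# Editor's usage sketch (public API, not part of the patch):
#
#     >>> import attr
#     >>> C = attr.make_class("C", ["x", "y"])
#     >>> C(1, 2)
#     C(x=1, y=2)
#     >>> D = attr.make_class("D", {"x": attr.ib(default=0)}, frozen=True)
#     >>> D()
#     D(x=0)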
+
+
+# These are required within this module, so we define them here and merely
+# import them into .validators / .converters.


 @attrs(slots=True, unsafe_hash=True)
@@ -954,6 +2885,7 @@ class _AndValidator:
     """
     Compose many validators to a single one.
     """
+
     _validators = attrib()

     def __call__(self, inst, attr, value):
@@ -973,7 +2905,15 @@ def and_(*validators):

     .. versionadded:: 17.1.0
     """
-    pass
+    vals = []
+    for validator in validators:
+        vals.extend(
+            validator._validators
+            if isinstance(validator, _AndValidator)
+            else [validator]
+        )
+
+    return _AndValidator(tuple(vals))
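
# Editor's usage sketch (public API, not part of the patch); ``and_`` is also
# exposed as ``attr.validators.and_``:
#
#     >>> import attr
#     >>> from attr.validators import and_, instance_of
#     >>> @attr.s
#     ... class C:
#     ...     x = attr.ib(validator=and_(instance_of(int), lambda i, a, v: None))
#     >>> C(1)
#     C(x=1)
#     >>> C("1")
#     Traceback (most recent call last):
#       ...
#     TypeError: ...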


 def pipe(*converters):
@@ -991,4 +2931,30 @@ def pipe(*converters):

     .. versionadded:: 20.1.0
     """
-    pass
+
+    def pipe_converter(val, inst, field):
+        for c in converters:
+            val = c(val, inst, field) if isinstance(c, Converter) else c(val)
+
+        return val
+
+    if not converters:
+        # If the converter list is empty, pipe_converter is the identity.
+        A = typing.TypeVar("A")
+        pipe_converter.__annotations__.update({"val": A, "return": A})
+    else:
+        # Get parameter type from first converter.
+        t = _AnnotationExtractor(converters[0]).get_first_param_type()
+        if t:
+            pipe_converter.__annotations__["val"] = t
+
+        last = converters[-1]
+        if not PY_3_11_PLUS and isinstance(last, Converter):
+            last = last.__call__
+
+        # Get return type from last converter.
+        rt = _AnnotationExtractor(last).get_return_type()
+        if rt:
+            pipe_converter.__annotations__["return"] = rt
+
+    return Converter(pipe_converter, takes_self=True, takes_field=True)
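
# Editor's usage sketch (public API, not part of the patch); ``pipe`` is also
# exposed as ``attr.converters.pipe``, and the converters run left to right:
#
#     >>> import attr
#     >>> from attr.converters import pipe
#     >>> @attr.s
#     ... class C:
#     ...     x = attr.ib(converter=pipe(str.strip, int))
#     >>> C(" 42 ")
#     C(x=42)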
diff --git a/src/attr/_next_gen.py b/src/attr/_next_gen.py
index 067dc2f..dbb65cc 100644
--- a/src/attr/_next_gen.py
+++ b/src/attr/_next_gen.py
@@ -1,21 +1,51 @@
+# SPDX-License-Identifier: MIT
+
 """
 These are keyword-only APIs that call `attr.s` and `attr.ib` with different
 default values.
 """
+
+
 from functools import partial
+
 from . import setters
 from ._funcs import asdict as _asdict
 from ._funcs import astuple as _astuple
-from ._make import _DEFAULT_ON_SETATTR, NOTHING, _frozen_setattrs, attrib, attrs
+from ._make import (
+    _DEFAULT_ON_SETATTR,
+    NOTHING,
+    _frozen_setattrs,
+    attrib,
+    attrs,
+)
 from .exceptions import UnannotatedAttributeError


-def define(maybe_cls=None, *, these=None, repr=None, unsafe_hash=None, hash
-    =None, init=None, slots=True, frozen=False, weakref_slot=True, str=
-    False, auto_attribs=None, kw_only=False, cache_hash=False, auto_exc=
-    True, eq=None, order=False, auto_detect=True, getstate_setstate=None,
-    on_setattr=None, field_transformer=None, match_args=True):
-    """
+def define(
+    maybe_cls=None,
+    *,
+    these=None,
+    repr=None,
+    unsafe_hash=None,
+    hash=None,
+    init=None,
+    slots=True,
+    frozen=False,
+    weakref_slot=True,
+    str=False,
+    auto_attribs=None,
+    kw_only=False,
+    cache_hash=False,
+    auto_exc=True,
+    eq=None,
+    order=False,
+    auto_detect=True,
+    getstate_setstate=None,
+    on_setattr=None,
+    field_transformer=None,
+    match_args=True,
+):
+    r"""
     A class decorator that adds :term:`dunder methods` according to
     :term:`fields <field>` specified using :doc:`type annotations <types>`,
     `field()` calls, or the *these* argument.
@@ -99,7 +129,7 @@ def define(maybe_cls=None, *, these=None, repr=None, unsafe_hash=None, hash

         str (bool):
             Create a ``__str__`` method that is identical to ``__repr__``. This
-            is usually not necessary except for `Exception`\\ s.
+            is usually not necessary except for `Exception`\ s.

         eq (bool | None):
             If True or None (default), add ``__eq__`` and ``__ne__`` methods
@@ -149,7 +179,7 @@ def define(maybe_cls=None, *, these=None, repr=None, unsafe_hash=None, hash

                 - Our documentation on `hashing`,
                 - Python's documentation on `object.__hash__`,
-                - and the `GitHub issue that led to the default \\ behavior
+                - and the `GitHub issue that led to the default \ behavior
                   <https://github.com/python-attrs/attrs/issues/136>`_ for more
                   details.

@@ -243,9 +273,9 @@ def define(maybe_cls=None, *, these=None, repr=None, unsafe_hash=None, hash
             If left None, it will guess:

             1. If any attributes are annotated and no unannotated
-               `attrs.field`\\ s are found, it assumes *auto_attribs=True*.
+               `attrs.field`\ s are found, it assumes *auto_attribs=True*.
             2. Otherwise it assumes *auto_attribs=False* and tries to collect
-               `attrs.field`\\ s.
+               `attrs.field`\ s.

             If *attrs* decides to look at type annotations, **all** fields
             **must** be annotated. If *attrs* encounters a field that is set to
@@ -313,16 +343,95 @@ def define(maybe_cls=None, *, these=None, repr=None, unsafe_hash=None, hash
           for backwards-compatibility have been removed.

     """
-    pass
+
+    def do_it(cls, auto_attribs):
+        return attrs(
+            maybe_cls=cls,
+            these=these,
+            repr=repr,
+            hash=hash,
+            unsafe_hash=unsafe_hash,
+            init=init,
+            slots=slots,
+            frozen=frozen,
+            weakref_slot=weakref_slot,
+            str=str,
+            auto_attribs=auto_attribs,
+            kw_only=kw_only,
+            cache_hash=cache_hash,
+            auto_exc=auto_exc,
+            eq=eq,
+            order=order,
+            auto_detect=auto_detect,
+            collect_by_mro=True,
+            getstate_setstate=getstate_setstate,
+            on_setattr=on_setattr,
+            field_transformer=field_transformer,
+            match_args=match_args,
+        )
+
+    def wrap(cls):
+        """
+        Making this a wrapper ensures this code runs during class creation.
+
+        We also ensure that frozen-ness of classes is inherited.
+        """
+        nonlocal frozen, on_setattr
+
+        had_on_setattr = on_setattr not in (None, setters.NO_OP)
+
+        # By default, mutable classes convert & validate on setattr.
+        if frozen is False and on_setattr is None:
+            on_setattr = _DEFAULT_ON_SETATTR
+
+        # However, if we subclass a frozen class, we inherit the immutability
+        # and disable on_setattr.
+        for base_cls in cls.__bases__:
+            if base_cls.__setattr__ is _frozen_setattrs:
+                if had_on_setattr:
+                    msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
+                    raise ValueError(msg)
+
+                on_setattr = setters.NO_OP
+                break
+
+        if auto_attribs is not None:
+            return do_it(cls, auto_attribs)
+
+        try:
+            return do_it(cls, True)
+        except UnannotatedAttributeError:
+            return do_it(cls, False)
+
+    # maybe_cls's type depends on the usage of the decorator.  It's a class
+    # if it's used as `@attrs` but `None` if used as `@attrs()`.
+    if maybe_cls is None:
+        return wrap
+
+    return wrap(maybe_cls)


 mutable = define
 frozen = partial(define, frozen=True, on_setattr=None)


-def field(*, default=NOTHING, validator=None, repr=True, hash=None, init=
-    True, metadata=None, type=None, converter=None, factory=None, kw_only=
-    False, eq=None, order=None, on_setattr=None, alias=None):
+def field(
+    *,
+    default=NOTHING,
+    validator=None,
+    repr=True,
+    hash=None,
+    init=True,
+    metadata=None,
+    type=None,
+    converter=None,
+    factory=None,
+    kw_only=False,
+    eq=None,
+    order=None,
+    on_setattr=None,
+    alias=None,
+):
     """
     Create a new :term:`field` / :term:`attribute` on a class.

@@ -476,7 +585,22 @@ def field(*, default=NOTHING, validator=None, repr=True, hash=None, init=

        `attr.ib`
     """
-    pass
+    return attrib(
+        default=default,
+        validator=validator,
+        repr=repr,
+        hash=hash,
+        init=init,
+        metadata=metadata,
+        type=type,
+        converter=converter,
+        factory=factory,
+        kw_only=kw_only,
+        eq=eq,
+        order=order,
+        on_setattr=on_setattr,
+        alias=alias,
+    )


 def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
@@ -486,7 +610,13 @@ def asdict(inst, *, recurse=True, filter=None, value_serializer=None):

     .. versionadded:: 21.3.0
     """
-    pass
+    return _asdict(
+        inst=inst,
+        recurse=recurse,
+        filter=filter,
+        value_serializer=value_serializer,
+        retain_collection_types=True,
+    )


 def astuple(inst, *, recurse=True, filter=None):
@@ -496,4 +626,6 @@ def astuple(inst, *, recurse=True, filter=None):

     .. versionadded:: 21.3.0
     """
-    pass
+    return _astuple(
+        inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
+    )
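
A brief usage sketch of the next-gen API wired up above (`attrs.define`, `attrs.field`, and the collection-retaining `attrs.asdict` / `attrs.astuple`); the `Article` class is made up for illustration and is not part of the patch.

    import attrs

    @attrs.define
    class Article:
        title: str
        tags: tuple = attrs.field(factory=tuple)

    a = Article("hello", ("news", "tech"))
    a.title = "hello again"      # mutable by default; converters/validators re-run on setattr
    print(attrs.asdict(a))       # {'title': 'hello again', 'tags': ('news', 'tech')}
    print(attrs.astuple(a))      # collection types are retained, so tags stays a tuple
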
diff --git a/src/attr/_version_info.py b/src/attr/_version_info.py
index 6691f84..51a1312 100644
--- a/src/attr/_version_info.py
+++ b/src/attr/_version_info.py
@@ -1,4 +1,8 @@
+# SPDX-License-Identifier: MIT
+
+
 from functools import total_ordering
+
 from ._funcs import astuple
 from ._make import attrib, attrs

@@ -25,6 +29,7 @@ class VersionInfo:

     .. versionadded:: 19.2
     """
+
     year = attrib(type=int)
     minor = attrib(type=int)
     micro = attrib(type=int)
@@ -35,7 +40,13 @@ class VersionInfo:
         """
         Parse *s* and return a _VersionInfo.
         """
-        pass
+        v = s.split(".")
+        if len(v) == 3:
+            v.append("final")
+
+        return cls(
+            year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+        )

     def _ensure_tuple(self, other):
         """
@@ -44,13 +55,24 @@ class VersionInfo:
         Returns a possibly transformed *other* and ourselves as a tuple of
         the same length as *other*.
         """
-        pass
+
+        if self.__class__ is other.__class__:
+            other = astuple(other)
+
+        if not isinstance(other, tuple):
+            raise NotImplementedError
+
+        if not (1 <= len(other) <= 4):
+            raise NotImplementedError
+
+        return astuple(self)[: len(other)], other

     def __eq__(self, other):
         try:
             us, them = self._ensure_tuple(other)
         except NotImplementedError:
             return NotImplemented
+
         return us == them

     def __lt__(self, other):
@@ -58,4 +80,7 @@ class VersionInfo:
             us, them = self._ensure_tuple(other)
         except NotImplementedError:
             return NotImplemented
+
+        # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+        # have to do anything special with releaselevel for now.
         return us < them
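
To illustrate the comparison behaviour implemented above (again, not part of the patch): `_from_version_string` is the private parser added in this hunk, and comparisons against shorter tuples truncate both sides.

    import attr
    from attr import VersionInfo

    vi = VersionInfo._from_version_string("23.1.0")  # private parser shown above
    print(vi == (23, 1, 0, "final"))                 # True: "final" is appended to 3-part strings
    print(vi < (24,))                                # True: both sides truncated to length 1
    print(attr.version_info >= (20, 1))              # partial tuples work for the live version too
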
diff --git a/src/attr/converters.py b/src/attr/converters.py
index df5c697..9238311 100644
--- a/src/attr/converters.py
+++ b/src/attr/converters.py
@@ -1,10 +1,22 @@
+# SPDX-License-Identifier: MIT
+
 """
 Commonly useful converters.
 """
+
+
 import typing
+
 from ._compat import _AnnotationExtractor
 from ._make import NOTHING, Factory, pipe
-__all__ = ['default_if_none', 'optional', 'pipe', 'to_bool']
+
+
+__all__ = [
+    "default_if_none",
+    "optional",
+    "pipe",
+    "to_bool",
+]


 def optional(converter):
@@ -21,7 +33,23 @@ def optional(converter):

     .. versionadded:: 17.1.0
     """
-    pass
+
+    def optional_converter(val):
+        if val is None:
+            return None
+        return converter(val)
+
+    xtr = _AnnotationExtractor(converter)
+
+    t = xtr.get_first_param_type()
+    if t:
+        optional_converter.__annotations__["val"] = typing.Optional[t]
+
+    rt = xtr.get_return_type()
+    if rt:
+        optional_converter.__annotations__["return"] = typing.Optional[rt]
+
+    return optional_converter


 def default_if_none(default=NOTHING, factory=None):
@@ -50,7 +78,37 @@ def default_if_none(default=NOTHING, factory=None):

     .. versionadded:: 18.2.0
     """
-    pass
+    if default is NOTHING and factory is None:
+        msg = "Must pass either `default` or `factory`."
+        raise TypeError(msg)
+
+    if default is not NOTHING and factory is not None:
+        msg = "Must pass either `default` or `factory` but not both."
+        raise TypeError(msg)
+
+    if factory is not None:
+        default = Factory(factory)
+
+    if isinstance(default, Factory):
+        if default.takes_self:
+            msg = "`takes_self` is not supported by default_if_none."
+            raise ValueError(msg)
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default.factory()
+
+    else:
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default
+
+    return default_if_none_converter


 def to_bool(val):
@@ -81,4 +139,13 @@ def to_bool(val):

     .. versionadded:: 21.3.0
     """
-    pass
+    if isinstance(val, str):
+        val = val.lower()
+
+    if val in (True, "true", "t", "yes", "y", "on", "1", 1):
+        return True
+    if val in (False, "false", "f", "no", "n", "off", "0", 0):
+        return False
+
+    msg = f"Cannot convert value to bool: {val!r}"
+    raise ValueError(msg)
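
A short sketch of the three converters filled in above, used together on an invented `Settings` class (illustration only):

    import attrs
    from attrs import converters

    @attrs.define
    class Settings:
        port: int = attrs.field(converter=converters.optional(int))
        retries: int = attrs.field(converter=converters.default_if_none(3))
        debug: bool = attrs.field(converter=converters.to_bool)

    print(Settings(port=None, retries=None, debug="yes"))
    # Settings(port=None, retries=3, debug=True)
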
diff --git a/src/attr/exceptions.py b/src/attr/exceptions.py
index bbe9547..3b7abb8 100644
--- a/src/attr/exceptions.py
+++ b/src/attr/exceptions.py
@@ -1,4 +1,7 @@
+# SPDX-License-Identifier: MIT
+
 from __future__ import annotations
+
 from typing import ClassVar


@@ -12,6 +15,7 @@ class FrozenError(AttributeError):

     .. versionadded:: 20.1.0
     """
+
     msg = "can't set attribute"
     args: ClassVar[tuple[str]] = [msg]

diff --git a/src/attr/filters.py b/src/attr/filters.py
index c3c2781..689b170 100644
--- a/src/attr/filters.py
+++ b/src/attr/filters.py
@@ -1,6 +1,9 @@
+# SPDX-License-Identifier: MIT
+
 """
 Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
 """
+
 from ._make import Attribute


@@ -8,7 +11,11 @@ def _split_what(what):
     """
     Returns a tuple of `frozenset`s of classes and attributes.
     """
-    pass
+    return (
+        frozenset(cls for cls in what if isinstance(cls, type)),
+        frozenset(cls for cls in what if isinstance(cls, str)),
+        frozenset(cls for cls in what if isinstance(cls, Attribute)),
+    )


 def include(*what):
@@ -26,7 +33,16 @@ def include(*what):

     .. versionchanged:: 23.1.0 Accept strings with field names.
     """
-    pass
+    cls, names, attrs = _split_what(what)
+
+    def include_(attribute, value):
+        return (
+            value.__class__ in cls
+            or attribute.name in names
+            or attribute in attrs
+        )
+
+    return include_


 def exclude(*what):
@@ -44,4 +60,13 @@ def exclude(*what):

     .. versionchanged:: 23.3.0 Accept field name string as input argument
     """
-    pass
+    cls, names, attrs = _split_what(what)
+
+    def exclude_(attribute, value):
+        return not (
+            value.__class__ in cls
+            or attribute.name in names
+            or attribute in attrs
+        )
+
+    return exclude_
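
The `include()` / `exclude()` filters defined above are meant to be passed to `attrs.asdict` / `attrs.astuple`; a minimal sketch with an invented `User` class:

    import attrs
    from attrs import filters

    @attrs.define
    class User:
        name: str
        password: str
        age: int

    u = User("jane", "hunter2", 30)
    print(attrs.asdict(u, filter=filters.exclude("password")))  # {'name': 'jane', 'age': 30}
    print(attrs.asdict(u, filter=filters.include(str)))         # {'name': 'jane', 'password': 'hunter2'}
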
diff --git a/src/attr/setters.py b/src/attr/setters.py
index 1bb6a12..a9ce016 100644
--- a/src/attr/setters.py
+++ b/src/attr/setters.py
@@ -1,6 +1,9 @@
+# SPDX-License-Identifier: MIT
+
 """
 Commonly used hooks for on_setattr.
 """
+
 from . import _config
 from .exceptions import FrozenAttributeError

@@ -11,7 +14,16 @@ def pipe(*setters):

     .. versionadded:: 20.1.0
     """
-    pass
+
+    def wrapped_pipe(instance, attrib, new_value):
+        rv = new_value
+
+        for setter in setters:
+            rv = setter(instance, attrib, rv)
+
+        return rv
+
+    return wrapped_pipe


 def frozen(_, __, ___):
@@ -20,7 +32,7 @@ def frozen(_, __, ___):

     .. versionadded:: 20.1.0
     """
-    pass
+    raise FrozenAttributeError()


 def validate(instance, attrib, new_value):
@@ -29,7 +41,16 @@ def validate(instance, attrib, new_value):

     .. versionadded:: 20.1.0
     """
-    pass
+    if _config._run_validators is False:
+        return new_value
+
+    v = attrib.validator
+    if not v:
+        return new_value
+
+    v(instance, attrib, new_value)
+
+    return new_value


 def convert(instance, attrib, new_value):
@@ -39,7 +60,20 @@ def convert(instance, attrib, new_value):

     .. versionadded:: 20.1.0
     """
-    pass
+    c = attrib.converter
+    if c:
+        # This can be removed once we drop 3.8 and use attrs.Converter instead.
+        from ._make import Converter
+
+        if not isinstance(c, Converter):
+            return c(new_value)
+
+        return c(new_value, instance, attrib)
+
+    return new_value


+# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+# Sphinx's autodata stopped working, so the docstring is inlined in the API
+# docs.
 NO_OP = object()
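
An illustrative sketch of the on_setattr hooks above: `pipe()` chains hooks, `convert`/`validate` re-run the field's converter and validator on assignment, and `NO_OP` opts a field out. The `Guarded` class is invented for this example.

    import attrs
    from attrs import setters, validators

    @attrs.define(on_setattr=setters.pipe(setters.convert, setters.validate))
    class Guarded:
        count: int = attrs.field(converter=int, validator=validators.ge(0))
        label: str = attrs.field(default="", on_setattr=setters.NO_OP)  # hooks skipped here

    g = Guarded(1)
    g.count = "5"      # convert hook coerces to int, validate hook then checks >= 0
    # g.count = -1     # would raise ValueError from the ge(0) validator
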
diff --git a/src/attr/validators.py b/src/attr/validators.py
index d98967a..8a56717 100644
--- a/src/attr/validators.py
+++ b/src/attr/validators.py
@@ -1,18 +1,43 @@
+# SPDX-License-Identifier: MIT
+
 """
 Commonly useful validators.
 """
+
+
 import operator
 import re
+
 from contextlib import contextmanager
 from re import Pattern
+
 from ._config import get_run_validators, set_run_validators
 from ._make import _AndValidator, and_, attrib, attrs
 from .converters import default_if_none
 from .exceptions import NotCallableError
-__all__ = ['and_', 'deep_iterable', 'deep_mapping', 'disabled', 'ge',
-    'get_disabled', 'gt', 'in_', 'instance_of', 'is_callable', 'le', 'lt',
-    'matches_re', 'max_len', 'min_len', 'not_', 'optional', 'or_',
-    'set_disabled']
+
+
+__all__ = [
+    "and_",
+    "deep_iterable",
+    "deep_mapping",
+    "disabled",
+    "ge",
+    "get_disabled",
+    "gt",
+    "in_",
+    "instance_of",
+    "is_callable",
+    "le",
+    "lt",
+    "matches_re",
+    "max_len",
+    "min_len",
+    "not_",
+    "optional",
+    "or_",
+    "set_disabled",
+]


 def set_disabled(disabled):
@@ -30,7 +55,7 @@ def set_disabled(disabled):

     .. versionadded:: 21.3.0
     """
-    pass
+    set_run_validators(not disabled)


 def get_disabled():
@@ -42,7 +67,7 @@ def get_disabled():

     .. versionadded:: 21.3.0
     """
-    pass
+    return not get_run_validators()


 @contextmanager
@@ -56,7 +81,11 @@ def disabled():

     .. versionadded:: 21.3.0
     """
-    pass
+    set_run_validators(False)
+    try:
+        yield
+    finally:
+        set_run_validators(True)


 @attrs(repr=False, slots=True, unsafe_hash=True)
@@ -68,13 +97,16 @@ class _InstanceOfValidator:
         We use a callable class to be able to change the ``__repr__``.
         """
         if not isinstance(value, self.type):
-            msg = (
-                f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})."
-                )
-            raise TypeError(msg, attr, self.type, value)
+            msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})."
+            raise TypeError(
+                msg,
+                attr,
+                self.type,
+                value,
+            )

     def __repr__(self):
-        return f'<instance_of validator for type {self.type!r}>'
+        return f"<instance_of validator for type {self.type!r}>"


 def instance_of(type):
@@ -91,7 +123,7 @@ def instance_of(type):
             With a human readable error message, the attribute (of type
             `attrs.Attribute`), the expected type, and the value it got.
     """
-    pass
+    return _InstanceOfValidator(type)


 @attrs(repr=False, frozen=True, slots=True)
@@ -104,17 +136,20 @@ class _MatchesReValidator:
         We use a callable class to be able to change the ``__repr__``.
         """
         if not self.match_func(value):
-            msg = (
-                f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)"
-                )
-            raise ValueError(msg, attr, self.pattern, value)
+            msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)"
+            raise ValueError(
+                msg,
+                attr,
+                self.pattern,
+                value,
+            )

     def __repr__(self):
-        return f'<matches_re validator for pattern {self.pattern!r}>'
+        return f"<matches_re validator for pattern {self.pattern!r}>"


 def matches_re(regex, flags=0, func=None):
-    """
+    r"""
     A validator that raises `ValueError` if the initializer is called with a
     string that doesn't match *regex*.

@@ -134,7 +169,31 @@ def matches_re(regex, flags=0, func=None):
     .. versionadded:: 19.2.0
     .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
     """
-    pass
+    valid_funcs = (re.fullmatch, None, re.search, re.match)
+    if func not in valid_funcs:
+        msg = "'func' must be one of {}.".format(
+            ", ".join(
+                sorted(e and e.__name__ or "None" for e in set(valid_funcs))
+            )
+        )
+        raise ValueError(msg)
+
+    if isinstance(regex, Pattern):
+        if flags:
+            msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead"
+            raise TypeError(msg)
+        pattern = regex
+    else:
+        pattern = re.compile(regex, flags)
+
+    if func is re.match:
+        match_func = pattern.match
+    elif func is re.search:
+        match_func = pattern.search
+    else:
+        match_func = pattern.fullmatch
+
+    return _MatchesReValidator(pattern, match_func)


 @attrs(repr=False, slots=True, unsafe_hash=True)
@@ -144,10 +203,11 @@ class _OptionalValidator:
     def __call__(self, inst, attr, value):
         if value is None:
             return
+
         self.validator(inst, attr, value)

     def __repr__(self):
-        return f'<optional validator for {self.validator!r} or None>'
+        return f"<optional validator for {self.validator!r} or None>"


 def optional(validator):
@@ -165,7 +225,10 @@ def optional(validator):
     .. versionchanged:: 17.1.0 *validator* can be a list of validators.
     .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators.
     """
-    pass
+    if isinstance(validator, (list, tuple)):
+        return _OptionalValidator(_AndValidator(validator))
+
+    return _OptionalValidator(validator)


 @attrs(repr=False, slots=True, unsafe_hash=True)
@@ -176,16 +239,20 @@ class _InValidator:
     def __call__(self, inst, attr, value):
         try:
             in_options = value in self.options
-        except TypeError:
+        except TypeError:  # e.g. `1 in "abc"`
             in_options = False
+
         if not in_options:
-            msg = (
-                f"'{attr.name}' must be in {self._original_options!r} (got {value!r})"
-                )
-            raise ValueError(msg, attr, self._original_options, value)
+            msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})"
+            raise ValueError(
+                msg,
+                attr,
+                self._original_options,
+                value,
+            )

     def __repr__(self):
-        return f'<in_ validator with options {self._original_options!r}>'
+        return f"<in_ validator with options {self._original_options!r}>"


 def in_(options):
@@ -216,25 +283,33 @@ def in_(options):
        *options* that are a list, dict, or a set are now transformed into a
        tuple to keep the validator hashable.
     """
-    pass
+    repr_options = options
+    if isinstance(options, (list, dict, set)):
+        options = tuple(options)
+
+    return _InValidator(options, repr_options)


 @attrs(repr=False, slots=False, unsafe_hash=True)
 class _IsCallableValidator:
-
     def __call__(self, inst, attr, value):
         """
         We use a callable class to be able to change the ``__repr__``.
         """
         if not callable(value):
             message = (
-                "'{name}' must be callable (got {value!r} that is a {actual!r})."
-                )
-            raise NotCallableError(msg=message.format(name=attr.name, value
-                =value, actual=value.__class__), value=value)
+                "'{name}' must be callable "
+                "(got {value!r} that is a {actual!r})."
+            )
+            raise NotCallableError(
+                msg=message.format(
+                    name=attr.name, value=value, actual=value.__class__
+                ),
+                value=value,
+            )

     def __repr__(self):
-        return '<is_callable validator>'
+        return "<is_callable validator>"


 def is_callable():
@@ -250,14 +325,15 @@ def is_callable():
             With a human readable error message containing the attribute
             (`attrs.Attribute`) name, and the value it got.
     """
-    pass
+    return _IsCallableValidator()


 @attrs(repr=False, slots=True, unsafe_hash=True)
 class _DeepIterable:
     member_validator = attrib(validator=is_callable())
-    iterable_validator = attrib(default=None, validator=optional(is_callable())
-        )
+    iterable_validator = attrib(
+        default=None, validator=optional(is_callable())
+    )

     def __call__(self, inst, attr, value):
         """
@@ -265,15 +341,20 @@ class _DeepIterable:
         """
         if self.iterable_validator is not None:
             self.iterable_validator(inst, attr, value)
+
         for member in value:
             self.member_validator(inst, attr, member)

     def __repr__(self):
-        iterable_identifier = ('' if self.iterable_validator is None else
-            f' {self.iterable_validator!r}')
+        iterable_identifier = (
+            ""
+            if self.iterable_validator is None
+            else f" {self.iterable_validator!r}"
+        )
         return (
-            f'<deep_iterable validator for{iterable_identifier} iterables of {self.member_validator!r}>'
-            )
+            f"<deep_iterable validator for{iterable_identifier}"
+            f" iterables of {self.member_validator!r}>"
+        )


 def deep_iterable(member_validator, iterable_validator=None):
@@ -291,7 +372,9 @@ def deep_iterable(member_validator, iterable_validator=None):

     .. versionadded:: 19.1.0
     """
-    pass
+    if isinstance(member_validator, (list, tuple)):
+        member_validator = and_(*member_validator)
+    return _DeepIterable(member_validator, iterable_validator)


 @attrs(repr=False, slots=True, unsafe_hash=True)
@@ -306,14 +389,13 @@ class _DeepMapping:
         """
         if self.mapping_validator is not None:
             self.mapping_validator(inst, attr, value)
+
         for key in value:
             self.key_validator(inst, attr, key)
             self.value_validator(inst, attr, value[key])

     def __repr__(self):
-        return (
-            f'<deep_mapping validator for objects mapping {self.key_validator!r} to {self.value_validator!r}>'
-            )
+        return f"<deep_mapping validator for objects mapping {self.key_validator!r} to {self.value_validator!r}>"


 def deep_mapping(key_validator, value_validator, mapping_validator=None):
@@ -333,7 +415,7 @@ def deep_mapping(key_validator, value_validator, mapping_validator=None):
     Raises:
         TypeError: if any sub-validators fail
     """
-    pass
+    return _DeepMapping(key_validator, value_validator, mapping_validator)


 @attrs(repr=False, frozen=True, slots=True)
@@ -347,13 +429,11 @@ class _NumberValidator:
         We use a callable class to be able to change the ``__repr__``.
         """
         if not self.compare_func(value, self.bound):
-            msg = (
-                f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
-                )
+            msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
             raise ValueError(msg)

     def __repr__(self):
-        return f'<Validator for x {self.compare_op} {self.bound}>'
+        return f"<Validator for x {self.compare_op} {self.bound}>"


 def lt(val):
@@ -368,7 +448,7 @@ def lt(val):

     .. versionadded:: 21.3.0
     """
-    pass
+    return _NumberValidator(val, "<", operator.lt)


 def le(val):
@@ -383,7 +463,7 @@ def le(val):

     .. versionadded:: 21.3.0
     """
-    pass
+    return _NumberValidator(val, "<=", operator.le)


 def ge(val):
@@ -398,7 +478,7 @@ def ge(val):

     .. versionadded:: 21.3.0
     """
-    pass
+    return _NumberValidator(val, ">=", operator.ge)


 def gt(val):
@@ -413,7 +493,7 @@ def gt(val):

     .. versionadded:: 21.3.0
     """
-    pass
+    return _NumberValidator(val, ">", operator.gt)


 @attrs(repr=False, frozen=True, slots=True)
@@ -425,13 +505,11 @@ class _MaxLengthValidator:
         We use a callable class to be able to change the ``__repr__``.
         """
         if len(value) > self.max_length:
-            msg = (
-                f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}"
-                )
+            msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}"
             raise ValueError(msg)

     def __repr__(self):
-        return f'<max_len validator for {self.max_length}>'
+        return f"<max_len validator for {self.max_length}>"


 def max_len(length):
@@ -444,7 +522,7 @@ def max_len(length):

     .. versionadded:: 21.3.0
     """
-    pass
+    return _MaxLengthValidator(length)


 @attrs(repr=False, frozen=True, slots=True)
@@ -456,13 +534,11 @@ class _MinLengthValidator:
         We use a callable class to be able to change the ``__repr__``.
         """
         if len(value) < self.min_length:
-            msg = (
-                f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}"
-                )
+            msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}"
             raise ValueError(msg)

     def __repr__(self):
-        return f'<min_len validator for {self.min_length}>'
+        return f"<min_len validator for {self.min_length}>"


 def min_len(length):
@@ -475,7 +551,7 @@ def min_len(length):

     .. versionadded:: 22.1.0
     """
-    pass
+    return _MinLengthValidator(length)


 @attrs(repr=False, slots=True, unsafe_hash=True)
@@ -487,13 +563,16 @@ class _SubclassOfValidator:
         We use a callable class to be able to change the ``__repr__``.
         """
         if not issubclass(value, self.type):
-            msg = (
-                f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
-                )
-            raise TypeError(msg, attr, self.type, value)
+            msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
+            raise TypeError(
+                msg,
+                attr,
+                self.type,
+                value,
+            )

     def __repr__(self):
-        return f'<subclass_of validator for type {self.type!r}>'
+        return f"<subclass_of validator for type {self.type!r}>"


 def _subclass_of(type):
@@ -510,31 +589,44 @@ def _subclass_of(type):
             With a human readable error message, the attribute (of type
             `attrs.Attribute`), the expected type, and the value it got.
     """
-    pass
+    return _SubclassOfValidator(type)


 @attrs(repr=False, slots=True, unsafe_hash=True)
 class _NotValidator:
     validator = attrib()
-    msg = attrib(converter=default_if_none(
-        "not_ validator child '{validator!r}' did not raise a captured error"))
-    exc_types = attrib(validator=deep_iterable(member_validator=
-        _subclass_of(Exception), iterable_validator=instance_of(tuple)))
+    msg = attrib(
+        converter=default_if_none(
+            "not_ validator child '{validator!r}' "
+            "did not raise a captured error"
+        )
+    )
+    exc_types = attrib(
+        validator=deep_iterable(
+            member_validator=_subclass_of(Exception),
+            iterable_validator=instance_of(tuple),
+        ),
+    )

     def __call__(self, inst, attr, value):
         try:
             self.validator(inst, attr, value)
         except self.exc_types:
-            pass
+            pass  # suppress error to invert validity
         else:
-            raise ValueError(self.msg.format(validator=self.validator,
-                exc_types=self.exc_types), attr, self.validator, value,
-                self.exc_types)
+            raise ValueError(
+                self.msg.format(
+                    validator=self.validator,
+                    exc_types=self.exc_types,
+                ),
+                attr,
+                self.validator,
+                value,
+                self.exc_types,
+            )

     def __repr__(self):
-        return (
-            f'<not_ validator wrapping {self.validator!r}, capturing {self.exc_types!r}>'
-            )
+        return f"<not_ validator wrapping {self.validator!r}, capturing {self.exc_types!r}>"


 def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):
@@ -566,7 +658,11 @@ def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):

     .. versionadded:: 22.2.0
     """
-    pass
+    try:
+        exc_types = tuple(exc_types)
+    except TypeError:
+        exc_types = (exc_types,)
+    return _NotValidator(validator, msg, exc_types)


 @attrs(repr=False, slots=True, unsafe_hash=True)
@@ -577,15 +673,16 @@ class _OrValidator:
         for v in self.validators:
             try:
                 v(inst, attr, value)
-            except Exception:
+            except Exception:  # noqa: BLE001, PERF203, S112
                 continue
             else:
                 return
-        msg = f'None of {self.validators!r} satisfied for value {value!r}'
+
+        msg = f"None of {self.validators!r} satisfied for value {value!r}"
         raise ValueError(msg)

     def __repr__(self):
-        return f'<or validator wrapping {self.validators!r}>'
+        return f"<or validator wrapping {self.validators!r}>"


 def or_(*validators):
@@ -607,4 +704,8 @@ def or_(*validators):

     .. versionadded:: 24.1.0
     """
-    pass
+    vals = []
+    for v in validators:
+        vals.extend(v.validators if isinstance(v, _OrValidator) else [v])
+
+    return _OrValidator(tuple(vals))
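
A compact sketch exercising several of the validators restored above (`matches_re`, `in_`, `or_`, `not_`); the `Account` class and its fields are hypothetical:

    import attrs
    from attrs import validators

    @attrs.define
    class Account:
        user: str = attrs.field(validator=validators.matches_re(r"[a-z]+"))
        role: str = attrs.field(validator=validators.in_(["admin", "guest"]))
        token: object = attrs.field(
            validator=validators.or_(
                validators.instance_of(int), validators.instance_of(str)
            )
        )
        pin: str = attrs.field(validator=validators.not_(validators.instance_of(int)))

    Account("jane", "guest", "abc123", "0000")    # passes all four validators
    # Account("Jane!", "guest", "abc123", "0000") # ValueError: regex fullmatch fails
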
diff --git a/src/attrs/converters.py b/src/attrs/converters.py
index 54e74a1..7821f6c 100644
--- a/src/attrs/converters.py
+++ b/src/attrs/converters.py
@@ -1 +1,3 @@
-from attr.converters import *
+# SPDX-License-Identifier: MIT
+
+from attr.converters import *  # noqa: F403
diff --git a/src/attrs/exceptions.py b/src/attrs/exceptions.py
index d6746ed..3323f9d 100644
--- a/src/attrs/exceptions.py
+++ b/src/attrs/exceptions.py
@@ -1 +1,3 @@
-from attr.exceptions import *
+# SPDX-License-Identifier: MIT
+
+from attr.exceptions import *  # noqa: F403
diff --git a/src/attrs/filters.py b/src/attrs/filters.py
index 2cd93f8..3080f48 100644
--- a/src/attrs/filters.py
+++ b/src/attrs/filters.py
@@ -1 +1,3 @@
-from attr.filters import *
+# SPDX-License-Identifier: MIT
+
+from attr.filters import *  # noqa: F403
diff --git a/src/attrs/setters.py b/src/attrs/setters.py
index 5cd6a8f..f3d73bb 100644
--- a/src/attrs/setters.py
+++ b/src/attrs/setters.py
@@ -1 +1,3 @@
-from attr.setters import *
+# SPDX-License-Identifier: MIT
+
+from attr.setters import *  # noqa: F403
diff --git a/src/attrs/validators.py b/src/attrs/validators.py
index 07710e6..037e124 100644
--- a/src/attrs/validators.py
+++ b/src/attrs/validators.py
@@ -1 +1,3 @@
-from attr.validators import *
+# SPDX-License-Identifier: MIT
+
+from attr.validators import *  # noqa: F403
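
The `attrs.*` modules touched above are thin star re-export shims over `attr.*`, so both import paths resolve to the same objects; a quick check (illustration only):

    from attr import validators as attr_validators
    from attrs import validators as attrs_validators

    # The shim does `from attr.validators import *`, so these are identical objects.
    assert attrs_validators.instance_of is attr_validators.instance_of
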