Skip to content

back to Reference (Gold) summary

Reference (Gold): pylint

Pytest Summary for test tests

status count
passed 1825
skipped 333
xfailed 5
failed 13
total 2176
collected 2176

Failed pytest tests:

unittest_non_ascii_name.py::TestNonAsciiChecker::test_check_import[from_okay_module_import_bad_as_good_and_star]

unittest_non_ascii_name.py::TestNonAsciiChecker::test_check_import[from_okay_module_import_bad_as_good_and_star]
self = 
data = 'from foo.bar import functiøn as good, * #@', modname = '', path = None

    def _data_build(
        self, data: str, modname: str, path: str | None
    ) -> tuple[nodes.Module, rebuilder.TreeRebuilder]:
        """Build tree node from data and add some informations."""
        try:
>           node, parser_module = _parse_string(
                data, type_comments=True, modname=modname
            )

.venv/lib/python3.10/site-packages/astroid/builder.py:181: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
.venv/lib/python3.10/site-packages/astroid/builder.py:480: in _parse_string
    parsed = parser_module.parse(
.venv/lib/python3.10/site-packages/astroid/_ast.py:30: in parse
    return ast.parse(string, type_comments=type_comments)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

source = 'from foo.bar import functiøn as good, * #@\n', filename = ''
mode = 'exec'

    def parse(source, filename='', mode='exec', *,
              type_comments=False, feature_version=None):
        """
        Parse the source into an AST node.
        Equivalent to compile(source, filename, mode, PyCF_ONLY_AST).
        Pass type_comments=True to get back type comments where the syntax allows.
        """
        flags = PyCF_ONLY_AST
        if type_comments:
            flags |= PyCF_TYPE_COMMENTS
        if isinstance(feature_version, tuple):
            major, minor = feature_version  # Should be a 2-tuple.
            assert major == 3
            feature_version = minor
        elif feature_version is None:
            feature_version = -1
        # Else it should be an int giving the minor version for 3.x.
>       return compile(source, filename, mode, flags,
                       _feature_version=feature_version)
E         File "", line 1
E           from foo.bar import functiøn as good, * #@
E                                                 ^
E       SyntaxError: invalid syntax

/usr/lib/python3.10/ast.py:50: SyntaxError

The above exception was the direct cause of the following exception:

self = 
import_statement = 'from foo.bar import functiøn as good, *'
wrong_name = 'functiøn'

    @pytest.mark.parametrize(
        "import_statement, wrong_name",
        [
            pytest.param("import fürimma", "fürimma", id="bad_single_main_module"),
            pytest.param(
                "import fürimma as okay",
                None,
                id="bad_single_main_module_with_okay_alias",
            ),
            pytest.param(
                "import fürimma, pathlib",
                "fürimma",
                id="bad_single_main_module_with_stdlib_import",
            ),
            pytest.param(
                "import pathlib, os, foobar, fürimma",
                "fürimma",
                id="stdlib_with_bad_single_main_module",
            ),
            pytest.param(
                "import pathlib, os, foobar, sys as systëm",
                "systëm",
                id="stdlib_with_bad_alias",
            ),
            pytest.param(
                "import fürimma as okay, pathlib",
                None,
                id="bad_single_main_module_with_okay_alias_with_stdlib_import",
            ),
            pytest.param(
                "import fürimma.submodule", "fürimma.submodule", id="bad_main_module"
            ),
            pytest.param(
                "import fürimma.submodule as submodule",
                None,
                id="bad_main_module_with_okay_alias",
            ),
            pytest.param(
                "import main_module.fürimma", "main_module.fürimma", id="bad_submodule"
            ),
            pytest.param(
                "import main_module.fürimma as okay",
                None,
                id="bad_submodule_with_okay_alias",
            ),
            pytest.param(
                "import main_module.fürimma as not_økay",
                "not_økay",
                id="bad_submodule_with_bad_alias",
            ),
            pytest.param(
                "from foo.bar import function", None, id="from_okay_module_import_okay"
            ),
            pytest.param(
                "from foo.bär import function", None, id="from_bad_module_import_okay"
            ),
            pytest.param(
                "from foo.bar import functiøn",
                "functiøn",
                id="from_okay_module_import_bad",
            ),
            pytest.param(
                "from foo.bar import functiøn as function",
                None,
                id="from_okay_module_import_bad_as_good",
            ),
            pytest.param(
                "from foo.bär import functiøn as function",
                None,
                id="from_bad_module_import_bad_as_good",
            ),
            pytest.param(
                "from foo.bar import functiøn as føl",
                "føl",
                id="from_okay_module_import_bad_as_bad",
            ),
            pytest.param(
                "from foo.bar import functiøn as good, bäd",
                "bäd",
                id="from_okay_module_import_bad_as_good_and_bad",
            ),
            pytest.param(
                "from foo.bar import functiøn as good, bäd",
                "bäd",
                id="from_okay_module_import_bad_as_good_and_bad",
            ),
            pytest.param(
                "from foo.bar import functiøn as good, *",
                # We still have functiøn within our namespace and could detect this
                # But to do this properly we would need to check all `*` imports
                # -> Too much effort!
                "functiøn",
                id="from_okay_module_import_bad_as_good_and_star",
                marks=pytest.mark.xfail(
                    reason="We don't know what is imported when using star"
                ),
            ),
        ],
    )
    def test_check_import(self, import_statement: str, wrong_name: str | None) -> None:
        """We expect that for everything that user can change there is a message."""
>       node = astroid.extract_node(f"{import_statement} #@")

tests/checkers/unittest_non_ascii_name.py:266: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
.venv/lib/python3.10/site-packages/astroid/builder.py:446: in extract_node
    tree = parse(code, module_name=module_name)
.venv/lib/python3.10/site-packages/astroid/builder.py:303: in parse
    return builder.string_build(code, modname=module_name, path=path)
.venv/lib/python3.10/site-packages/astroid/builder.py:151: in string_build
    module, builder = self._data_build(data, modname, path)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
data = 'from foo.bar import functiøn as good, * #@', modname = '', path = None

    def _data_build(
        self, data: str, modname: str, path: str | None
    ) -> tuple[nodes.Module, rebuilder.TreeRebuilder]:
        """Build tree node from data and add some informations."""
        try:
            node, parser_module = _parse_string(
                data, type_comments=True, modname=modname
            )
        except (TypeError, ValueError, SyntaxError) as exc:
>           raise AstroidSyntaxError(
                "Parsing Python code failed:\n{error}",
                source=data,
                modname=modname,
                path=path,
                error=exc,
            ) from exc
E           astroid.exceptions.AstroidSyntaxError: Parsing Python code failed:
E           invalid syntax (, line 1)

.venv/lib/python3.10/site-packages/astroid/builder.py:185: AstroidSyntaxError

unittest_invalid_encoding.py::TestInvalidEncoding::test_invalid_unicode_files[pep_bidirectional_utf_16_le_no_bom.txt-2]

unittest_invalid_encoding.py::TestInvalidEncoding::test_invalid_unicode_files[pep_bidirectional_utf_16_le_no_bom.txt-2]
self = 
tmp_path = PosixPath('/tmp/pytest-of-root/pytest-0/test_invalid_unicode_files_pep0')
test_file = 'pep_bidirectional_utf_16_le_no_bom.txt', line_no = 2

    @pytest.mark.parametrize(
        "test_file, line_no",
        [
            pytest.param(
                "pep_bidirectional_utf_16_le_no_bom.txt",
                2,
                marks=pytest.mark.xfail(
                    reason="Python currently doesn't support UTF-16 code detection"
                ),
            ),
            pytest.param(
                "pep_bidirectional_utf_32_le_no_bom.txt",
                2,
                marks=pytest.mark.xfail(
                    reason="Python currently doesn't support UTF-32 code detection"
                ),
            ),
            # A note to the xfails above: If you open these files in an editor, you
            # only will see garbage if you don't select the correct encoding by hand.
            # Still maybe in the future the python way of defining the encoding could
            # work - even so it is unlikely as the first line is not ASCII and would
            # have to be treated differently anyway...
            ("pep_bidirectional_utf_16_bom.txt", 1),
            ("pep_bidirectional_utf_32_bom.txt", 1),
        ],
    )
    def test_invalid_unicode_files(
        self, tmp_path: Path, test_file: str, line_no: int
    ) -> None:
        test_file_path = UNICODE_TESTS / test_file
        target = shutil.copy(
            test_file_path, tmp_path / test_file.replace(".txt", ".py")
        )

        # Fake node as otherwise we get syntax errors etc...
        # So currently the UTF-16/UTF-32 tests does not work, as UTF-16 / UTF-32
        # is not really working at all in in Python, but checking it now already
        # is future save in case that changes....

        module = FakeNode(Path(target).read_bytes())

>       with self.assertAddsMessages(
            pylint.testutils.MessageTest(
                msg_id="invalid-unicode-codec",
                confidence=pylint.interfaces.HIGH,
                # node=module,
                line=line_no,
                end_line=1,
                col_offset=None,
                end_col_offset=None,
            ),
            pylint.testutils.MessageTest(
                msg_id="bidirectional-unicode",
                confidence=pylint.interfaces.HIGH,
                # node=module,
                line=line_no + 2,
                end_line=line_no + 2,
                col_offset=0,
                end_col_offset=37,
            ),
        ):

tests/checkers/unittest_unicode/unittest_invalid_encoding.py:70: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/usr/lib/python3.10/contextlib.py:142: in __exit__
    next(self.gen)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
ignore_position = False
messages = (MessageTest(msg_id='invalid-unicode-codec', line=2, node=None, args=None, confidence=Confidence(name='HIGH', descript...me='HIGH', description='Warning that is not based on inference result.'), col_offset=0, end_line=4, end_col_offset=37))
got = [MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', descript...GH', description='Warning that is not based on inference result.'), col_offset=12, end_line=1, end_col_offset=13), ...]
no_msg = 'No message.'

    @contextlib.contextmanager
    def assertAddsMessages(
        self, *messages: MessageTest, ignore_position: bool = False
    ) -> Generator[None, None, None]:
        """Assert that exactly the given method adds the given messages.

        The list of messages must exactly match *all* the messages added by the
        method. Additionally, we check to see whether the args in each message can
        actually be substituted into the message string.

        Using the keyword argument `ignore_position`, all checks for position
        arguments (line, col_offset, ...) will be skipped. This can be used to
        just test messages for the correct node.
        """
        yield
        got = self.linter.release_messages()
        no_msg = "No message."
        expected = "\n".join(repr(m) for m in messages) or no_msg
        got_str = "\n".join(repr(m) for m in got) or no_msg
        msg = (
            "Expected messages did not match actual.\n"
            f"\nExpected:\n{expected}\n\nGot:\n{got_str}\n"
        )

>       assert len(messages) == len(got), msg
E       AssertionError: Expected messages did not match actual.
E         
E         Expected:
E         MessageTest(msg_id='invalid-unicode-codec', line=2, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=None, end_line=1, end_col_offset=None)
E         MessageTest(msg_id='bidirectional-unicode', line=4, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=0, end_line=4, end_col_offset=37)
E         
E         Got:
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=2, end_line=1, end_col_offset=3)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=4, end_line=1, end_col_offset=5)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=6, end_line=1, end_col_offset=7)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=8, end_line=1, end_col_offset=9)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=10, end_line=1, end_col_offset=11)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=12, end_line=1, end_col_offset=13)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=14, end_line=1, end_col_offset=15)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=16, end_line=1, end_col_offset=17)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=18, end_line=1, end_col_offset=19)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=20, end_line=1, end_col_offset=21)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=22, end_line=1, end_col_offset=23)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=24, end_line=1, end_col_offset=25)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=26, end_line=1, end_col_offset=27)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=28, end_line=1, end_col_offset=29)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=30, end_line=1, end_col_offset=31)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=32, end_line=1, end_col_offset=33)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=34, end_line=1, end_col_offset=35)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=36, end_line=1, end_col_offset=37)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=38, end_line=1, end_col_offset=39)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=40, end_line=1, end_col_offset=41)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=42, end_line=1, end_col_offset=43)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=44, end_line=1, end_col_offset=45)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=46, end_line=1, end_col_offset=47)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=48, end_line=1, end_col_offset=49)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=50, end_line=1, end_col_offset=51)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=52, end_line=1, end_col_offset=53)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=54, end_line=1, end_col_offset=55)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=56, end_line=1, end_col_offset=57)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=58, end_line=1, end_col_offset=59)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=60, end_line=1, end_col_offset=61)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=62, end_line=1, end_col_offset=63)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=64, end_line=1, end_col_offset=65)
E         
E       assert 2 == 32
E        +  where 2 = len((MessageTest(msg_id='invalid-unicode-codec', line=2, node=None, args=None, confidence=Confidence(name='HIGH', descript...me='HIGH', description='Warning that is not based on inference result.'), col_offset=0, end_line=4, end_col_offset=37)))
E        +  and   32 = len([MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', descript...GH', description='Warning that is not based on inference result.'), col_offset=12, end_line=1, end_col_offset=13), ...])

pylint/testutils/checker_test_case.py:65: AssertionError

unittest_invalid_encoding.py::TestInvalidEncoding::test_invalid_unicode_files[pep_bidirectional_utf_32_le_no_bom.txt-2]

unittest_invalid_encoding.py::TestInvalidEncoding::test_invalid_unicode_files[pep_bidirectional_utf_32_le_no_bom.txt-2]
self = 
tmp_path = PosixPath('/tmp/pytest-of-root/pytest-0/test_invalid_unicode_files_pep1')
test_file = 'pep_bidirectional_utf_32_le_no_bom.txt', line_no = 2

    @pytest.mark.parametrize(
        "test_file, line_no",
        [
            pytest.param(
                "pep_bidirectional_utf_16_le_no_bom.txt",
                2,
                marks=pytest.mark.xfail(
                    reason="Python currently doesn't support UTF-16 code detection"
                ),
            ),
            pytest.param(
                "pep_bidirectional_utf_32_le_no_bom.txt",
                2,
                marks=pytest.mark.xfail(
                    reason="Python currently doesn't support UTF-32 code detection"
                ),
            ),
            # A note to the xfails above: If you open these files in an editor, you
            # only will see garbage if you don't select the correct encoding by hand.
            # Still maybe in the future the python way of defining the encoding could
            # work - even so it is unlikely as the first line is not ASCII and would
            # have to be treated differently anyway...
            ("pep_bidirectional_utf_16_bom.txt", 1),
            ("pep_bidirectional_utf_32_bom.txt", 1),
        ],
    )
    def test_invalid_unicode_files(
        self, tmp_path: Path, test_file: str, line_no: int
    ) -> None:
        test_file_path = UNICODE_TESTS / test_file
        target = shutil.copy(
            test_file_path, tmp_path / test_file.replace(".txt", ".py")
        )

        # Fake node as otherwise we get syntax errors etc...
        # So currently the UTF-16/UTF-32 tests does not work, as UTF-16 / UTF-32
        # is not really working at all in in Python, but checking it now already
        # is future save in case that changes....

        module = FakeNode(Path(target).read_bytes())

>       with self.assertAddsMessages(
            pylint.testutils.MessageTest(
                msg_id="invalid-unicode-codec",
                confidence=pylint.interfaces.HIGH,
                # node=module,
                line=line_no,
                end_line=1,
                col_offset=None,
                end_col_offset=None,
            ),
            pylint.testutils.MessageTest(
                msg_id="bidirectional-unicode",
                confidence=pylint.interfaces.HIGH,
                # node=module,
                line=line_no + 2,
                end_line=line_no + 2,
                col_offset=0,
                end_col_offset=37,
            ),
        ):

tests/checkers/unittest_unicode/unittest_invalid_encoding.py:70: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/usr/lib/python3.10/contextlib.py:142: in __exit__
    next(self.gen)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
ignore_position = False
messages = (MessageTest(msg_id='invalid-unicode-codec', line=2, node=None, args=None, confidence=Confidence(name='HIGH', descript...me='HIGH', description='Warning that is not based on inference result.'), col_offset=0, end_line=4, end_col_offset=37))
got = [MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', descript...HIGH', description='Warning that is not based on inference result.'), col_offset=8, end_line=1, end_col_offset=9), ...]
no_msg = 'No message.'

    @contextlib.contextmanager
    def assertAddsMessages(
        self, *messages: MessageTest, ignore_position: bool = False
    ) -> Generator[None, None, None]:
        """Assert that exactly the given method adds the given messages.

        The list of messages must exactly match *all* the messages added by the
        method. Additionally, we check to see whether the args in each message can
        actually be substituted into the message string.

        Using the keyword argument `ignore_position`, all checks for position
        arguments (line, col_offset, ...) will be skipped. This can be used to
        just test messages for the correct node.
        """
        yield
        got = self.linter.release_messages()
        no_msg = "No message."
        expected = "\n".join(repr(m) for m in messages) or no_msg
        got_str = "\n".join(repr(m) for m in got) or no_msg
        msg = (
            "Expected messages did not match actual.\n"
            f"\nExpected:\n{expected}\n\nGot:\n{got_str}\n"
        )

>       assert len(messages) == len(got), msg
E       AssertionError: Expected messages did not match actual.
E         
E         Expected:
E         MessageTest(msg_id='invalid-unicode-codec', line=2, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=None, end_line=1, end_col_offset=None)
E         MessageTest(msg_id='bidirectional-unicode', line=4, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=0, end_line=4, end_col_offset=37)
E         
E         Got:
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=2, end_line=1, end_col_offset=3)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=3, end_line=1, end_col_offset=4)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=4, end_line=1, end_col_offset=5)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=6, end_line=1, end_col_offset=7)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=7, end_line=1, end_col_offset=8)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=8, end_line=1, end_col_offset=9)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=10, end_line=1, end_col_offset=11)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=11, end_line=1, end_col_offset=12)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=12, end_line=1, end_col_offset=13)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=14, end_line=1, end_col_offset=15)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=15, end_line=1, end_col_offset=16)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=16, end_line=1, end_col_offset=17)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=18, end_line=1, end_col_offset=19)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=19, end_line=1, end_col_offset=20)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=20, end_line=1, end_col_offset=21)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=22, end_line=1, end_col_offset=23)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=23, end_line=1, end_col_offset=24)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=24, end_line=1, end_col_offset=25)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=26, end_line=1, end_col_offset=27)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=27, end_line=1, end_col_offset=28)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=28, end_line=1, end_col_offset=29)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=30, end_line=1, end_col_offset=31)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=31, end_line=1, end_col_offset=32)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=32, end_line=1, end_col_offset=33)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=34, end_line=1, end_col_offset=35)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=35, end_line=1, end_col_offset=36)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=36, end_line=1, end_col_offset=37)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=38, end_line=1, end_col_offset=39)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=39, end_line=1, end_col_offset=40)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=40, end_line=1, end_col_offset=41)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=42, end_line=1, end_col_offset=43)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=43, end_line=1, end_col_offset=44)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=44, end_line=1, end_col_offset=45)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=46, end_line=1, end_col_offset=47)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=47, end_line=1, end_col_offset=48)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=48, end_line=1, end_col_offset=49)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=50, end_line=1, end_col_offset=51)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=51, end_line=1, end_col_offset=52)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=52, end_line=1, end_col_offset=53)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=54, end_line=1, end_col_offset=55)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=55, end_line=1, end_col_offset=56)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=56, end_line=1, end_col_offset=57)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=58, end_line=1, end_col_offset=59)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=59, end_line=1, end_col_offset=60)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=60, end_line=1, end_col_offset=61)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=62, end_line=1, end_col_offset=63)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=63, end_line=1, end_col_offset=64)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=64, end_line=1, end_col_offset=65)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=66, end_line=1, end_col_offset=67)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=67, end_line=1, end_col_offset=68)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=68, end_line=1, end_col_offset=69)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=70, end_line=1, end_col_offset=71)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=71, end_line=1, end_col_offset=72)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=72, end_line=1, end_col_offset=73)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=74, end_line=1, end_col_offset=75)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=75, end_line=1, end_col_offset=76)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=76, end_line=1, end_col_offset=77)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=78, end_line=1, end_col_offset=79)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=79, end_line=1, end_col_offset=80)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=80, end_line=1, end_col_offset=81)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=82, end_line=1, end_col_offset=83)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=83, end_line=1, end_col_offset=84)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=84, end_line=1, end_col_offset=85)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=86, end_line=1, end_col_offset=87)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=87, end_line=1, end_col_offset=88)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=88, end_line=1, end_col_offset=89)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=90, end_line=1, end_col_offset=91)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=91, end_line=1, end_col_offset=92)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=92, end_line=1, end_col_offset=93)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=94, end_line=1, end_col_offset=95)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=95, end_line=1, end_col_offset=96)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=96, end_line=1, end_col_offset=97)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=98, end_line=1, end_col_offset=99)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=99, end_line=1, end_col_offset=100)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=100, end_line=1, end_col_offset=101)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=102, end_line=1, end_col_offset=103)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=103, end_line=1, end_col_offset=104)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=104, end_line=1, end_col_offset=105)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=106, end_line=1, end_col_offset=107)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=107, end_line=1, end_col_offset=108)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=108, end_line=1, end_col_offset=109)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=110, end_line=1, end_col_offset=111)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=111, end_line=1, end_col_offset=112)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=112, end_line=1, end_col_offset=113)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=114, end_line=1, end_col_offset=115)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=115, end_line=1, end_col_offset=116)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=116, end_line=1, end_col_offset=117)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=118, end_line=1, end_col_offset=119)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=119, end_line=1, end_col_offset=120)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=120, end_line=1, end_col_offset=121)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=122, end_line=1, end_col_offset=123)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=123, end_line=1, end_col_offset=124)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=124, end_line=1, end_col_offset=125)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=126, end_line=1, end_col_offset=127)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=127, end_line=1, end_col_offset=128)
E         MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', description='Warning that is not based on inference result.'), col_offset=128, end_line=1, end_col_offset=129)
E         
E       assert 2 == 96
E        +  where 2 = len((MessageTest(msg_id='invalid-unicode-codec', line=2, node=None, args=None, confidence=Confidence(name='HIGH', descript...me='HIGH', description='Warning that is not based on inference result.'), col_offset=0, end_line=4, end_col_offset=37)))
E        +  and   96 = len([MessageTest(msg_id='invalid-character-nul', line=1, node=None, args=None, confidence=Confidence(name='HIGH', descript...HIGH', description='Warning that is not based on inference result.'), col_offset=8, end_line=1, end_col_offset=9), ...])

pylint/testutils/checker_test_case.py:65: AssertionError

unittest_invalid_encoding.py::TestInvalidEncoding::test__determine_codec[pep263_utf-16le_real]

unittest_invalid_encoding.py::TestInvalidEncoding::test__determine_codec[pep263_utf-16le_real]
self = 
content = b'#\x00 \x00c\x00o\x00d\x00i\x00n\x00g\x00:\x00 \x00U\x00T\x00F\x00-\x001\x006\x00l\x00e\x00'
codec = 'utf-16le', line = 1

    @pytest.mark.parametrize(
        "content, codec, line",
        [
            pytest.param(b"# Nothing", "utf-8", 1, id="default_utf8"),
            pytest.param(b"# coding: latin-1", "iso-8859-1", 1, id="pep263_latin1"),
            pytest.param(
                b"#!/usr/bin/python\n# coding: latin-1",
                "iso-8859-1",
                2,
                id="pep263_latin1_multiline",
            ),
            pytest.param(b"# coding: ascii", "ascii", 1, id="pep263_ascii"),
            pytest.param(b"# coding: UTF-8", "utf-8", 1, id="pep263_utf-8"),
            # This looks correct but is actually wrong. If you would try to decode
            # the byte to utf-16be it would fail
            pytest.param(
                b"# coding: UTF-16le", "utf-16le", 1, id="pep263_utf-16le_fake"
            ),
            # This contains no bom but a correct encoding line in none ascii
            # So this fails at the moment
            pytest.param(
                "# coding: UTF-16le".encode("utf-16le"),
                "utf-16le",
                1,
                id="pep263_utf-16le_real",
                marks=pytest.mark.xfail(reason="Currently not supported by Python"),
            ),
            *(
                pytest.param(bom, codec, 1, id=f"bom_{codec}")
                for codec, bom in (
                    ("utf-8", codecs.BOM_UTF8),
                    ("utf-16le", codecs.BOM_UTF16_LE),
                    ("utf-16be", codecs.BOM_UTF16_BE),
                    ("utf-32le", codecs.BOM_UTF32_LE),
                    ("utf-32be", codecs.BOM_UTF32_BE),
                )
            ),
        ],
    )
    def test__determine_codec(self, content: bytes, codec: str, line: int) -> None:
        """The codec determined should be exact no matter what we throw at it."""
>       assert self.checker._determine_codec(io.BytesIO(content)) == (codec, line)
E       AssertionError: assert ('utf-8', 1) == ('utf-16le', 1)
E         
E         At index 0 diff: 'utf-8' != 'utf-16le'
E         Use -v to get more diff

tests/checkers/unittest_unicode/unittest_invalid_encoding.py:133: AssertionError

test_functional.py::test_functional[inconsistent_returns]

test_functional.py::test_functional[inconsistent_returns]
self = 

    def runTest(self) -> None:
>       self._runTest()
E       AssertionError: Wrong message(s) raised for "inconsistent_returns.py":
E       
E       Unexpected in testdata:
E        111: inconsistent-return-statements

pylint/testutils/lint_module_test.py:147: AssertionError

test_functional.py::test_functional[invalid_all_format_valid_5]

test_functional.py::test_functional[invalid_all_format_valid_5]
self = 

    def runTest(self) -> None:
>       self._runTest()
E       AssertionError: Wrong message(s) raised for "invalid_all_format_valid_5.py":
E       
E       Unexpected in testdata:
E          5: no-name-in-module

pylint/testutils/lint_module_test.py:147: AssertionError

test_functional.py::test_functional[invalid_all_format_valid_6]

test_functional.py::test_functional[invalid_all_format_valid_6]
self = 

    def runTest(self) -> None:
>       self._runTest()
E       AssertionError: Wrong message(s) raised for "invalid_all_format_valid_6.py":
E       
E       Unexpected in testdata:
E          5: no-name-in-module

pylint/testutils/lint_module_test.py:147: AssertionError

test_functional.py::test_functional[recursion_error_3152]

test_functional.py::test_functional[recursion_error_3152]
self = 

    def runTest(self) -> None:
>       self._runTest()
E       AssertionError: Wrong message(s) raised for "recursion_error_3152.py":
E       
E       Unexpected in testdata:
E          6: abstract-method

pylint/testutils/lint_module_test.py:147: AssertionError

test_functional.py::test_functional[regression_4680]

test_functional.py::test_functional[regression_4680]
self = 

    def runTest(self) -> None:
>       self._runTest()
E       AssertionError: Wrong message(s) raised for "regression_4680.py":
E       
E       Unexpected in testdata:
E          3: no-name-in-module
E         18: c-extension-no-member

pylint/testutils/lint_module_test.py:147: AssertionError

test_functional.py::test_functional[unreachable]

test_functional.py::test_functional[unreachable]
self = 

    def runTest(self) -> None:
>       self._runTest()
E       AssertionError: Wrong message(s) raised for "unreachable.py":
E       
E       Expected in testdata:
E         42: unreachable
E         80: unreachable

pylint/testutils/lint_module_test.py:147: AssertionError

test_functional.py::test_functional[undefined_variable_typing]

test_functional.py::test_functional[undefined_variable_typing]
self = 

    def runTest(self) -> None:
>       self._runTest()
E       AssertionError: Wrong message(s) raised for "undefined_variable_typing.py":
E       
E       Unexpected in testdata:
E         10: no-name-in-module

pylint/testutils/lint_module_test.py:147: AssertionError

test_functional.py::test_functional[unused_import_assigned_to]

test_functional.py::test_functional[unused_import_assigned_to]
self = 

    def runTest(self) -> None:
>       self._runTest()
E       AssertionError: Wrong message(s) raised for "unused_import_assigned_to.py":
E       
E       Unexpected in testdata:
E         25: c-extension-no-member

pylint/testutils/lint_module_test.py:147: AssertionError

test_functional.py::test_functional[used_before_assignment]

test_functional.py::test_functional[used_before_assignment]
self = 

    def runTest(self) -> None:
>       self._runTest()
E       AssertionError: Wrong message(s) raised for "used_before_assignment.py":
E       
E       Unexpected in testdata:
E        127: possibly-used-before-assignment

pylint/testutils/lint_module_test.py:147: AssertionError

test_functional.py::test_functional[used_before_assignment_else_return]

test_functional.py::test_functional[used_before_assignment_else_return]
self = 

    def runTest(self) -> None:
>       self._runTest()
E       AssertionError: Wrong message(s) raised for "used_before_assignment_else_return.py":
E       
E       Unexpected in testdata:
E         71: used-before-assignment

pylint/testutils/lint_module_test.py:147: AssertionError

test_regr.py::test_package[file_names1-]

test_regr.py::test_package[file_names1-]
finalize_linter = Checker 'main' (responsible for 'F0001', 'F0002', 'F0010', 'F0011', 'I0001', 'I0010', 'I0011', 'I0013', 'I0020', 'I0021', 'I0022', 'E0001', 'E0011', 'W0012', 'R0022', 'E0013', 'E0014', 'E0015')
file_names = ['precedence_test']
check = . at 0x7f244b8623b0>

    @pytest.mark.parametrize(
        "file_names, check",
        [
            (["package.__init__"], Equals("")),
            (["precedence_test"], Equals("")),
            (["import_package_subpackage_module"], Equals("")),
            (["pylint.checkers.__init__"], lambda x: "__path__" not in x),
            ([join(REGR_DATA, "classdoc_usage.py")], Equals("")),
            ([join(REGR_DATA, "module_global.py")], Equals("")),
            ([join(REGR_DATA, "decimal_inference.py")], Equals("")),
            ([join(REGR_DATA, "absimp", "string.py")], Equals("")),
            ([join(REGR_DATA, "bad_package")], lambda x: "Unused import missing" in x),
        ],
    )
    def test_package(
        finalize_linter: PyLinter, file_names: list[str], check: Callable[[str], bool]
    ) -> None:
        finalize_linter.check(file_names)
        finalize_linter.reporter = cast(  # Due to fixture
            testutils.GenericTestReporter, finalize_linter.reporter
        )
        got = finalize_linter.reporter.finalize().strip()
>       assert check(got)
E       assert False
E        +  where False = . at 0x7f244b8623b0>("E: 21: Module 'package.AudioTime' has no 'DECIMAL' member")

tests/test_regr.py:77: AssertionError

test_self.py::TestRunTC::test_abbreviations_are_not_supported

test_self.py::TestRunTC::test_abbreviations_are_not_supported
self = 

    @pytest.mark.xfail
    def test_abbreviations_are_not_supported(self) -> None:
        expected = "No module named --load-plugin"
>       self._test_output([".", "--load-plugin"], expected_output=expected)

tests/test_self.py:263: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
args = ['--rcfile=/testbed/pylint/testutils/testing_pylintrc', '.', '--load-plugin']
expected_output = 'No module named --load-plugin', unexpected_output = ''

    def _test_output(
        self, args: list[str], expected_output: str, unexpected_output: str = ""
    ) -> None:
        out = StringIO()
        args = _add_rcfile_default_pylintrc(args)
        self._run_pylint(args, out=out)
        actual_output = self._clean_paths(out.getvalue())
        expected_output = self._clean_paths(expected_output)
>       assert expected_output.strip() in actual_output.strip()
E       AssertionError: assert 'No module named --load-plugin' in 'Option --load-plugin expects a value'
E        +  where 'No module named --load-plugin' = ()
E        +    where  = 'No module named --load-plugin'.strip
E        +  and   'Option --load-plugin expects a value' = ()
E        +    where  = 'Option --load-plugin expects a value\n'.strip

tests/test_self.py:158: AssertionError

test_self.py::TestRunTC::test_do_not_import_files_from_local_directory[args0]

test_self.py::TestRunTC::test_do_not_import_files_from_local_directory[args0]
self = 
tmp_path = PosixPath('/tmp/pytest-of-root/pytest-0/test_do_not_import_files_from_0')
args = ['--disable=import-error,unused-import']

    @pytest.mark.parametrize(
        "args",
        [
            ["--disable=import-error,unused-import"],
            # Test with multiple jobs for 'hmac.py' for which we have a
            # CVE against: https://github.com/pylint-dev/pylint/issues/959
            ["-j2", "--disable=import-error,unused-import"],
        ],
    )
    def test_do_not_import_files_from_local_directory(
        self, tmp_path: Path, args: list[str]
    ) -> None:
        for path in ("astroid.py", "hmac.py"):
            file_path = tmp_path / path
            file_path.write_text("'Docstring'\nimport completely_unknown\n")
            pylint_call = [sys.executable, "-m", "pylint", *args, path]
            with _test_cwd(tmp_path):
                subprocess.check_output(pylint_call, cwd=str(tmp_path))
            new_python_path = os.environ.get("PYTHONPATH", "").strip(":")
            with _test_cwd(tmp_path), _test_environ_pythonpath(f"{new_python_path}:"):
                # Appending a colon to PYTHONPATH should not break path stripping
                # https://github.com/pylint-dev/pylint/issues/3636
>               subprocess.check_output(pylint_call, cwd=str(tmp_path))

tests/test_self.py:921: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/usr/lib/python3.10/subprocess.py:421: in check_output
    return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

input = None, capture_output = False, timeout = None, check = True
popenargs = (['/testbed/.venv/bin/python3', '-m', 'pylint', '--disable=import-error,unused-import', 'astroid.py'],)
kwargs = {'cwd': '/tmp/pytest-of-root/pytest-0/test_do_not_import_files_from_0', 'stdout': -1}
process = 
stdout = b'', stderr = None, retcode = 1

    def run(*popenargs,
            input=None, capture_output=False, timeout=None, check=False, **kwargs):
        """Run command with arguments and return a CompletedProcess instance.

        The returned instance will have attributes args, returncode, stdout and
        stderr. By default, stdout and stderr are not captured, and those attributes
        will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them,
        or pass capture_output=True to capture both.

        If check is True and the exit code was non-zero, it raises a
        CalledProcessError. The CalledProcessError object will have the return code
        in the returncode attribute, and output & stderr attributes if those streams
        were captured.

        If timeout is given, and the process takes too long, a TimeoutExpired
        exception will be raised.

        There is an optional argument "input", allowing you to
        pass bytes or a string to the subprocess's stdin.  If you use this argument
        you may not also use the Popen constructor's "stdin" argument, as
        it will be used internally.

        By default, all communication is in bytes, and therefore any "input" should
        be bytes, and the stdout and stderr will be bytes. If in text mode, any
        "input" should be a string, and stdout and stderr will be strings decoded
        according to locale encoding, or by "encoding" if set. Text mode is
        triggered by setting any of text, encoding, errors or universal_newlines.

        The other arguments are the same as for the Popen constructor.
        """
        if input is not None:
            if kwargs.get('stdin') is not None:
                raise ValueError('stdin and input arguments may not both be used.')
            kwargs['stdin'] = PIPE

        if capture_output:
            if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None:
                raise ValueError('stdout and stderr arguments may not be used '
                                 'with capture_output.')
            kwargs['stdout'] = PIPE
            kwargs['stderr'] = PIPE

        with Popen(*popenargs, **kwargs) as process:
            try:
                stdout, stderr = process.communicate(input, timeout=timeout)
            except TimeoutExpired as exc:
                process.kill()
                if _mswindows:
                    # Windows accumulates the output in a single blocking
                    # read() call run on child threads, with the timeout
                    # being done in a join() on those threads.  communicate()
                    # _after_ kill() is required to collect that and add it
                    # to the exception.
                    exc.stdout, exc.stderr = process.communicate()
                else:
                    # POSIX _communicate already populated the output so
                    # far into the TimeoutExpired exception.
                    process.wait()
                raise
            except:  # Including KeyboardInterrupt, communicate handled that.
                process.kill()
                # We don't call process.wait() as .__exit__ does that for us.
                raise
            retcode = process.poll()
            if check and retcode:
>               raise CalledProcessError(retcode, process.args,
                                         output=stdout, stderr=stderr)
E               subprocess.CalledProcessError: Command '['/testbed/.venv/bin/python3', '-m', 'pylint', '--disable=import-error,unused-import', 'astroid.py']' returned non-zero exit status 1.

/usr/lib/python3.10/subprocess.py:526: CalledProcessError

test_self.py::TestRunTC::test_do_not_import_files_from_local_directory[args1]

test_self.py::TestRunTC::test_do_not_import_files_from_local_directory[args1]
self = 
tmp_path = PosixPath('/tmp/pytest-of-root/pytest-0/test_do_not_import_files_from_1')
args = ['-j2', '--disable=import-error,unused-import']

    @pytest.mark.parametrize(
        "args",
        [
            ["--disable=import-error,unused-import"],
            # Test with multiple jobs for 'hmac.py' for which we have a
            # CVE against: https://github.com/pylint-dev/pylint/issues/959
            ["-j2", "--disable=import-error,unused-import"],
        ],
    )
    def test_do_not_import_files_from_local_directory(
        self, tmp_path: Path, args: list[str]
    ) -> None:
        for path in ("astroid.py", "hmac.py"):
            file_path = tmp_path / path
            file_path.write_text("'Docstring'\nimport completely_unknown\n")
            pylint_call = [sys.executable, "-m", "pylint", *args, path]
            with _test_cwd(tmp_path):
                subprocess.check_output(pylint_call, cwd=str(tmp_path))
            new_python_path = os.environ.get("PYTHONPATH", "").strip(":")
            with _test_cwd(tmp_path), _test_environ_pythonpath(f"{new_python_path}:"):
                # Appending a colon to PYTHONPATH should not break path stripping
                # https://github.com/pylint-dev/pylint/issues/3636
>               subprocess.check_output(pylint_call, cwd=str(tmp_path))

tests/test_self.py:921: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/usr/lib/python3.10/subprocess.py:421: in check_output
    return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

input = None, capture_output = False, timeout = None, check = True
popenargs = (['/testbed/.venv/bin/python3', '-m', 'pylint', '-j2', '--disable=import-error,unused-import', 'astroid.py'],)
kwargs = {'cwd': '/tmp/pytest-of-root/pytest-0/test_do_not_import_files_from_1', 'stdout': -1}
process = 
stdout = b'', stderr = None, retcode = 1

    def run(*popenargs,
            input=None, capture_output=False, timeout=None, check=False, **kwargs):
        """Run command with arguments and return a CompletedProcess instance.

        The returned instance will have attributes args, returncode, stdout and
        stderr. By default, stdout and stderr are not captured, and those attributes
        will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them,
        or pass capture_output=True to capture both.

        If check is True and the exit code was non-zero, it raises a
        CalledProcessError. The CalledProcessError object will have the return code
        in the returncode attribute, and output & stderr attributes if those streams
        were captured.

        If timeout is given, and the process takes too long, a TimeoutExpired
        exception will be raised.

        There is an optional argument "input", allowing you to
        pass bytes or a string to the subprocess's stdin.  If you use this argument
        you may not also use the Popen constructor's "stdin" argument, as
        it will be used internally.

        By default, all communication is in bytes, and therefore any "input" should
        be bytes, and the stdout and stderr will be bytes. If in text mode, any
        "input" should be a string, and stdout and stderr will be strings decoded
        according to locale encoding, or by "encoding" if set. Text mode is
        triggered by setting any of text, encoding, errors or universal_newlines.

        The other arguments are the same as for the Popen constructor.
        """
        if input is not None:
            if kwargs.get('stdin') is not None:
                raise ValueError('stdin and input arguments may not both be used.')
            kwargs['stdin'] = PIPE

        if capture_output:
            if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None:
                raise ValueError('stdout and stderr arguments may not be used '
                                 'with capture_output.')
            kwargs['stdout'] = PIPE
            kwargs['stderr'] = PIPE

        with Popen(*popenargs, **kwargs) as process:
            try:
                stdout, stderr = process.communicate(input, timeout=timeout)
            except TimeoutExpired as exc:
                process.kill()
                if _mswindows:
                    # Windows accumulates the output in a single blocking
                    # read() call run on child threads, with the timeout
                    # being done in a join() on those threads.  communicate()
                    # _after_ kill() is required to collect that and add it
                    # to the exception.
                    exc.stdout, exc.stderr = process.communicate()
                else:
                    # POSIX _communicate already populated the output so
                    # far into the TimeoutExpired exception.
                    process.wait()
                raise
            except:  # Including KeyboardInterrupt, communicate handled that.
                process.kill()
                # We don't call process.wait() as .__exit__ does that for us.
                raise
            retcode = process.poll()
            if check and retcode:
>               raise CalledProcessError(retcode, process.args,
                                         output=stdout, stderr=stderr)
E               subprocess.CalledProcessError: Command '['/testbed/.venv/bin/python3', '-m', 'pylint', '-j2', '--disable=import-error,unused-import', 'astroid.py']' returned non-zero exit status 1.

/usr/lib/python3.10/subprocess.py:526: CalledProcessError

Patch diff

diff --git a/pylint/__pkginfo__.py b/pylint/__pkginfo__.py
index 74986243c..729826c7e 100644
--- a/pylint/__pkginfo__.py
+++ b/pylint/__pkginfo__.py
@@ -1,18 +1,43 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """This module exists for compatibility reasons.

 It's updated via tbump, do not modify.
 """
+
 from __future__ import annotations
-__version__ = '3.2.6'

+__version__ = "3.2.6"

-def get_numversion_from_version(v: str) ->tuple[int, int, int]:
+
+def get_numversion_from_version(v: str) -> tuple[int, int, int]:
     """Kept for compatibility reason.

     See https://github.com/pylint-dev/pylint/issues/4399
     https://github.com/pylint-dev/pylint/issues/4420,
     """
-    pass
+    version = v.replace("pylint-", "")
+    result_version = []
+    for number in version.split(".")[0:3]:
+        try:
+            result_version.append(int(number))
+        except ValueError:
+            current_number = ""
+            for char in number:
+                if char.isdigit():
+                    current_number += char
+                else:
+                    break
+            try:
+                result_version.append(int(current_number))
+            except ValueError:
+                result_version.append(0)
+    while len(result_version) != 3:
+        result_version.append(0)
+
+    return tuple(result_version)  # type: ignore[return-value] # mypy can't infer the length


 numversion = get_numversion_from_version(__version__)
diff --git a/pylint/checkers/async.py b/pylint/checkers/async.py
index 03df31611..a8ee77302 100644
--- a/pylint/checkers/async.py
+++ b/pylint/checkers/async.py
@@ -1,23 +1,96 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Checker for anything related to the async protocol (PEP 492)."""
+
 from __future__ import annotations
+
 import sys
 from typing import TYPE_CHECKING
+
 import astroid
 from astroid import nodes, util
+
 from pylint import checkers
 from pylint.checkers import utils as checker_utils
 from pylint.checkers.utils import decorated_with
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 class AsyncChecker(checkers.BaseChecker):
-    name = 'async'
-    msgs = {'E1700': ('Yield inside async function',
-        'yield-inside-async-function',
-        'Used when an `yield` or `yield from` statement is found inside an async function.'
-        , {'minversion': (3, 5)}), 'E1701': (
-        "Async context manager '%s' doesn't implement __aenter__ and __aexit__."
-        , 'not-async-context-manager',
-        'Used when an async context manager is used with an object that does not implement the async context management protocol.'
-        , {'minversion': (3, 5)})}
+    name = "async"
+    msgs = {
+        "E1700": (
+            "Yield inside async function",
+            "yield-inside-async-function",
+            "Used when an `yield` or `yield from` statement is "
+            "found inside an async function.",
+            {"minversion": (3, 5)},
+        ),
+        "E1701": (
+            "Async context manager '%s' doesn't implement __aenter__ and __aexit__.",
+            "not-async-context-manager",
+            "Used when an async context manager is used with an object "
+            "that does not implement the async context management protocol.",
+            {"minversion": (3, 5)},
+        ),
+    }
+
+    def open(self) -> None:
+        self._mixin_class_rgx = self.linter.config.mixin_class_rgx
+        self._async_generators = ["contextlib.asynccontextmanager"]
+
+    @checker_utils.only_required_for_messages("yield-inside-async-function")
+    def visit_asyncfunctiondef(self, node: nodes.AsyncFunctionDef) -> None:
+        for child in node.nodes_of_class(nodes.Yield):
+            if child.scope() is node and (
+                sys.version_info[:2] == (3, 5) or isinstance(child, nodes.YieldFrom)
+            ):
+                self.add_message("yield-inside-async-function", node=child)
+
+    @checker_utils.only_required_for_messages("not-async-context-manager")
+    def visit_asyncwith(self, node: nodes.AsyncWith) -> None:
+        for ctx_mgr, _ in node.items:
+            inferred = checker_utils.safe_infer(ctx_mgr)
+            if inferred is None or isinstance(inferred, util.UninferableBase):
+                continue
+
+            if isinstance(inferred, nodes.AsyncFunctionDef):
+                # Check if we are dealing with a function decorated
+                # with contextlib.asynccontextmanager.
+                if decorated_with(inferred, self._async_generators):
+                    continue
+            elif isinstance(inferred, astroid.bases.AsyncGenerator):
+                # Check if we are dealing with a function decorated
+                # with contextlib.asynccontextmanager.
+                if decorated_with(inferred.parent, self._async_generators):
+                    continue
+            else:
+                try:
+                    inferred.getattr("__aenter__")
+                    inferred.getattr("__aexit__")
+                except astroid.exceptions.NotFoundError:
+                    if isinstance(inferred, astroid.Instance):
+                        # If we do not know the bases of this class,
+                        # just skip it.
+                        if not checker_utils.has_known_bases(inferred):
+                            continue
+                        # Ignore mixin classes if they match the rgx option.
+                        if (
+                            "not-async-context-manager"
+                            in self.linter.config.ignored_checks_for_mixins
+                            and self._mixin_class_rgx.match(inferred.name)
+                        ):
+                            continue
+                else:
+                    continue
+            self.add_message(
+                "not-async-context-manager", node=node, args=(inferred.name,)
+            )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(AsyncChecker(linter))
diff --git a/pylint/checkers/bad_chained_comparison.py b/pylint/checkers/bad_chained_comparison.py
index 09763c88d..2e1912160 100644
--- a/pylint/checkers/bad_chained_comparison.py
+++ b/pylint/checkers/bad_chained_comparison.py
@@ -1,20 +1,60 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-COMPARISON_OP = frozenset(('<', '<=', '>', '>=', '!=', '=='))
-IDENTITY_OP = frozenset(('is', 'is not'))
-MEMBERSHIP_OP = frozenset(('in', 'not in'))
+
+COMPARISON_OP = frozenset(("<", "<=", ">", ">=", "!=", "=="))
+IDENTITY_OP = frozenset(("is", "is not"))
+MEMBERSHIP_OP = frozenset(("in", "not in"))


 class BadChainedComparisonChecker(BaseChecker):
     """Checks for unintentional usage of chained comparison."""
-    name = 'bad-chained-comparison'
-    msgs = {'W3601': (
-        'Suspicious %s-part chained comparison using semantically incompatible operators (%s)'
-        , 'bad-chained-comparison',
-        'Used when there is a chained comparison where one expression is part of two comparisons that belong to different semantic groups ("<" does not mean the same thing as "is", chaining them in "0 < x is None" is probably a mistake).'
-        )}
+
+    name = "bad-chained-comparison"
+    msgs = {
+        "W3601": (
+            "Suspicious %s-part chained comparison using semantically incompatible operators (%s)",
+            "bad-chained-comparison",
+            "Used when there is a chained comparison where one expression is part "
+            "of two comparisons that belong to different semantic groups "
+            '("<" does not mean the same thing as "is", chaining them in '
+            '"0 < x is None" is probably a mistake).',
+        )
+    }
+
+    def _has_diff_semantic_groups(self, operators: list[str]) -> bool:
+        # Check if comparison operators are in the same semantic group
+        for semantic_group in (COMPARISON_OP, IDENTITY_OP, MEMBERSHIP_OP):
+            if operators[0] in semantic_group:
+                group = semantic_group
+        return not all(o in group for o in operators)
+
+    def visit_compare(self, node: nodes.Compare) -> None:
+        operators = sorted({op[0] for op in node.ops})
+        if self._has_diff_semantic_groups(operators):
+            num_parts = f"{len(node.ops)}"
+            incompatibles = (
+                ", ".join(f"'{o}'" for o in operators[:-1]) + f" and '{operators[-1]}'"
+            )
+            self.add_message(
+                "bad-chained-comparison",
+                node=node,
+                args=(num_parts, incompatibles),
+                confidence=HIGH,
+            )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(BadChainedComparisonChecker(linter))
diff --git a/pylint/checkers/base/basic_checker.py b/pylint/checkers/base/basic_checker.py
index 2c39eeb46..bd3190528 100644
--- a/pylint/checkers/base/basic_checker.py
+++ b/pylint/checkers/base/basic_checker.py
@@ -1,16 +1,25 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Basic checker for Python code."""
+
 from __future__ import annotations
+
 import collections
 import itertools
 from collections.abc import Iterator
 from typing import TYPE_CHECKING, Literal, cast
+
 import astroid
 from astroid import nodes, objects, util
+
 from pylint import utils as lint_utils
 from pylint.checkers import BaseChecker, utils
 from pylint.interfaces import HIGH, INFERENCE, Confidence
 from pylint.reporters.ureports import nodes as reporter_nodes
 from pylint.utils import LinterStats
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter

@@ -19,29 +28,77 @@ class _BasicChecker(BaseChecker):
     """Permits separating multiple checks with the same checker name into
     classes/file.
     """
-    name = 'basic'
-

-REVERSED_PROTOCOL_METHOD = '__reversed__'
-SEQUENCE_PROTOCOL_METHODS = '__getitem__', '__len__'
-REVERSED_METHODS = SEQUENCE_PROTOCOL_METHODS, (REVERSED_PROTOCOL_METHOD,)
-DEFAULT_ARGUMENT_SYMBOLS = dict(zip(['.'.join(['builtins', x]) for x in (
-    'set', 'dict', 'list')], ['set()', '{}', '[]']), **{x: f'{x}()' for x in
-    ('collections.deque', 'collections.ChainMap', 'collections.Counter',
-    'collections.OrderedDict', 'collections.defaultdict',
-    'collections.UserDict', 'collections.UserList')})
-
-
-def report_by_type_stats(sect: reporter_nodes.Section, stats: LinterStats,
-    old_stats: (LinterStats | None)) ->None:
+    name = "basic"
+
+
+REVERSED_PROTOCOL_METHOD = "__reversed__"
+SEQUENCE_PROTOCOL_METHODS = ("__getitem__", "__len__")
+REVERSED_METHODS = (SEQUENCE_PROTOCOL_METHODS, (REVERSED_PROTOCOL_METHOD,))
+# A mapping from qname -> symbol, to be used when generating messages
+# about dangerous default values as arguments
+DEFAULT_ARGUMENT_SYMBOLS = dict(
+    zip(
+        [".".join(["builtins", x]) for x in ("set", "dict", "list")],
+        ["set()", "{}", "[]"],
+    ),
+    **{
+        x: f"{x}()"
+        for x in (
+            "collections.deque",
+            "collections.ChainMap",
+            "collections.Counter",
+            "collections.OrderedDict",
+            "collections.defaultdict",
+            "collections.UserDict",
+            "collections.UserList",
+        )
+    },
+)
+
+
+def report_by_type_stats(
+    sect: reporter_nodes.Section,
+    stats: LinterStats,
+    old_stats: LinterStats | None,
+) -> None:
     """Make a report of.

     * percentage of different types documented
     * percentage of different types with a bad name
     """
-    pass
-
-
+    # percentage of different types documented and/or with a bad name
+    nice_stats: dict[str, dict[str, str]] = {}
+    for node_type in ("module", "class", "method", "function"):
+        node_type = cast(Literal["function", "class", "method", "module"], node_type)
+        total = stats.get_node_count(node_type)
+        nice_stats[node_type] = {}
+        if total != 0:
+            undocumented_node = stats.get_undocumented(node_type)
+            documented = total - undocumented_node
+            percent = (documented * 100.0) / total
+            nice_stats[node_type]["percent_documented"] = f"{percent:.2f}"
+            badname_node = stats.get_bad_names(node_type)
+            percent = (badname_node * 100.0) / total
+            nice_stats[node_type]["percent_badname"] = f"{percent:.2f}"
+    lines = ["type", "number", "old number", "difference", "%documented", "%badname"]
+    for node_type in ("module", "class", "method", "function"):
+        node_type = cast(Literal["function", "class", "method", "module"], node_type)
+        new = stats.get_node_count(node_type)
+        old = old_stats.get_node_count(node_type) if old_stats else None
+        diff_str = lint_utils.diff_string(old, new) if old else None
+        lines += [
+            node_type,
+            str(new),
+            str(old) if old else "NC",
+            diff_str if diff_str else "NC",
+            nice_stats[node_type].get("percent_documented", "0"),
+            nice_stats[node_type].get("percent_badname", "0"),
+        ]
+    sect.append(reporter_nodes.Table(children=lines, cols=6, rheaders=1))
+
+
+# pylint: disable-next = too-many-public-methods
 class BasicChecker(_BasicChecker):
     """Basic checker.

@@ -54,129 +111,531 @@ class BasicChecker(_BasicChecker):
     * redefinition of function / method / class
     * uses of the global statement
     """
-    name = 'basic'
-    msgs = {'W0101': ('Unreachable code', 'unreachable',
-        'Used when there is some code behind a "return" or "raise" statement, which will never be accessed.'
-        ), 'W0102': ('Dangerous default value %s as argument',
-        'dangerous-default-value',
-        'Used when a mutable value as list or dictionary is detected in a default value for an argument.'
-        ), 'W0104': ('Statement seems to have no effect',
-        'pointless-statement',
-        "Used when a statement doesn't have (or at least seems to) any effect."
-        ), 'W0105': ('String statement has no effect',
-        'pointless-string-statement',
-        "Used when a string is used as a statement (which of course has no effect). This is a particular case of W0104 with its own message so you can easily disable it if you're using those strings as documentation, instead of comments."
-        ), 'W0106': ('Expression "%s" is assigned to nothing',
-        'expression-not-assigned',
-        'Used when an expression that is not a function call is assigned to nothing. Probably something else was intended.'
-        ), 'W0108': ('Lambda may not be necessary', 'unnecessary-lambda',
-        'Used when the body of a lambda expression is a function call on the same argument list as the lambda itself; such lambda expressions are in all but a few cases replaceable with the function being called in the body of the lambda.'
-        ), 'W0109': ('Duplicate key %r in dictionary', 'duplicate-key',
-        'Used when a dictionary expression binds the same key multiple times.'
-        ), 'W0122': ('Use of exec', 'exec-used',
-        "Raised when the 'exec' statement is used. It's dangerous to use this function for a user input, and it's also slower than actual code in general. This doesn't mean you should never use it, but you should consider alternatives first and restrict the functions available."
-        ), 'W0123': ('Use of eval', 'eval-used',
-        'Used when you use the "eval" function, to discourage its usage. Consider using `ast.literal_eval` for safely evaluating strings containing Python expressions from untrusted sources.'
-        ), 'W0150': ('%s statement in finally block may swallow exception',
-        'lost-exception',
-        'Used when a break or a return statement is found inside the finally clause of a try...finally block: the exceptions raised in the try clause will be silently swallowed instead of being re-raised.'
-        ), 'W0199': (
-        "Assert called on a populated tuple. Did you mean 'assert x,y'?",
-        'assert-on-tuple',
-        'A call of assert on a tuple will always evaluate to true if the tuple is not empty, and will always evaluate to false if it is.'
-        ), 'W0124': (
-        'Following "as" with another context manager looks like a tuple.',
-        'confusing-with-statement',
-        "Emitted when a `with` statement component returns multiple values and uses name binding with `as` only for a part of those values, as in with ctx() as a, b. This can be misleading, since it's not clear if the context manager returns a tuple or if the node without a name binding is another context manager."
-        ), 'W0125': ('Using a conditional statement with a constant value',
-        'using-constant-test',
-        'Emitted when a conditional statement (If or ternary if) uses a constant value for its test. This might not be what the user intended to do.'
-        ), 'W0126': (
-        'Using a conditional statement with potentially wrong function or method call due to missing parentheses'
-        , 'missing-parentheses-for-call-in-test',
-        'Emitted when a conditional statement (If or ternary if) seems to wrongly call a function due to missing parentheses'
-        ), 'W0127': ('Assigning the same variable %r to itself',
-        'self-assigning-variable',
-        'Emitted when we detect that a variable is assigned to itself'),
-        'W0128': ('Redeclared variable %r in assignment',
-        'redeclared-assigned-name',
-        'Emitted when we detect that a variable was redeclared in the same assignment.'
-        ), 'E0111': ('The first reversed() argument is not a sequence',
-        'bad-reversed-sequence',
-        "Used when the first argument to reversed() builtin isn't a sequence (does not implement __reversed__, nor __getitem__ and __len__"
-        ), 'E0119': ('format function is not called on str',
-        'misplaced-format-function',
-        'Emitted when format function is not called on str object. e.g doing print("value: {}").format(123) instead of print("value: {}".format(123)). This might not be what the user intended to do.'
-        ), 'W0129': (
-        'Assert statement has a string literal as its first argument. The assert will %s fail.'
-        , 'assert-on-string-literal',
-        'Used when an assert statement has a string literal as its first argument, which will cause the assert to always pass.'
-        ), 'W0130': ('Duplicate value %r in set', 'duplicate-value',
-        'This message is emitted when a set contains the same value two or more times.'
-        ), 'W0131': ('Named expression used without context',
-        'named-expr-without-context',
-        'Emitted if named expression is used to do a regular assignment outside a context like if, for, while, or a comprehension.'
-        ), 'W0133': ('Exception statement has no effect',
-        'pointless-exception-statement',
-        'Used when an exception is created without being assigned, raised or returned for subsequent use elsewhere.'
-        ), 'W0134': ("'return' shadowed by the 'finally' clause.",
-        'return-in-finally',
-        "Emitted when a 'return' statement is found in a 'finally' block. This will overwrite the return value of a function and should be avoided."
-        )}
-    reports = ('RP0101', 'Statistics by type', report_by_type_stats),
-
-    def __init__(self, linter: PyLinter) ->None:
+
+    name = "basic"
+    msgs = {
+        "W0101": (
+            "Unreachable code",
+            "unreachable",
+            'Used when there is some code behind a "return" or "raise" '
+            "statement, which will never be accessed.",
+        ),
+        "W0102": (
+            "Dangerous default value %s as argument",
+            "dangerous-default-value",
+            "Used when a mutable value as list or dictionary is detected in "
+            "a default value for an argument.",
+        ),
+        "W0104": (
+            "Statement seems to have no effect",
+            "pointless-statement",
+            "Used when a statement doesn't have (or at least seems to) any effect.",
+        ),
+        "W0105": (
+            "String statement has no effect",
+            "pointless-string-statement",
+            "Used when a string is used as a statement (which of course "
+            "has no effect). This is a particular case of W0104 with its "
+            "own message so you can easily disable it if you're using "
+            "those strings as documentation, instead of comments.",
+        ),
+        "W0106": (
+            'Expression "%s" is assigned to nothing',
+            "expression-not-assigned",
+            "Used when an expression that is not a function call is assigned "
+            "to nothing. Probably something else was intended.",
+        ),
+        "W0108": (
+            "Lambda may not be necessary",
+            "unnecessary-lambda",
+            "Used when the body of a lambda expression is a function call "
+            "on the same argument list as the lambda itself; such lambda "
+            "expressions are in all but a few cases replaceable with the "
+            "function being called in the body of the lambda.",
+        ),
+        "W0109": (
+            "Duplicate key %r in dictionary",
+            "duplicate-key",
+            "Used when a dictionary expression binds the same key multiple times.",
+        ),
+        "W0122": (
+            "Use of exec",
+            "exec-used",
+            "Raised when the 'exec' statement is used. It's dangerous to use this "
+            "function for a user input, and it's also slower than actual code in "
+            "general. This doesn't mean you should never use it, but you should "
+            "consider alternatives first and restrict the functions available.",
+        ),
+        "W0123": (
+            "Use of eval",
+            "eval-used",
+            'Used when you use the "eval" function, to discourage its '
+            "usage. Consider using `ast.literal_eval` for safely evaluating "
+            "strings containing Python expressions "
+            "from untrusted sources.",
+        ),
+        "W0150": (
+            "%s statement in finally block may swallow exception",
+            "lost-exception",
+            "Used when a break or a return statement is found inside the "
+            "finally clause of a try...finally block: the exceptions raised "
+            "in the try clause will be silently swallowed instead of being "
+            "re-raised.",
+        ),
+        "W0199": (
+            "Assert called on a populated tuple. Did you mean 'assert x,y'?",
+            "assert-on-tuple",
+            "A call of assert on a tuple will always evaluate to true if "
+            "the tuple is not empty, and will always evaluate to false if "
+            "it is.",
+        ),
+        "W0124": (
+            'Following "as" with another context manager looks like a tuple.',
+            "confusing-with-statement",
+            "Emitted when a `with` statement component returns multiple values "
+            "and uses name binding with `as` only for a part of those values, "
+            "as in with ctx() as a, b. This can be misleading, since it's not "
+            "clear if the context manager returns a tuple or if the node without "
+            "a name binding is another context manager.",
+        ),
+        "W0125": (
+            "Using a conditional statement with a constant value",
+            "using-constant-test",
+            "Emitted when a conditional statement (If or ternary if) "
+            "uses a constant value for its test. This might not be what "
+            "the user intended to do.",
+        ),
+        "W0126": (
+            "Using a conditional statement with potentially wrong function or method call due to "
+            "missing parentheses",
+            "missing-parentheses-for-call-in-test",
+            "Emitted when a conditional statement (If or ternary if) "
+            "seems to wrongly call a function due to missing parentheses",
+        ),
+        "W0127": (
+            "Assigning the same variable %r to itself",
+            "self-assigning-variable",
+            "Emitted when we detect that a variable is assigned to itself",
+        ),
+        "W0128": (
+            "Redeclared variable %r in assignment",
+            "redeclared-assigned-name",
+            "Emitted when we detect that a variable was redeclared in the same assignment.",
+        ),
+        "E0111": (
+            "The first reversed() argument is not a sequence",
+            "bad-reversed-sequence",
+            "Used when the first argument to reversed() builtin "
+            "isn't a sequence (does not implement __reversed__, "
+            "nor __getitem__ and __len__",
+        ),
+        "E0119": (
+            "format function is not called on str",
+            "misplaced-format-function",
+            "Emitted when format function is not called on str object. "
+            'e.g doing print("value: {}").format(123) instead of '
+            'print("value: {}".format(123)). This might not be what the user '
+            "intended to do.",
+        ),
+        "W0129": (
+            "Assert statement has a string literal as its first argument. The assert will %s fail.",
+            "assert-on-string-literal",
+            "Used when an assert statement has a string literal as its first argument, which will "
+            "cause the assert to always pass.",
+        ),
+        "W0130": (
+            "Duplicate value %r in set",
+            "duplicate-value",
+            "This message is emitted when a set contains the same value two or more times.",
+        ),
+        "W0131": (
+            "Named expression used without context",
+            "named-expr-without-context",
+            "Emitted if named expression is used to do a regular assignment "
+            "outside a context like if, for, while, or a comprehension.",
+        ),
+        "W0133": (
+            "Exception statement has no effect",
+            "pointless-exception-statement",
+            "Used when an exception is created without being assigned, raised or returned "
+            "for subsequent use elsewhere.",
+        ),
+        "W0134": (
+            "'return' shadowed by the 'finally' clause.",
+            "return-in-finally",
+            "Emitted when a 'return' statement is found in a 'finally' block. This will overwrite "
+            "the return value of a function and should be avoided.",
+        ),
+    }
+
    # Registers the "statistics by type" report (RP0101), fed by the
    # node_count statistics updated in the visit_* methods below.
    reports = (("RP0101", "Statistics by type", report_by_type_stats),)
+
    def __init__(self, linter: PyLinter) -> None:
        """Initialize the checker state."""
        super().__init__(linter)
        # Declared here, populated in open(): stack of Try nodes currently
        # enclosing the node being visited (see visit_try / leave_try).
        self._trys: list[nodes.Try]

    def open(self) -> None:
        """Initialize visit variables and statistics."""
        py_version = self.linter.config.py_version
        # dicts are reversible only from Python 3.8 on; used by _check_reversed.
        self._py38_plus = py_version >= (3, 8)
        self._trys = []
        self.linter.stats.reset_node_count()
+
    @utils.only_required_for_messages(
        "using-constant-test", "missing-parentheses-for-call-in-test"
    )
    def visit_if(self, node: nodes.If) -> None:
        """Check the test of an ``if`` statement for constant values."""
        self._check_using_constant_test(node, node.test)
+
    @utils.only_required_for_messages(
        "using-constant-test", "missing-parentheses-for-call-in-test"
    )
    def visit_ifexp(self, node: nodes.IfExp) -> None:
        """Check the test of a ternary expression for constant values."""
        self._check_using_constant_test(node, node.test)
+
+    @utils.only_required_for_messages(
+        "using-constant-test", "missing-parentheses-for-call-in-test"
+    )
+    def visit_comprehension(self, node: nodes.Comprehension) -> None:
+        if node.ifs:
+            for if_test in node.ifs:
+                self._check_using_constant_test(node, if_test)
+
+    def _check_using_constant_test(
+        self,
+        node: nodes.If | nodes.IfExp | nodes.Comprehension,
+        test: nodes.NodeNG | None,
+    ) -> None:
+        const_nodes = (
+            nodes.Module,
+            nodes.GeneratorExp,
+            nodes.Lambda,
+            nodes.FunctionDef,
+            nodes.ClassDef,
+            astroid.bases.Generator,
+            astroid.UnboundMethod,
+            astroid.BoundMethod,
+            nodes.Module,
+        )
+        structs = (nodes.Dict, nodes.Tuple, nodes.Set, nodes.List)
+
+        # These nodes are excepted, since they are not constant
+        # values, requiring a computation to happen.
+        except_nodes = (
+            nodes.Call,
+            nodes.BinOp,
+            nodes.BoolOp,
+            nodes.UnaryOp,
+            nodes.Subscript,
+        )
+        inferred = None
+        emit = isinstance(test, (nodes.Const, *structs, *const_nodes))
+        maybe_generator_call = None
+        if not isinstance(test, except_nodes):
+            inferred = utils.safe_infer(test)
+            if isinstance(inferred, util.UninferableBase) and isinstance(
+                test, nodes.Name
+            ):
+                emit, maybe_generator_call = BasicChecker._name_holds_generator(test)
+
+        # Emit if calling a function that only returns GeneratorExp (always tests True)
+        elif isinstance(test, nodes.Call):
+            maybe_generator_call = test
+        if maybe_generator_call:
+            inferred_call = utils.safe_infer(maybe_generator_call.func)
+            if isinstance(inferred_call, nodes.FunctionDef):
+                # Can't use all(x) or not any(not x) for this condition, because it
+                # will return True for empty generators, which is not what we want.
+                all_returns_were_generator = None
+                for return_node in inferred_call._get_return_nodes_skip_functions():
+                    if not isinstance(return_node.value, nodes.GeneratorExp):
+                        all_returns_were_generator = False
+                        break
+                    all_returns_were_generator = True
+                if all_returns_were_generator:
+                    self.add_message(
+                        "using-constant-test", node=node, confidence=INFERENCE
+                    )
+                    return
+
+        if emit:
+            self.add_message("using-constant-test", node=test, confidence=INFERENCE)
+        elif isinstance(inferred, const_nodes):
+            # If the constant node is a FunctionDef or Lambda then
+            # it may be an illicit function call due to missing parentheses
+            call_inferred = None
+            try:
+                # Just forcing the generator to infer all elements.
+                # astroid.exceptions.InferenceError are false positives
+                # see https://github.com/pylint-dev/pylint/pull/8185
+                if isinstance(inferred, nodes.FunctionDef):
+                    call_inferred = list(inferred.infer_call_result(node))
+                elif isinstance(inferred, nodes.Lambda):
+                    call_inferred = list(inferred.infer_call_result(node))
+            except astroid.InferenceError:
+                call_inferred = None
+            if call_inferred:
+                self.add_message(
+                    "missing-parentheses-for-call-in-test",
+                    node=test,
+                    confidence=INFERENCE,
+                )
+            self.add_message("using-constant-test", node=test, confidence=INFERENCE)

    @staticmethod
    def _name_holds_generator(test: nodes.Name) -> tuple[bool, nodes.Call | None]:
        """Return whether `test` tests a name certain to hold a generator, or optionally
        a call that should be then tested to see if *it* returns only generators.
        """
        assert isinstance(test, nodes.Name)
        emit = False
        maybe_generator_call = None
        lookup_result = test.frame().lookup(test.name)
        if not lookup_result:
            return emit, maybe_generator_call
        # Lazily map each plain assignment of this name to whether its RHS is
        # a generator expression.
        maybe_generator_assigned = (
            isinstance(assign_name.parent.value, nodes.GeneratorExp)
            for assign_name in lookup_result[1]
            if isinstance(assign_name.parent, nodes.Assign)
        )
        first_item = next(maybe_generator_assigned, None)
        if first_item is not None:
            # Emit if this variable is certain to hold a generator
            # (first_item was already consumed, so chain it back in).
            if all(itertools.chain((first_item,), maybe_generator_assigned)):
                emit = True
            # If this variable holds the result of a call, save it for next test
            elif (
                len(lookup_result[1]) == 1
                and isinstance(lookup_result[1][0].parent, nodes.Assign)
                and isinstance(lookup_result[1][0].parent.value, nodes.Call)
            ):
                maybe_generator_call = lookup_result[1][0].parent.value
        return emit, maybe_generator_call
+
    def visit_module(self, _: nodes.Module) -> None:
        """Check module name, docstring and required arguments."""
        # Only the per-type statistics are updated here; the name/docstring
        # checks live in other checkers.
        self.linter.stats.node_count["module"] += 1

    def visit_classdef(self, _: nodes.ClassDef) -> None:
        """Check module name, docstring and redefinition
        increment branch counter.
        """
        self.linter.stats.node_count["klass"] += 1
+
    @utils.only_required_for_messages(
        "pointless-statement",
        "pointless-exception-statement",
        "pointless-string-statement",
        "expression-not-assigned",
        "named-expr-without-context",
    )
    def visit_expr(self, node: nodes.Expr) -> None:
        """Check for various kind of statements without effect.

        Dispatches to one of five messages depending on the shape of the
        expression statement (string literal, exception instantiation,
        named expression, call-containing expression, or anything else).
        """
        expr = node.value
        if isinstance(expr, nodes.Const) and isinstance(expr.value, str):
            # treat string statement in a separated message
            # Handle PEP-257 attribute docstrings.
            # An attribute docstring is defined as being a string right after
            # an assignment at the module level, class level or __init__ level.
            scope = expr.scope()
            if isinstance(scope, (nodes.ClassDef, nodes.Module, nodes.FunctionDef)):
                if isinstance(scope, nodes.FunctionDef) and scope.name != "__init__":
                    pass
                else:
                    sibling = expr.previous_sibling()
                    if (
                        sibling is not None
                        and sibling.scope() is scope
                        and isinstance(
                            sibling, (nodes.Assign, nodes.AnnAssign, nodes.TypeAlias)
                        )
                    ):
                        return
            self.add_message("pointless-string-statement", node=node)
            return

        # Warn W0133 for exceptions that are used as statements
        if isinstance(expr, nodes.Call):
            name = ""
            if isinstance(expr.func, nodes.Name):
                name = expr.func.name
            elif isinstance(expr.func, nodes.Attribute):
                name = expr.func.attrname

            # Heuristic: only run inference for names that begin with an uppercase char
            # This reduces W0133's coverage, but retains acceptable runtime performance
            # For more details, see: https://github.com/pylint-dev/pylint/issues/8073
            inferred = utils.safe_infer(expr) if name[:1].isupper() else None
            if isinstance(inferred, objects.ExceptionInstance):
                self.add_message(
                    "pointless-exception-statement", node=node, confidence=INFERENCE
                )
            return

        # Ignore if this is :
        # * the unique child of a try/except body
        # * a yield statement
        # * an ellipsis (which can be used on Python 3 instead of pass)
        # warn W0106 if we have any underlying function call (we can't predict
        # side effects), else pointless-statement
        if (
            isinstance(expr, (nodes.Yield, nodes.Await))
            or (
                isinstance(node.parent, (nodes.Try, nodes.TryStar))
                and node.parent.body == [node]
            )
            or (isinstance(expr, nodes.Const) and expr.value is Ellipsis)
        ):
            return
        if isinstance(expr, nodes.NamedExpr):
            self.add_message("named-expr-without-context", node=node, confidence=HIGH)
        elif any(expr.nodes_of_class(nodes.Call)):
            self.add_message(
                "expression-not-assigned", node=node, args=expr.as_string()
            )
        else:
            self.add_message("pointless-statement", node=node)

-    @utils.only_required_for_messages('unnecessary-lambda')
-    def visit_lambda(self, node: nodes.Lambda) ->None:
-        """Check whether the lambda is suspicious."""
-        pass
+    @staticmethod
+    def _filter_vararg(
+        node: nodes.Lambda, call_args: list[nodes.NodeNG]
+    ) -> Iterator[nodes.NodeNG]:
+        # Return the arguments for the given call which are
+        # not passed as vararg.
+        for arg in call_args:
+            if isinstance(arg, nodes.Starred):
+                if (
+                    isinstance(arg.value, nodes.Name)
+                    and arg.value.name != node.args.vararg
+                ):
+                    yield arg
+            else:
+                yield arg

-    @utils.only_required_for_messages('dangerous-default-value')
-    def visit_functiondef(self, node: nodes.FunctionDef) ->None:
+    @staticmethod
+    def _has_variadic_argument(
+        args: list[nodes.Starred | nodes.Keyword], variadic_name: str
+    ) -> bool:
+        return not args or any(
+            isinstance(a.value, nodes.Name)
+            and a.value.name != variadic_name
+            or not isinstance(a.value, nodes.Name)
+            for a in args
+        )
+
    @utils.only_required_for_messages("unnecessary-lambda")
    def visit_lambda(self, node: nodes.Lambda) -> None:
        """Check whether the lambda is suspicious."""
        # if the body of the lambda is a call expression with the same
        # argument list as the lambda itself, then the lambda is
        # possibly unnecessary and at least suspicious.
        if node.args.defaults:
            # If the arguments of the lambda include defaults, then a
            # judgment cannot be made because there is no way to check
            # that the defaults defined by the lambda are the same as
            # the defaults defined by the function called in the body
            # of the lambda.
            return
        call = node.body
        if not isinstance(call, nodes.Call):
            # The body of the lambda must be a function call expression
            # for the lambda to be unnecessary.
            return
        if isinstance(node.body.func, nodes.Attribute) and isinstance(
            node.body.func.expr, nodes.Call
        ):
            # Chained call, the intermediate call might
            # return something else (but we don't check that, yet).
            return

        ordinary_args = list(node.args.args)
        new_call_args = list(self._filter_vararg(node, call.args))
        # The **kwargs / *args of the lambda must be forwarded untouched.
        if node.args.kwarg:
            if self._has_variadic_argument(call.keywords, node.args.kwarg):
                return
        elif call.keywords:
            return

        if node.args.vararg:
            if self._has_variadic_argument(call.starargs, node.args.vararg):
                return
        elif call.starargs:
            return

        # The "ordinary" arguments must be in a correspondence such that:
        # ordinary_args[i].name == call.args[i].name.
        if len(ordinary_args) != len(new_call_args):
            return
        for arg, passed_arg in zip(ordinary_args, new_call_args):
            if not isinstance(passed_arg, nodes.Name):
                return
            if arg.name != passed_arg.name:
                return

        # The lambda is necessary if it uses its parameter in the function it is
        # calling in the lambda's body
        # e.g. lambda foo: (func1 if foo else func2)(foo)
        for name in call.func.nodes_of_class(nodes.Name):
            if name.lookup(name.name)[0] is node:
                return

        self.add_message("unnecessary-lambda", line=node.fromlineno, node=node)
+
    @utils.only_required_for_messages("dangerous-default-value")
    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
        """Check function name, docstring, arguments, redefinition,
        variable names, max locals.
        """
        # Update the per-type statistics used by the RP0101 report.
        if node.is_method():
            self.linter.stats.node_count["method"] += 1
        else:
            self.linter.stats.node_count["function"] += 1
        self._check_dangerous_default(node)

    # Async functions get exactly the same treatment.
    visit_asyncfunctiondef = visit_functiondef

    def _check_dangerous_default(self, node: nodes.FunctionDef) -> None:
        """Check for dangerous default values as arguments.

        Flags mutable defaults (list/set/dict literals or calls inferring to
        them) with ``dangerous-default-value``, formatting the message to show
        what the default actually is.
        """

        def is_iterable(internal_node: nodes.NodeNG) -> bool:
            # Mutable literal node types that make a dangerous default.
            return isinstance(internal_node, (nodes.List, nodes.Set, nodes.Dict))

        defaults = (node.args.defaults or []) + (node.args.kw_defaults or [])
        for default in defaults:
            if not default:
                continue
            try:
                value = next(default.infer())
            except astroid.InferenceError:
                continue

            if (
                isinstance(value, astroid.Instance)
                and value.qname() in DEFAULT_ARGUMENT_SYMBOLS
            ):
                if value is default:
                    msg = DEFAULT_ARGUMENT_SYMBOLS[value.qname()]
                elif isinstance(value, astroid.Instance) or is_iterable(value):
                    # We are here in the following situation(s):
                    #   * a dict/set/list/tuple call which wasn't inferred
                    #     to a syntax node ({}, () etc.). This can happen
                    #     when the arguments are invalid or unknown to
                    #     the inference.
                    #   * a variable from somewhere else, which turns out to be a list
                    #     or a dict.
                    if is_iterable(default):
                        msg = value.pytype()
                    elif isinstance(default, nodes.Call):
                        msg = f"{value.name}() ({value.qname()})"
                    else:
                        msg = f"{default.as_string()} ({value.qname()})"
                else:
                    # this argument is a name
                    msg = f"{default.as_string()} ({DEFAULT_ARGUMENT_SYMBOLS[value.qname()]})"
                self.add_message("dangerous-default-value", node=node, args=(msg,))
+
    @utils.only_required_for_messages("unreachable", "lost-exception")
    def visit_return(self, node: nodes.Return) -> None:
        """Return node visitor.

        1 - check if the node has a right sibling (if so, that's some
        unreachable code)
        2 - check if the node is inside the 'finally' clause of a 'try...finally'
        block
        """
        self._check_unreachable(node)
        # Is it inside final body of a try...finally block ?
        self._check_not_in_finally(node, "return", (nodes.FunctionDef,))

    @utils.only_required_for_messages("unreachable")
    def visit_continue(self, node: nodes.Continue) -> None:
        """Check is the node has a right sibling (if so, that's some unreachable
        code).
        """
        self._check_unreachable(node)

    @utils.only_required_for_messages("unreachable", "lost-exception")
    def visit_break(self, node: nodes.Break) -> None:
        """Break node visitor.

        1 - check if the node has a right sibling (if so, that's some
        unreachable code)
        2 - check if the node is inside the 'finally' clause of a 'try...finally'
        block
        """
        # 1 - Is it right sibling ?
        self._check_unreachable(node)
        # 2 - Is it inside final body of a try...finally block ?
        self._check_not_in_finally(node, "break", (nodes.For, nodes.While))

    @utils.only_required_for_messages("unreachable")
    def visit_raise(self, node: nodes.Raise) -> None:
        """Check if the node has a right sibling (if so, that's some unreachable
        code).
        """
        self._check_unreachable(node)
+
    def _check_misplaced_format_function(self, call_node: nodes.Call) -> None:
        """Emit ``misplaced-format-function`` for ``print(...).format(...)``.

        Catches ``print("{}").format(x)`` written instead of
        ``print("{}".format(x))``.
        """
        if not isinstance(call_node.func, nodes.Attribute):
            return
        if call_node.func.attrname != "format":
            return

        expr = utils.safe_infer(call_node.func.expr)
        if isinstance(expr, util.UninferableBase):
            return
        if not expr:
            # we are doubtful on inferred type of node, so here just check if format
            # was called on print()
            call_expr = call_node.func.expr
            if not isinstance(call_expr, nodes.Call):
                return
            if (
                isinstance(call_expr.func, nodes.Name)
                and call_expr.func.name == "print"
            ):
                self.add_message("misplaced-format-function", node=call_node)
+
    @utils.only_required_for_messages(
        "eval-used",
        "exec-used",
        "bad-reversed-sequence",
        "misplaced-format-function",
        "unreachable",
    )
    def visit_call(self, node: nodes.Call) -> None:
        """Visit a Call node."""
        # Calls to sys.exit() & co. make everything after them unreachable.
        if utils.is_terminating_func(node):
            self._check_unreachable(node, confidence=INFERENCE)
        self._check_misplaced_format_function(node)
        if isinstance(node.func, nodes.Name):
            name = node.func.name
            # ignore the name if it's not a builtin (i.e. not defined in the
            # locals nor globals scope)
            if not (name in node.frame() or name in node.root()):
                if name == "exec":
                    self.add_message("exec-used", node=node)
                elif name == "reversed":
                    self._check_reversed(node)
                elif name == "eval":
                    self.add_message("eval-used", node=node)
+
+    @utils.only_required_for_messages("assert-on-tuple", "assert-on-string-literal")
+    def visit_assert(self, node: nodes.Assert) -> None:
         """Check whether assert is used on a tuple or string literal."""
-        pass
-
-    @utils.only_required_for_messages('duplicate-key')
-    def visit_dict(self, node: nodes.Dict) ->None:
+        if isinstance(node.test, nodes.Tuple) and len(node.test.elts) > 0:
+            self.add_message("assert-on-tuple", node=node, confidence=HIGH)
+
+        if isinstance(node.test, nodes.Const) and isinstance(node.test.value, str):
+            if node.test.value:
+                when = "never"
+            else:
+                when = "always"
+            self.add_message("assert-on-string-literal", node=node, args=(when,))
+
+    @utils.only_required_for_messages("duplicate-key")
+    def visit_dict(self, node: nodes.Dict) -> None:
         """Check duplicate key in dictionary."""
-        pass
-
-    @utils.only_required_for_messages('duplicate-value')
-    def visit_set(self, node: nodes.Set) ->None:
+        keys = set()
+        for k, _ in node.items:
+            if isinstance(k, nodes.Const):
+                key = k.value
+            elif isinstance(k, nodes.Attribute):
+                key = k.as_string()
+            else:
+                continue
+            if key in keys:
+                self.add_message("duplicate-key", node=node, args=key)
+            keys.add(key)
+
+    @utils.only_required_for_messages("duplicate-value")
+    def visit_set(self, node: nodes.Set) -> None:
         """Check duplicate value in set."""
-        pass
-
-    def visit_try(self, node: nodes.Try) ->None:
+        values = set()
+        for v in node.elts:
+            if isinstance(v, nodes.Const):
+                value = v.value
+            else:
+                continue
+            if value in values:
+                self.add_message(
+                    "duplicate-value", node=node, args=value, confidence=HIGH
+                )
+            values.add(value)
+
+    def visit_try(self, node: nodes.Try) -> None:
         """Update try block flag."""
-        pass
+        self._trys.append(node)

-    def leave_try(self, _: nodes.Try) ->None:
+        for final_node in node.finalbody:
+            for return_node in final_node.nodes_of_class(nodes.Return):
+                self.add_message("return-in-finally", node=return_node, confidence=HIGH)
+
    def leave_try(self, _: nodes.Try) -> None:
        """Update try block flag."""
        # Pops the node pushed by visit_try; _trys mirrors the current
        # nesting of try statements.
        self._trys.pop()

    def _check_unreachable(
        self,
        node: nodes.Return | nodes.Continue | nodes.Break | nodes.Raise | nodes.Call,
        confidence: Confidence = HIGH,
    ) -> None:
        """Check unreachable code.

        Emits ``unreachable`` on the statement following *node* (a terminating
        statement or call), if any.
        """
        unreachable_statement = node.next_sibling()
        if unreachable_statement is not None:
            if (
                isinstance(node, nodes.Return)
                and isinstance(unreachable_statement, nodes.Expr)
                and isinstance(unreachable_statement.value, nodes.Yield)
            ):
                # Don't add 'unreachable' for empty generators.
                # Only add warning if 'yield' is followed by another node.
                unreachable_statement = unreachable_statement.next_sibling()
                if unreachable_statement is None:
                    return
            self.add_message(
                "unreachable", node=unreachable_statement, confidence=confidence
            )
+
    def _check_not_in_finally(
        self,
        node: nodes.Break | nodes.Return,
        node_name: str,
        breaker_classes: tuple[nodes.NodeNG, ...] = (),
    ) -> None:
        """Check that a node is not inside a 'finally' clause of a
        'try...finally' statement.

        If we find a parent which type is in breaker_classes before
        a 'try...finally' block we skip the whole check.
        """
        # if self._trys is empty, we're not an in try block
        if not self._trys:
            return
        # the node could be a grand-grand...-child of the 'try...finally'
        _parent = node.parent
        _node = node
        while _parent and not isinstance(_parent, breaker_classes):
            # Emits 'lost-exception' when the node sits in a finalbody.
            if hasattr(_parent, "finalbody") and _node in _parent.finalbody:
                self.add_message("lost-exception", node=node, args=node_name)
                return
            _node = _parent
            _parent = _node.parent
+
    def _check_reversed(self, node: nodes.Call) -> None:
        """Check that the argument to `reversed` is a sequence.

        Emits ``bad-reversed-sequence`` when the argument implements neither
        ``__reversed__`` nor the ``__getitem__``/``__len__`` pair.
        """
        try:
            argument = utils.safe_infer(utils.get_argument_from_call(node, position=0))
        except utils.NoSuchArgumentError:
            pass
        else:
            if isinstance(argument, util.UninferableBase):
                return
            if argument is None:
                # Nothing was inferred.
                # Try to see if we have iter().
                if isinstance(node.args[0], nodes.Call):
                    try:
                        func = next(node.args[0].func.infer())
                    except astroid.InferenceError:
                        return
                    if getattr(
                        func, "name", None
                    ) == "iter" and utils.is_builtin_object(func):
                        # reversed(iter(x)) never works: iterators aren't sequences.
                        self.add_message("bad-reversed-sequence", node=node)
                return

            if isinstance(argument, (nodes.List, nodes.Tuple)):
                return

            # dicts are reversible, but only from Python 3.8 onward. Prior to
            # that, any class based on dict must explicitly provide a
            # __reversed__ method
            if not self._py38_plus and isinstance(argument, astroid.Instance):
                if any(
                    ancestor.name == "dict" and utils.is_builtin_object(ancestor)
                    for ancestor in itertools.chain(
                        (argument._proxied,), argument._proxied.ancestors()
                    )
                ):
                    try:
                        argument.locals[REVERSED_PROTOCOL_METHOD]
                    except KeyError:
                        self.add_message("bad-reversed-sequence", node=node)
                    return

            if hasattr(argument, "getattr"):
                # everything else is not a proper sequence for reversed()
                # REVERSED_METHODS holds alternative method-name groups; one
                # full group must be present for the argument to qualify.
                for methods in REVERSED_METHODS:
                    for meth in methods:
                        try:
                            argument.getattr(meth)
                        except astroid.NotFoundError:
                            break
                    else:
                        break
                else:
                    self.add_message("bad-reversed-sequence", node=node)
            else:
                self.add_message("bad-reversed-sequence", node=node)
+
    @utils.only_required_for_messages("confusing-with-statement")
    def visit_with(self, node: nodes.With) -> None:
        """Emit ``confusing-with-statement`` for ``with ctx() as a, b:`` shapes."""
        # a "with" statement with multiple managers corresponds
        # to one AST "With" node with multiple items
        pairs = node.items
        if pairs:
            for prev_pair, pair in zip(pairs, pairs[1:]):
                if isinstance(prev_pair[1], nodes.AssignName) and (
                    pair[1] is None and not isinstance(pair[0], nodes.Call)
                ):
                    # Don't emit a message if the second is a function call
                    # there's no way that can be mistaken for a name assignment.
                    # If the line number doesn't match
                    # we assume it's a nested "with".
                    self.add_message("confusing-with-statement", node=node)
+
    def _check_self_assigning_variable(self, node: nodes.Assign) -> None:
        """Detect assigning a variable to itself (``x = x`` or ``a, b = a, b``)."""
        scope = node.scope()
        scope_locals = scope.locals

        rhs_names = []
        targets = node.targets
        if isinstance(targets[0], nodes.Tuple):
            if len(targets) != 1:
                # A complex assignment, so bail out early.
                return
            targets = targets[0].elts
            if len(targets) == 1:
                # Unpacking a variable into the same name.
                return

        if isinstance(node.value, nodes.Name):
            if len(targets) != 1:
                return
            rhs_names = [node.value]
        elif isinstance(node.value, nodes.Tuple):
            rhs_count = len(node.value.elts)
            if len(targets) != rhs_count or rhs_count == 1:
                return
            rhs_names = node.value.elts

        # Pairwise comparison of LHS targets with RHS names.
        for target, lhs_name in zip(targets, rhs_names):
            if not isinstance(lhs_name, nodes.Name):
                continue
            if not isinstance(target, nodes.AssignName):
                continue
            # Check that the scope is different from a class level, which is usually
            # a pattern to expose module level attributes as class level ones.
            if isinstance(scope, nodes.ClassDef) and target.name in scope_locals:
                continue
            if target.name == lhs_name.name:
                self.add_message(
                    "self-assigning-variable", args=(target.name,), node=target
                )
+
    def _check_redeclared_assign_name(self, targets: list[nodes.NodeNG | None]) -> None:
        """Emit ``redeclared-assigned-name`` when a tuple target repeats a name.

        e.g. ``a, a = 1, 2``; nested tuple targets are checked recursively.
        """
        dummy_variables_rgx = self.linter.config.dummy_variables_rgx

        for target in targets:
            if not isinstance(target, nodes.Tuple):
                continue

            found_names = []
            for element in target.elts:
                if isinstance(element, nodes.Tuple):
                    self._check_redeclared_assign_name([element])
                elif isinstance(element, nodes.AssignName) and element.name != "_":
                    if dummy_variables_rgx and dummy_variables_rgx.match(element.name):
                        # NOTE(review): this returns from the whole method, not
                        # just skipping this element — presumably intentional
                        # (a dummy name marks the target as deliberate), but
                        # worth confirming.
                        return
                    found_names.append(element.name)

            names = collections.Counter(found_names)
            for name, count in names.most_common():
                if count > 1:
                    self.add_message(
                        "redeclared-assigned-name", args=(name,), node=target
                    )
+
    @utils.only_required_for_messages(
        "self-assigning-variable", "redeclared-assigned-name"
    )
    def visit_assign(self, node: nodes.Assign) -> None:
        """Run the assignment-related checks on the node."""
        self._check_self_assigning_variable(node)
        self._check_redeclared_assign_name(node.targets)
+
    @utils.only_required_for_messages("redeclared-assigned-name")
    def visit_for(self, node: nodes.For) -> None:
        """Check the loop target for repeated names (``for a, a in ...``)."""
        self._check_redeclared_assign_name([node.target])
diff --git a/pylint/checkers/base/basic_error_checker.py b/pylint/checkers/base/basic_error_checker.py
index 14c962bfb..e58be2309 100644
--- a/pylint/checkers/base/basic_error_checker.py
+++ b/pylint/checkers/base/basic_error_checker.py
@@ -1,22 +1,31 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Basic Error checker from the basic checker."""
+
 from __future__ import annotations
+
 import itertools
 from collections.abc import Iterator
 from typing import Any
+
 import astroid
 from astroid import nodes
 from astroid.typing import InferenceResult
+
 from pylint.checkers import utils
 from pylint.checkers.base.basic_checker import _BasicChecker
 from pylint.checkers.utils import infer_all
 from pylint.interfaces import HIGH
-ABC_METACLASSES = {'_py_abc.ABCMeta', 'abc.ABCMeta'}
-REDEFINABLE_METHODS = frozenset(('__module__',))
-TYPING_FORWARD_REF_QNAME = 'typing.ForwardRef'
+
+ABC_METACLASSES = {"_py_abc.ABCMeta", "abc.ABCMeta"}  # Python 3.7+
+# List of methods which can be redefined
+REDEFINABLE_METHODS = frozenset(("__module__",))
+TYPING_FORWARD_REF_QNAME = "typing.ForwardRef"


-def _get_break_loop_node(break_node: nodes.Break) ->(nodes.For | nodes.
-    While | None):
+def _get_break_loop_node(break_node: nodes.Break) -> nodes.For | nodes.While | None:
     """Returns the loop node that holds the break node in arguments.

     Args:
@@ -25,10 +34,19 @@ def _get_break_loop_node(break_node: nodes.Break) ->(nodes.For | nodes.
     Returns:
         astroid.For or astroid.While: the loop node holding the break node.
     """
-    pass
+    loop_nodes = (nodes.For, nodes.While)
+    parent = break_node.parent
+    while not isinstance(parent, loop_nodes) or break_node in getattr(
+        parent, "orelse", []
+    ):
+        break_node = parent
+        parent = parent.parent
+        if parent is None:
+            break
+    return parent


-def _loop_exits_early(loop: (nodes.For | nodes.While)) ->bool:
+def _loop_exits_early(loop: nodes.For | nodes.While) -> bool:
     """Returns true if a loop may end with a break statement.

     Args:
@@ -37,19 +55,30 @@ def _loop_exits_early(loop: (nodes.For | nodes.While)) ->bool:
     Returns:
         bool: True if the loop may end with a break statement, False otherwise.
     """
-    pass
+    loop_nodes = (nodes.For, nodes.While)
+    definition_nodes = (nodes.FunctionDef, nodes.ClassDef)
+    inner_loop_nodes: list[nodes.For | nodes.While] = [
+        _node
+        for _node in loop.nodes_of_class(loop_nodes, skip_klass=definition_nodes)
+        if _node != loop
+    ]
+    return any(
+        _node
+        for _node in loop.nodes_of_class(nodes.Break, skip_klass=definition_nodes)
+        if _get_break_loop_node(_node) not in inner_loop_nodes
+    )


-def _has_abstract_methods(node: nodes.ClassDef) ->bool:
+def _has_abstract_methods(node: nodes.ClassDef) -> bool:
     """Determine if the given `node` has abstract methods.

     The methods should be made abstract by decorating them
     with `abc` decorators.
     """
-    pass
+    return len(utils.unimplemented_abstract_methods(node)) > 0


-def redefined_by_decorator(node: nodes.FunctionDef) ->bool:
+def redefined_by_decorator(node: nodes.FunctionDef) -> bool:
     """Return True if the object is a method redefined via decorator.

     For example:
@@ -58,93 +87,495 @@ def redefined_by_decorator(node: nodes.FunctionDef) ->bool:
         @x.setter
         def x(self, value): self._x = value
     """
-    pass
+    if node.decorators:
+        for decorator in node.decorators.nodes:
+            if (
+                isinstance(decorator, nodes.Attribute)
+                and getattr(decorator.expr, "name", None) == node.name
+            ):
+                return True
+    return False


 class BasicErrorChecker(_BasicChecker):
-    msgs = {'E0100': ('__init__ method is a generator', 'init-is-generator',
-        'Used when the special class method __init__ is turned into a generator by a yield in its body.'
-        ), 'E0101': ('Explicit return in __init__', 'return-in-init',
-        'Used when the special class method __init__ has an explicit return value.'
-        ), 'E0102': ('%s already defined line %s', 'function-redefined',
-        'Used when a function / class / method is redefined.'), 'E0103': (
-        '%r not properly in loop', 'not-in-loop',
-        'Used when break or continue keywords are used outside a loop.'),
-        'E0104': ('Return outside function', 'return-outside-function',
-        'Used when a "return" statement is found outside a function or method.'
-        ), 'E0105': ('Yield outside function', 'yield-outside-function',
-        'Used when a "yield" statement is found outside a function or method.'
-        ), 'E0106': ('Return with argument inside generator',
-        'return-arg-in-generator',
-        'Used when a "return" statement with an argument is found in a generator function or method (e.g. with some "yield" statements).'
-        , {'maxversion': (3, 3)}), 'E0107': (
-        'Use of the non-existent %s operator', 'nonexistent-operator',
-        "Used when you attempt to use the C-style pre-increment or pre-decrement operator -- and ++, which doesn't exist in Python."
-        ), 'E0108': ('Duplicate argument name %s in function definition',
-        'duplicate-argument-name',
-        'Duplicate argument names in function definitions are syntax errors.'
-        ), 'E0110': ('Abstract class %r with abstract methods instantiated',
-        'abstract-class-instantiated',
-        'Used when an abstract class with `abc.ABCMeta` as metaclass has abstract methods and is instantiated.'
-        ), 'W0120': (
-        'Else clause on loop without a break statement, remove the else and de-indent all the code inside it'
-        , 'useless-else-on-loop',
-        'Loops should only have an else clause if they can exit early with a break statement, otherwise the statements under else should be on the same scope as the loop itself.'
-        ), 'E0112': ('More than one starred expression in assignment',
-        'too-many-star-expressions',
-        'Emitted when there are more than one starred expressions (`*x`) in an assignment. This is a SyntaxError.'
-        ), 'E0113': ('Starred assignment target must be in a list or tuple',
-        'invalid-star-assignment-target',
-        'Emitted when a star expression is used as a starred assignment target.'
-        ), 'E0114': ('Can use starred expression only in assignment target',
-        'star-needs-assignment-target',
-        'Emitted when a star expression is not used in an assignment target.'
-        ), 'E0115': ('Name %r is nonlocal and global',
-        'nonlocal-and-global',
-        'Emitted when a name is both nonlocal and global.'), 'E0116': (
-        "'continue' not supported inside 'finally' clause",
-        'continue-in-finally',
-        'Emitted when the `continue` keyword is found inside a finally clause, which is a SyntaxError.'
-        ), 'E0117': ('nonlocal name %s found without binding',
-        'nonlocal-without-binding',
-        'Emitted when a nonlocal variable does not have an attached name somewhere in the parent scopes'
-        ), 'E0118': ('Name %r is used prior to global declaration',
-        'used-prior-global-declaration',
-        'Emitted when a name is used prior a global declaration, which results in an error since Python 3.6.'
-        , {'minversion': (3, 6)})}
-
-    @utils.only_required_for_messages('star-needs-assignment-target')
-    def visit_starred(self, node: nodes.Starred) ->None:
+    msgs = {
+        "E0100": (
+            "__init__ method is a generator",
+            "init-is-generator",
+            "Used when the special class method __init__ is turned into a "
+            "generator by a yield in its body.",
+        ),
+        "E0101": (
+            "Explicit return in __init__",
+            "return-in-init",
+            "Used when the special class method __init__ has an explicit "
+            "return value.",
+        ),
+        "E0102": (
+            "%s already defined line %s",
+            "function-redefined",
+            "Used when a function / class / method is redefined.",
+        ),
+        "E0103": (
+            "%r not properly in loop",
+            "not-in-loop",
+            "Used when break or continue keywords are used outside a loop.",
+        ),
+        "E0104": (
+            "Return outside function",
+            "return-outside-function",
+            'Used when a "return" statement is found outside a function or method.',
+        ),
+        "E0105": (
+            "Yield outside function",
+            "yield-outside-function",
+            'Used when a "yield" statement is found outside a function or method.',
+        ),
+        "E0106": (
+            "Return with argument inside generator",
+            "return-arg-in-generator",
+            'Used when a "return" statement with an argument is found '
+            'in a generator function or method (e.g. with some "yield" statements).',
+            {"maxversion": (3, 3)},
+        ),
+        "E0107": (
+            "Use of the non-existent %s operator",
+            "nonexistent-operator",
+            "Used when you attempt to use the C-style pre-increment or "
+            "pre-decrement operator -- and ++, which doesn't exist in Python.",
+        ),
+        "E0108": (
+            "Duplicate argument name %s in function definition",
+            "duplicate-argument-name",
+            "Duplicate argument names in function definitions are syntax errors.",
+        ),
+        "E0110": (
+            "Abstract class %r with abstract methods instantiated",
+            "abstract-class-instantiated",
+            "Used when an abstract class with `abc.ABCMeta` as metaclass "
+            "has abstract methods and is instantiated.",
+        ),
+        "W0120": (
+            "Else clause on loop without a break statement, remove the else and"
+            " de-indent all the code inside it",
+            "useless-else-on-loop",
+            "Loops should only have an else clause if they can exit early "
+            "with a break statement, otherwise the statements under else "
+            "should be on the same scope as the loop itself.",
+        ),
+        "E0112": (
+            "More than one starred expression in assignment",
+            "too-many-star-expressions",
+            "Emitted when there are more than one starred "
+            "expressions (`*x`) in an assignment. This is a SyntaxError.",
+        ),
+        "E0113": (
+            "Starred assignment target must be in a list or tuple",
+            "invalid-star-assignment-target",
+            "Emitted when a star expression is used as a starred assignment target.",
+        ),
+        "E0114": (
+            "Can use starred expression only in assignment target",
+            "star-needs-assignment-target",
+            "Emitted when a star expression is not used in an assignment target.",
+        ),
+        "E0115": (
+            "Name %r is nonlocal and global",
+            "nonlocal-and-global",
+            "Emitted when a name is both nonlocal and global.",
+        ),
+        "E0116": (
+            "'continue' not supported inside 'finally' clause",
+            "continue-in-finally",
+            "Emitted when the `continue` keyword is found "
+            "inside a finally clause, which is a SyntaxError.",
+        ),
+        "E0117": (
+            "nonlocal name %s found without binding",
+            "nonlocal-without-binding",
+            "Emitted when a nonlocal variable does not have an attached "
+            "name somewhere in the parent scopes",
+        ),
+        "E0118": (
+            "Name %r is used prior to global declaration",
+            "used-prior-global-declaration",
+            "Emitted when a name is used prior a global declaration, "
+            "which results in an error since Python 3.6.",
+            {"minversion": (3, 6)},
+        ),
+    }
+
+    def open(self) -> None:
+        py_version = self.linter.config.py_version
+        self._py38_plus = py_version >= (3, 8)
+
+    @utils.only_required_for_messages("function-redefined")
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
+        self._check_redefinition("class", node)
+
+    def _too_many_starred_for_tuple(self, assign_tuple: nodes.Tuple) -> bool:
+        starred_count = 0
+        for elem in assign_tuple.itered():
+            if isinstance(elem, nodes.Tuple):
+                return self._too_many_starred_for_tuple(elem)
+            if isinstance(elem, nodes.Starred):
+                starred_count += 1
+        return starred_count > 1
+
+    @utils.only_required_for_messages(
+        "too-many-star-expressions", "invalid-star-assignment-target"
+    )
+    def visit_assign(self, node: nodes.Assign) -> None:
+        # Check *a, *b = ...
+        assign_target = node.targets[0]
+        # Check *a = b
+        if isinstance(node.targets[0], nodes.Starred):
+            self.add_message("invalid-star-assignment-target", node=node)
+
+        if not isinstance(assign_target, nodes.Tuple):
+            return
+        if self._too_many_starred_for_tuple(assign_target):
+            self.add_message("too-many-star-expressions", node=node)
+
+    @utils.only_required_for_messages("star-needs-assignment-target")
+    def visit_starred(self, node: nodes.Starred) -> None:
         """Check that a Starred expression is used in an assignment target."""
-        pass
+        if isinstance(node.parent, nodes.Call):
+            # f(*args) is converted to Call(args=[Starred]), so ignore
+            # them for this check.
+            return
+        if isinstance(node.parent, (nodes.List, nodes.Tuple, nodes.Set, nodes.Dict)):
+            # PEP 448 unpacking.
+            return
+
+        stmt = node.statement()
+        if not isinstance(stmt, nodes.Assign):
+            return
+
+        if stmt.value is node or stmt.value.parent_of(node):
+            self.add_message("star-needs-assignment-target", node=node)
+
+    @utils.only_required_for_messages(
+        "init-is-generator",
+        "return-in-init",
+        "function-redefined",
+        "return-arg-in-generator",
+        "duplicate-argument-name",
+        "nonlocal-and-global",
+        "used-prior-global-declaration",
+    )
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
+        self._check_nonlocal_and_global(node)
+        self._check_name_used_prior_global(node)
+        if not redefined_by_decorator(
+            node
+        ) and not utils.is_registered_in_singledispatch_function(node):
+            self._check_redefinition(node.is_method() and "method" or "function", node)
+        # checks for a generator __init__ or an explicit non-None return in __init__
+        returns = node.nodes_of_class(
+            nodes.Return, skip_klass=(nodes.FunctionDef, nodes.ClassDef)
+        )
+        if node.is_method() and node.name == "__init__":
+            if node.is_generator():
+                self.add_message("init-is-generator", node=node)
+            else:
+                values = [r.value for r in returns]
+                # Are we returning anything but None from constructors
+                if any(v for v in values if not utils.is_none(v)):
+                    self.add_message("return-in-init", node=node)
+        # Check for duplicate names by clustering args with same name for detailed report
+        arg_clusters = {}
+        arguments: Iterator[Any] = filter(None, [node.args.args, node.args.kwonlyargs])
+        for arg in itertools.chain.from_iterable(arguments):
+            if arg.name in arg_clusters:
+                self.add_message(
+                    "duplicate-argument-name",
+                    node=arg,
+                    args=(arg.name,),
+                    confidence=HIGH,
+                )
+            else:
+                arg_clusters[arg.name] = arg
+
     visit_asyncfunctiondef = visit_functiondef

-    def _check_nonlocal_and_global(self, node: nodes.FunctionDef) ->None:
+    def _check_name_used_prior_global(self, node: nodes.FunctionDef) -> None:
+        scope_globals = {
+            name: child
+            for child in node.nodes_of_class(nodes.Global)
+            for name in child.names
+            if child.scope() is node
+        }
+
+        if not scope_globals:
+            return
+
+        for node_name in node.nodes_of_class(nodes.Name):
+            if node_name.scope() is not node:
+                continue
+
+            name = node_name.name
+            corresponding_global = scope_globals.get(name)
+            if not corresponding_global:
+                continue
+
+            global_lineno = corresponding_global.fromlineno
+            if global_lineno and global_lineno > node_name.fromlineno:
+                self.add_message(
+                    "used-prior-global-declaration", node=node_name, args=(name,)
+                )
+
+    def _check_nonlocal_and_global(self, node: nodes.FunctionDef) -> None:
         """Check that a name is both nonlocal and global."""
-        pass

-    @utils.only_required_for_messages('nonexistent-operator')
-    def visit_unaryop(self, node: nodes.UnaryOp) ->None:
+        def same_scope(current: nodes.Global | nodes.Nonlocal) -> bool:
+            return current.scope() is node
+
+        from_iter = itertools.chain.from_iterable
+        nonlocals = set(
+            from_iter(
+                child.names
+                for child in node.nodes_of_class(nodes.Nonlocal)
+                if same_scope(child)
+            )
+        )
+
+        if not nonlocals:
+            return
+
+        global_vars = set(
+            from_iter(
+                child.names
+                for child in node.nodes_of_class(nodes.Global)
+                if same_scope(child)
+            )
+        )
+        for name in nonlocals.intersection(global_vars):
+            self.add_message("nonlocal-and-global", args=(name,), node=node)
+
+    @utils.only_required_for_messages("return-outside-function")
+    def visit_return(self, node: nodes.Return) -> None:
+        if not isinstance(node.frame(), nodes.FunctionDef):
+            self.add_message("return-outside-function", node=node)
+
+    @utils.only_required_for_messages("yield-outside-function")
+    def visit_yield(self, node: nodes.Yield) -> None:
+        self._check_yield_outside_func(node)
+
+    @utils.only_required_for_messages("yield-outside-function")
+    def visit_yieldfrom(self, node: nodes.YieldFrom) -> None:
+        self._check_yield_outside_func(node)
+
+    @utils.only_required_for_messages("not-in-loop", "continue-in-finally")
+    def visit_continue(self, node: nodes.Continue) -> None:
+        self._check_in_loop(node, "continue")
+
+    @utils.only_required_for_messages("not-in-loop")
+    def visit_break(self, node: nodes.Break) -> None:
+        self._check_in_loop(node, "break")
+
+    @utils.only_required_for_messages("useless-else-on-loop")
+    def visit_for(self, node: nodes.For) -> None:
+        self._check_else_on_loop(node)
+
+    @utils.only_required_for_messages("useless-else-on-loop")
+    def visit_while(self, node: nodes.While) -> None:
+        self._check_else_on_loop(node)
+
+    @utils.only_required_for_messages("nonexistent-operator")
+    def visit_unaryop(self, node: nodes.UnaryOp) -> None:
         """Check use of the non-existent ++ and -- operators."""
-        pass
+        if (
+            (node.op in "+-")
+            and isinstance(node.operand, nodes.UnaryOp)
+            and (node.operand.op == node.op)
+            and (node.col_offset + 1 == node.operand.col_offset)
+        ):
+            self.add_message("nonexistent-operator", node=node, args=node.op * 2)
+
+    def _check_nonlocal_without_binding(self, node: nodes.Nonlocal, name: str) -> None:
+        current_scope = node.scope()
+        while current_scope.parent is not None:
+            if not isinstance(current_scope, (nodes.ClassDef, nodes.FunctionDef)):
+                self.add_message("nonlocal-without-binding", args=(name,), node=node)
+                return
+
+            # Search for `name` in the parent scope if:
+            #  `current_scope` is the same scope in which the `nonlocal` name is declared
+            #  or `name` is not in `current_scope.locals`.
+            if current_scope is node.scope() or name not in current_scope.locals:
+                current_scope = current_scope.parent.scope()
+                continue

-    @utils.only_required_for_messages('abstract-class-instantiated')
-    def visit_call(self, node: nodes.Call) ->None:
+            # Okay, found it.
+            return
+
+        if not isinstance(current_scope, nodes.FunctionDef):
+            self.add_message(
+                "nonlocal-without-binding", args=(name,), node=node, confidence=HIGH
+            )
+
+    @utils.only_required_for_messages("nonlocal-without-binding")
+    def visit_nonlocal(self, node: nodes.Nonlocal) -> None:
+        for name in node.names:
+            self._check_nonlocal_without_binding(node, name)
+
+    @utils.only_required_for_messages("abstract-class-instantiated")
+    def visit_call(self, node: nodes.Call) -> None:
         """Check instantiating abstract class with
         abc.ABCMeta as metaclass.
         """
-        pass
+        for inferred in infer_all(node.func):
+            self._check_inferred_class_is_abstract(inferred, node)
+
+    def _check_inferred_class_is_abstract(
+        self, inferred: InferenceResult, node: nodes.Call
+    ) -> None:
+        if not isinstance(inferred, nodes.ClassDef):
+            return
+
+        klass = utils.node_frame_class(node)
+        if klass is inferred:
+            # Don't emit the warning if the class is instantiated
+            # in its own body or if the call is not an instance
+            # creation. If the class is instantiated into its own
+            # body, we're expecting that it knows what it is doing.
+            return
+
+        # __init__ was called
+        abstract_methods = _has_abstract_methods(inferred)
+
+        if not abstract_methods:
+            return

-    def _check_else_on_loop(self, node: (nodes.For | nodes.While)) ->None:
+        metaclass = inferred.metaclass()
+
+        if metaclass is None:
+            # Python 3.4 has `abc.ABC`, which won't be detected
+            # by ClassNode.metaclass()
+            for ancestor in inferred.ancestors():
+                if ancestor.qname() == "abc.ABC":
+                    self.add_message(
+                        "abstract-class-instantiated", args=(inferred.name,), node=node
+                    )
+                    break
+
+            return
+
+        if metaclass.qname() in ABC_METACLASSES:
+            self.add_message(
+                "abstract-class-instantiated", args=(inferred.name,), node=node
+            )
+
+    def _check_yield_outside_func(self, node: nodes.Yield) -> None:
+        if not isinstance(node.frame(), (nodes.FunctionDef, nodes.Lambda)):
+            self.add_message("yield-outside-function", node=node)
+
+    def _check_else_on_loop(self, node: nodes.For | nodes.While) -> None:
         """Check that any loop with an else clause has a break statement."""
-        pass
+        if node.orelse and not _loop_exits_early(node):
+            self.add_message(
+                "useless-else-on-loop",
+                node=node,
+                # This is not optimal, but the line previous
+                # to the first statement in the else clause
+                # will usually be the one that contains the else:.
+                line=node.orelse[0].lineno - 1,
+            )

-    def _check_in_loop(self, node: (nodes.Continue | nodes.Break),
-        node_name: str) ->None:
+    def _check_in_loop(
+        self, node: nodes.Continue | nodes.Break, node_name: str
+    ) -> None:
         """Check that a node is inside a for or while loop."""
-        pass
+        for parent in node.node_ancestors():
+            if isinstance(parent, (nodes.For, nodes.While)):
+                if node not in parent.orelse:
+                    return
+
+            if isinstance(parent, (nodes.ClassDef, nodes.FunctionDef)):
+                break
+            if (
+                isinstance(parent, nodes.Try)
+                and node in parent.finalbody
+                and isinstance(node, nodes.Continue)
+                and not self._py38_plus
+            ):
+                self.add_message("continue-in-finally", node=node)
+
+        self.add_message("not-in-loop", node=node, args=node_name)

-    def _check_redefinition(self, redeftype: str, node: (nodes.Call | nodes
-        .FunctionDef)) ->None:
+    def _check_redefinition(
+        self, redeftype: str, node: nodes.Call | nodes.FunctionDef
+    ) -> None:
         """Check for redefinition of a function / method / class name."""
-        pass
+        parent_frame = node.parent.frame()
+
+        # Ignore function stubs created for type information
+        redefinitions = [
+            i
+            for i in parent_frame.locals[node.name]
+            if not (isinstance(i.parent, nodes.AnnAssign) and i.parent.simple)
+        ]
+        defined_self = next(
+            (local for local in redefinitions if not utils.is_overload_stub(local)),
+            node,
+        )
+        if defined_self is not node and not astroid.are_exclusive(node, defined_self):
+            # Additional checks for methods which are not considered
+            # redefined, since they are already part of the base API.
+            if (
+                isinstance(parent_frame, nodes.ClassDef)
+                and node.name in REDEFINABLE_METHODS
+            ):
+                return
+
+            # Skip typing.overload() functions.
+            if utils.is_overload_stub(node):
+                return
+
+            # Exempt functions redefined on a condition.
+            if isinstance(node.parent, nodes.If):
+                # Exempt "if not <func>" cases
+                if (
+                    isinstance(node.parent.test, nodes.UnaryOp)
+                    and node.parent.test.op == "not"
+                    and isinstance(node.parent.test.operand, nodes.Name)
+                    and node.parent.test.operand.name == node.name
+                ):
+                    return
+
+                # Exempt "if <func> is not None" cases
+                # pylint: disable=too-many-boolean-expressions
+                if (
+                    isinstance(node.parent.test, nodes.Compare)
+                    and isinstance(node.parent.test.left, nodes.Name)
+                    and node.parent.test.left.name == node.name
+                    and node.parent.test.ops[0][0] == "is"
+                    and isinstance(node.parent.test.ops[0][1], nodes.Const)
+                    and node.parent.test.ops[0][1].value is None
+                ):
+                    return
+
+            # Check if we have forward references for this node.
+            try:
+                redefinition_index = redefinitions.index(node)
+            except ValueError:
+                pass
+            else:
+                for redefinition in redefinitions[:redefinition_index]:
+                    inferred = utils.safe_infer(redefinition)
+                    if (
+                        inferred
+                        and isinstance(inferred, astroid.Instance)
+                        and inferred.qname() == TYPING_FORWARD_REF_QNAME
+                    ):
+                        return
+
+            dummy_variables_rgx = self.linter.config.dummy_variables_rgx
+            if dummy_variables_rgx and dummy_variables_rgx.match(node.name):
+                return
+            self.add_message(
+                "function-redefined",
+                node=node,
+                args=(redeftype, defined_self.fromlineno),
+            )
diff --git a/pylint/checkers/base/comparison_checker.py b/pylint/checkers/base/comparison_checker.py
index 5eefa2387..6fb053e2e 100644
--- a/pylint/checkers/base/comparison_checker.py
+++ b/pylint/checkers/base/comparison_checker.py
@@ -1,18 +1,25 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Comparison checker from the basic checker."""
+
 import astroid
 from astroid import nodes
+
 from pylint.checkers import utils
 from pylint.checkers.base.basic_checker import _BasicChecker
 from pylint.interfaces import HIGH
-LITERAL_NODE_TYPES = nodes.Const, nodes.Dict, nodes.List, nodes.Set
-COMPARISON_OPERATORS = frozenset(('==', '!=', '<', '>', '<=', '>='))
-TYPECHECK_COMPARISON_OPERATORS = frozenset(('is', 'is not', '==', '!='))
-TYPE_QNAME = 'builtins.type'
+
+LITERAL_NODE_TYPES = (nodes.Const, nodes.Dict, nodes.List, nodes.Set)
+COMPARISON_OPERATORS = frozenset(("==", "!=", "<", ">", "<=", ">="))
+TYPECHECK_COMPARISON_OPERATORS = frozenset(("is", "is not", "==", "!="))
+TYPE_QNAME = "builtins.type"


-def _is_one_arg_pos_call(call: nodes.NodeNG) ->bool:
+def _is_one_arg_pos_call(call: nodes.NodeNG) -> bool:
     """Is this a call with exactly 1 positional argument ?"""
-    pass
+    return isinstance(call, nodes.Call) and len(call.args) == 1 and not call.keywords


 class ComparisonChecker(_BasicChecker):
@@ -23,41 +30,193 @@ class ComparisonChecker(_BasicChecker):
       '<=', '>' or '>=', and right can be a variable, an attribute, a method or
       a function
     """
-    msgs = {'C0121': ('Comparison %s should be %s', 'singleton-comparison',
-        'Used when an expression is compared to singleton values like True, False or None.'
-        ), 'C0123': ('Use isinstance() rather than type() for a typecheck.',
-        'unidiomatic-typecheck',
-        'The idiomatic way to perform an explicit typecheck in Python is to use isinstance(x, Y) rather than type(x) == Y, type(x) is Y. Though there are unusual situations where these give different results.'
-        , {'old_names': [('W0154', 'old-unidiomatic-typecheck')]}), 'R0123':
-        (
-        "In '%s', use '%s' when comparing constant literals not '%s' ('%s')",
-        'literal-comparison',
-        'Used when comparing an object to a literal, which is usually what you do not want to do, since you can compare to a different literal than what was expected altogether.'
-        ), 'R0124': ('Redundant comparison - %s', 'comparison-with-itself',
-        'Used when something is compared against itself.'), 'R0133': (
-        "Comparison between constants: '%s %s %s' has a constant value",
-        'comparison-of-constants',
-        "When two literals are compared with each other the result is a constant. Using the constant directly is both easier to read and more performant. Initializing 'True' and 'False' this way is not required since Python 2.3."
-        ), 'W0143': (
-        'Comparing against a callable, did you omit the parenthesis?',
-        'comparison-with-callable',
-        'This message is emitted when pylint detects that a comparison with a callable was made, which might suggest that some parenthesis were omitted, resulting in potential unwanted behaviour.'
-        ), 'W0177': ('Comparison %s should be %s', 'nan-comparison',
-        "Used when an expression is compared to NaN values like numpy.NaN and float('nan')."
-        )}
-
-    def _check_singleton_comparison(self, left_value: nodes.NodeNG,
-        right_value: nodes.NodeNG, root_node: nodes.Compare,
-        checking_for_absence: bool=False) ->None:
+
+    msgs = {
+        "C0121": (
+            "Comparison %s should be %s",
+            "singleton-comparison",
+            "Used when an expression is compared to singleton "
+            "values like True, False or None.",
+        ),
+        "C0123": (
+            "Use isinstance() rather than type() for a typecheck.",
+            "unidiomatic-typecheck",
+            "The idiomatic way to perform an explicit typecheck in "
+            "Python is to use isinstance(x, Y) rather than "
+            "type(x) == Y, type(x) is Y. Though there are unusual "
+            "situations where these give different results.",
+            {"old_names": [("W0154", "old-unidiomatic-typecheck")]},
+        ),
+        "R0123": (
+            "In '%s', use '%s' when comparing constant literals not '%s' ('%s')",
+            "literal-comparison",
+            "Used when comparing an object to a literal, which is usually "
+            "what you do not want to do, since you can compare to a different "
+            "literal than what was expected altogether.",
+        ),
+        "R0124": (
+            "Redundant comparison - %s",
+            "comparison-with-itself",
+            "Used when something is compared against itself.",
+        ),
+        "R0133": (
+            "Comparison between constants: '%s %s %s' has a constant value",
+            "comparison-of-constants",
+            "When two literals are compared with each other the result is a constant. "
+            "Using the constant directly is both easier to read and more performant. "
+            "Initializing 'True' and 'False' this way is not required since Python 2.3.",
+        ),
+        "W0143": (
+            "Comparing against a callable, did you omit the parenthesis?",
+            "comparison-with-callable",
+            "This message is emitted when pylint detects that a comparison with a "
+            "callable was made, which might suggest that some parenthesis were omitted, "
+            "resulting in potential unwanted behaviour.",
+        ),
+        "W0177": (
+            "Comparison %s should be %s",
+            "nan-comparison",
+            "Used when an expression is compared to NaN "
+            "values like numpy.NaN and float('nan').",
+        ),
+    }
+
+    def _check_singleton_comparison(
+        self,
+        left_value: nodes.NodeNG,
+        right_value: nodes.NodeNG,
+        root_node: nodes.Compare,
+        checking_for_absence: bool = False,
+    ) -> None:
         """Check if == or != is being used to compare a singleton value."""
-        pass
+        if utils.is_singleton_const(left_value):
+            singleton, other_value = left_value.value, right_value
+        elif utils.is_singleton_const(right_value):
+            singleton, other_value = right_value.value, left_value
+        else:
+            return
+
+        singleton_comparison_example = {False: "'{} is {}'", True: "'{} is not {}'"}
+
+        # True/False singletons have a special-cased message in case the user is
+        # mistakenly using == or != to check for truthiness
+        if singleton in {True, False}:
+            suggestion_template = (
+                "{} if checking for the singleton value {}, or {} if testing for {}"
+            )
+            truthiness_example = {False: "not {}", True: "{}"}
+            truthiness_phrase = {True: "truthiness", False: "falsiness"}
+
+            # Looks for comparisons like x == True or x != False
+            checking_truthiness = singleton is not checking_for_absence
+
+            suggestion = suggestion_template.format(
+                singleton_comparison_example[checking_for_absence].format(
+                    left_value.as_string(), right_value.as_string()
+                ),
+                singleton,
+                (
+                    "'bool({})'"
+                    if not utils.is_test_condition(root_node) and checking_truthiness
+                    else "'{}'"
+                ).format(
+                    truthiness_example[checking_truthiness].format(
+                        other_value.as_string()
+                    )
+                ),
+                truthiness_phrase[checking_truthiness],
+            )
+        else:
+            suggestion = singleton_comparison_example[checking_for_absence].format(
+                left_value.as_string(), right_value.as_string()
+            )
+        self.add_message(
+            "singleton-comparison",
+            node=root_node,
+            args=(f"'{root_node.as_string()}'", suggestion),
+        )
+
+    def _check_nan_comparison(
+        self,
+        left_value: nodes.NodeNG,
+        right_value: nodes.NodeNG,
+        root_node: nodes.Compare,
+        checking_for_absence: bool = False,
+    ) -> None:
+        def _is_float_nan(node: nodes.NodeNG) -> bool:
+            try:
+                if isinstance(node, nodes.Call) and len(node.args) == 1:
+                    if (
+                        node.args[0].value.lower() == "nan"
+                        and node.inferred()[0].pytype() == "builtins.float"
+                    ):
+                        return True
+                return False
+            except AttributeError:
+                return False
+
+        def _is_numpy_nan(node: nodes.NodeNG) -> bool:
+            if isinstance(node, nodes.Attribute) and node.attrname == "NaN":
+                if isinstance(node.expr, nodes.Name):
+                    return node.expr.name in {"numpy", "nmp", "np"}
+            return False
+
+        def _is_nan(node: nodes.NodeNG) -> bool:
+            return _is_float_nan(node) or _is_numpy_nan(node)
+
+        nan_left = _is_nan(left_value)
+        if not nan_left and not _is_nan(right_value):
+            return

-    def _check_literal_comparison(self, literal: nodes.NodeNG, node: nodes.
-        Compare) ->None:
+        absence_text = ""
+        if checking_for_absence:
+            absence_text = "not "
+        if nan_left:
+            suggestion = f"'{absence_text}math.isnan({right_value.as_string()})'"
+        else:
+            suggestion = f"'{absence_text}math.isnan({left_value.as_string()})'"
+        self.add_message(
+            "nan-comparison",
+            node=root_node,
+            args=(f"'{root_node.as_string()}'", suggestion),
+        )
+
+    def _check_literal_comparison(
+        self, literal: nodes.NodeNG, node: nodes.Compare
+    ) -> None:
         """Check if we compare to a literal, which is usually what we do not want to do."""
-        pass
+        is_other_literal = isinstance(literal, (nodes.List, nodes.Dict, nodes.Set))
+        is_const = False
+        if isinstance(literal, nodes.Const):
+            if isinstance(literal.value, bool) or literal.value is None:
+                # Not interested in these values.
+                return
+            is_const = isinstance(literal.value, (bytes, str, int, float))
+
+        if is_const or is_other_literal:
+            incorrect_node_str = node.as_string()
+            if "is not" in incorrect_node_str:
+                equal_or_not_equal = "!="
+                is_or_is_not = "is not"
+            else:
+                equal_or_not_equal = "=="
+                is_or_is_not = "is"
+            fixed_node_str = incorrect_node_str.replace(
+                is_or_is_not, equal_or_not_equal
+            )
+            self.add_message(
+                "literal-comparison",
+                args=(
+                    incorrect_node_str,
+                    equal_or_not_equal,
+                    is_or_is_not,
+                    fixed_node_str,
+                ),
+                node=node,
+                confidence=HIGH,
+            )

-    def _check_logical_tautology(self, node: nodes.Compare) ->None:
+    def _check_logical_tautology(self, node: nodes.Compare) -> None:
         """Check if identifier is compared against itself.

         :param node: Compare node
@@ -66,13 +225,129 @@ class ComparisonChecker(_BasicChecker):
         if val == val:  # [comparison-with-itself]
             pass
         """
-        pass
+        left_operand = node.left
+        right_operand = node.ops[0][1]
+        operator = node.ops[0][0]
+        if isinstance(left_operand, nodes.Const) and isinstance(
+            right_operand, nodes.Const
+        ):
+            left_operand = left_operand.value
+            right_operand = right_operand.value
+        elif isinstance(left_operand, nodes.Name) and isinstance(
+            right_operand, nodes.Name
+        ):
+            left_operand = left_operand.name
+            right_operand = right_operand.name

-    def _check_constants_comparison(self, node: nodes.Compare) ->None:
+        if left_operand == right_operand:
+            suggestion = f"{left_operand} {operator} {right_operand}"
+            self.add_message("comparison-with-itself", node=node, args=(suggestion,))
+
+    def _check_constants_comparison(self, node: nodes.Compare) -> None:
         """When two constants are being compared it is always a logical tautology."""
-        pass
+        left_operand = node.left
+        if not isinstance(left_operand, nodes.Const):
+            return
+
+        right_operand = node.ops[0][1]
+        if not isinstance(right_operand, nodes.Const):
+            return
+
+        operator = node.ops[0][0]
+        self.add_message(
+            "comparison-of-constants",
+            node=node,
+            args=(left_operand.value, operator, right_operand.value),
+            confidence=HIGH,
+        )
+
+    def _check_callable_comparison(self, node: nodes.Compare) -> None:
+        operator = node.ops[0][0]
+        if operator not in COMPARISON_OPERATORS:
+            return
+
+        bare_callables = (nodes.FunctionDef, astroid.BoundMethod)
+        left_operand, right_operand = node.left, node.ops[0][1]
+        # This message should be emitted only when a bare callable is
+        # compared with a non-bare callable.
+        number_of_bare_callables = 0
+        for operand in left_operand, right_operand:
+            inferred = utils.safe_infer(operand)
+            # Ignore callables that raise, as well as typing constants
+            # implemented as functions (that raise via their decorator)
+            if (
+                isinstance(inferred, bare_callables)
+                and "typing._SpecialForm" not in inferred.decoratornames()
+                and not any(isinstance(x, nodes.Raise) for x in inferred.body)
+            ):
+                number_of_bare_callables += 1
+        if number_of_bare_callables == 1:
+            self.add_message("comparison-with-callable", node=node)

-    def _check_type_x_is_y(self, node: nodes.Compare, left: nodes.NodeNG,
-        operator: str, right: nodes.NodeNG) ->None:
+    @utils.only_required_for_messages(
+        "singleton-comparison",
+        "unidiomatic-typecheck",
+        "literal-comparison",
+        "comparison-with-itself",
+        "comparison-of-constants",
+        "comparison-with-callable",
+        "nan-comparison",
+    )
+    def visit_compare(self, node: nodes.Compare) -> None:
+        self._check_callable_comparison(node)
+        self._check_logical_tautology(node)
+        self._check_unidiomatic_typecheck(node)
+        self._check_constants_comparison(node)
+        # NOTE: this checker only works with binary comparisons like 'x == 42'
+        # but not 'x == y == 42'
+        if len(node.ops) != 1:
+            return
+
+        left = node.left
+        operator, right = node.ops[0]
+
+        if operator in {"==", "!="}:
+            self._check_singleton_comparison(
+                left, right, node, checking_for_absence=operator == "!="
+            )
+
+        if operator in {"==", "!=", "is", "is not"}:
+            self._check_nan_comparison(
+                left, right, node, checking_for_absence=operator in {"!=", "is not"}
+            )
+        if operator in {"is", "is not"}:
+            self._check_literal_comparison(right, node)
+
+    def _check_unidiomatic_typecheck(self, node: nodes.Compare) -> None:
+        operator, right = node.ops[0]
+        if operator in TYPECHECK_COMPARISON_OPERATORS:
+            left = node.left
+            if _is_one_arg_pos_call(left):
+                self._check_type_x_is_y(node, left, operator, right)
+
+    def _check_type_x_is_y(
+        self,
+        node: nodes.Compare,
+        left: nodes.NodeNG,
+        operator: str,
+        right: nodes.NodeNG,
+    ) -> None:
         """Check for expressions like type(x) == Y."""
-        pass
+        left_func = utils.safe_infer(left.func)
+        if not (
+            isinstance(left_func, nodes.ClassDef) and left_func.qname() == TYPE_QNAME
+        ):
+            return
+
+        if operator in {"is", "is not"} and _is_one_arg_pos_call(right):
+            right_func = utils.safe_infer(right.func)
+            if (
+                isinstance(right_func, nodes.ClassDef)
+                and right_func.qname() == TYPE_QNAME
+            ):
+                # type(x) == type(a)
+                right_arg = utils.safe_infer(right.args[0])
+                if not isinstance(right_arg, LITERAL_NODE_TYPES):
+                    # not e.g. type(x) == type([])
+                    return
+        self.add_message("unidiomatic-typecheck", node=node)
diff --git a/pylint/checkers/base/docstring_checker.py b/pylint/checkers/base/docstring_checker.py
index c691ef0c0..aecfd9b06 100644
--- a/pylint/checkers/base/docstring_checker.py
+++ b/pylint/checkers/base/docstring_checker.py
@@ -1,42 +1,208 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Docstring checker from the basic checker."""
+
 from __future__ import annotations
+
 import re
 from typing import Literal
+
 import astroid
 from astroid import nodes
+
 from pylint import interfaces
 from pylint.checkers import utils
 from pylint.checkers.base.basic_checker import _BasicChecker
-from pylint.checkers.utils import is_overload_stub, is_property_deleter, is_property_setter
-NO_REQUIRED_DOC_RGX = re.compile('^_')
+from pylint.checkers.utils import (
+    is_overload_stub,
+    is_property_deleter,
+    is_property_setter,
+)
+
+# do not require a doc string on private/system methods
+NO_REQUIRED_DOC_RGX = re.compile("^_")
+
+
+def _infer_dunder_doc_attribute(
+    node: nodes.Module | nodes.ClassDef | nodes.FunctionDef,
+) -> str | None:
+    # Try to see if we have a `__doc__` attribute.
+    try:
+        docstring = node["__doc__"]
+    except KeyError:
+        return None
+
+    docstring = utils.safe_infer(docstring)
+    if not docstring:
+        return None
+    if not isinstance(docstring, nodes.Const):
+        return None
+    return str(docstring.value)


 class DocStringChecker(_BasicChecker):
-    msgs = {'C0112': ('Empty %s docstring', 'empty-docstring',
-        'Used when a module, function, class or method has an empty docstring (it would be too easy ;).'
-        , {'old_names': [('W0132', 'old-empty-docstring')]}), 'C0114': (
-        'Missing module docstring', 'missing-module-docstring',
-        'Used when a module has no docstring. Empty modules do not require a docstring.'
-        , {'old_names': [('C0111', 'missing-docstring')]}), 'C0115': (
-        'Missing class docstring', 'missing-class-docstring',
-        'Used when a class has no docstring. Even an empty class must have a docstring.'
-        , {'old_names': [('C0111', 'missing-docstring')]}), 'C0116': (
-        'Missing function or method docstring',
-        'missing-function-docstring',
-        'Used when a function or method has no docstring. Some special methods like __init__ do not require a docstring.'
-        , {'old_names': [('C0111', 'missing-docstring')]})}
-    options = ('no-docstring-rgx', {'default': NO_REQUIRED_DOC_RGX, 'type':
-        'regexp', 'metavar': '<regexp>', 'help':
-        'Regular expression which should only match function or class names that do not require a docstring.'
-        }), ('docstring-min-length', {'default': -1, 'type': 'int',
-        'metavar': '<int>', 'help':
-        'Minimum line length for functions/classes that require docstrings, shorter ones are exempt.'
-        })
+    msgs = {
+        "C0112": (
+            "Empty %s docstring",
+            "empty-docstring",
+            "Used when a module, function, class or method has an empty "
+            "docstring (it would be too easy ;).",
+            {"old_names": [("W0132", "old-empty-docstring")]},
+        ),
+        "C0114": (
+            "Missing module docstring",
+            "missing-module-docstring",
+            "Used when a module has no docstring. "
+            "Empty modules do not require a docstring.",
+            {"old_names": [("C0111", "missing-docstring")]},
+        ),
+        "C0115": (
+            "Missing class docstring",
+            "missing-class-docstring",
+            "Used when a class has no docstring. "
+            "Even an empty class must have a docstring.",
+            {"old_names": [("C0111", "missing-docstring")]},
+        ),
+        "C0116": (
+            "Missing function or method docstring",
+            "missing-function-docstring",
+            "Used when a function or method has no docstring. "
+            "Some special methods like __init__ do not require a "
+            "docstring.",
+            {"old_names": [("C0111", "missing-docstring")]},
+        ),
+    }
+    options = (
+        (
+            "no-docstring-rgx",
+            {
+                "default": NO_REQUIRED_DOC_RGX,
+                "type": "regexp",
+                "metavar": "<regexp>",
+                "help": "Regular expression which should only match "
+                "function or class names that do not require a "
+                "docstring.",
+            },
+        ),
+        (
+            "docstring-min-length",
+            {
+                "default": -1,
+                "type": "int",
+                "metavar": "<int>",
+                "help": (
+                    "Minimum line length for functions/classes that"
+                    " require docstrings, shorter ones are exempt."
+                ),
+            },
+        ),
+    )
+
+    def open(self) -> None:
+        self.linter.stats.reset_undocumented()
+
+    @utils.only_required_for_messages("missing-module-docstring", "empty-docstring")
+    def visit_module(self, node: nodes.Module) -> None:
+        self._check_docstring("module", node)
+
+    @utils.only_required_for_messages("missing-class-docstring", "empty-docstring")
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
+        if self.linter.config.no_docstring_rgx.match(node.name) is None:
+            self._check_docstring("class", node)
+
+    @utils.only_required_for_messages("missing-function-docstring", "empty-docstring")
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
+        if self.linter.config.no_docstring_rgx.match(node.name) is None:
+            ftype = "method" if node.is_method() else "function"
+            if (
+                is_property_setter(node)
+                or is_property_deleter(node)
+                or is_overload_stub(node)
+            ):
+                return
+
+            if isinstance(node.parent.frame(), nodes.ClassDef):
+                overridden = False
+                confidence = (
+                    interfaces.INFERENCE
+                    if utils.has_known_bases(node.parent.frame())
+                    else interfaces.INFERENCE_FAILURE
+                )
+                # check if node is from a method overridden by its ancestor
+                for ancestor in node.parent.frame().ancestors():
+                    if ancestor.qname() == "builtins.object":
+                        continue
+                    if node.name in ancestor and isinstance(
+                        ancestor[node.name], nodes.FunctionDef
+                    ):
+                        overridden = True
+                        break
+                self._check_docstring(
+                    ftype, node, report_missing=not overridden, confidence=confidence  # type: ignore[arg-type]
+                )
+            elif isinstance(node.parent.frame(), nodes.Module):
+                self._check_docstring(ftype, node)  # type: ignore[arg-type]
+            else:
+                return
+
     visit_asyncfunctiondef = visit_functiondef

-    def _check_docstring(self, node_type: Literal['class', 'function',
-        'method', 'module'], node: (nodes.Module | nodes.ClassDef | nodes.
-        FunctionDef), report_missing: bool=True, confidence: interfaces.
-        Confidence=interfaces.HIGH) ->None:
+    def _check_docstring(
+        self,
+        node_type: Literal["class", "function", "method", "module"],
+        node: nodes.Module | nodes.ClassDef | nodes.FunctionDef,
+        report_missing: bool = True,
+        confidence: interfaces.Confidence = interfaces.HIGH,
+    ) -> None:
         """Check if the node has a non-empty docstring."""
-        pass
+        docstring = node.doc_node.value if node.doc_node else None
+        if docstring is None:
+            docstring = _infer_dunder_doc_attribute(node)
+
+        if docstring is None:
+            if not report_missing:
+                return
+            lines = utils.get_node_last_lineno(node) - node.lineno
+
+            if node_type == "module" and not lines:
+                # If the module does not have a body, there's no reason
+                # to require a docstring.
+                return
+            max_lines = self.linter.config.docstring_min_length
+
+            if node_type != "module" and max_lines > -1 and lines < max_lines:
+                return
+            if node_type == "class":
+                self.linter.stats.undocumented["klass"] += 1
+            else:
+                self.linter.stats.undocumented[node_type] += 1
+            if (
+                node.body
+                and isinstance(node.body[0], nodes.Expr)
+                and isinstance(node.body[0].value, nodes.Call)
+            ):
+                # Most likely a string with a format call. Let's see.
+                func = utils.safe_infer(node.body[0].value.func)
+                if isinstance(func, astroid.BoundMethod) and isinstance(
+                    func.bound, astroid.Instance
+                ):
+                    # Strings.
+                    if func.bound.name in {"str", "unicode", "bytes"}:
+                        return
+            if node_type == "module":
+                message = "missing-module-docstring"
+            elif node_type == "class":
+                message = "missing-class-docstring"
+            else:
+                message = "missing-function-docstring"
+            self.add_message(message, node=node, confidence=confidence)
+        elif not docstring.strip():
+            if node_type == "class":
+                self.linter.stats.undocumented["klass"] += 1
+            else:
+                self.linter.stats.undocumented[node_type] += 1
+            self.add_message(
+                "empty-docstring", node=node, args=(node_type,), confidence=confidence
+            )
diff --git a/pylint/checkers/base/function_checker.py b/pylint/checkers/base/function_checker.py
index 8a0108704..f7d92a464 100644
--- a/pylint/checkers/base/function_checker.py
+++ b/pylint/checkers/base/function_checker.py
@@ -1,20 +1,42 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Function checker for Python code."""
+
 from __future__ import annotations
+
 from itertools import chain
+
 from astroid import nodes
+
 from pylint.checkers import utils
 from pylint.checkers.base.basic_checker import _BasicChecker


 class FunctionChecker(_BasicChecker):
     """Check if a function definition handles possible side effects."""
-    msgs = {'W0135': ('The context used in function %r will not be exited.',
-        'contextmanager-generator-missing-cleanup',
-        'Used when a contextmanager is used inside a generator function and the cleanup is not handled.'
-        )}

-    def _check_contextmanager_generator_missing_cleanup(self, node: nodes.
-        FunctionDef) ->None:
+    msgs = {
+        "W0135": (
+            "The context used in function %r will not be exited.",
+            "contextmanager-generator-missing-cleanup",
+            "Used when a contextmanager is used inside a generator function"
+            " and the cleanup is not handled.",
+        )
+    }
+
+    @utils.only_required_for_messages("contextmanager-generator-missing-cleanup")
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
+        self._check_contextmanager_generator_missing_cleanup(node)
+
+    @utils.only_required_for_messages("contextmanager-generator-missing-cleanup")
+    def visit_asyncfunctiondef(self, node: nodes.AsyncFunctionDef) -> None:
+        self._check_contextmanager_generator_missing_cleanup(node)
+
+    def _check_contextmanager_generator_missing_cleanup(
+        self, node: nodes.FunctionDef
+    ) -> None:
         """Check a FunctionDef to find if it is a generator
         that uses a contextmanager internally.

@@ -23,11 +45,41 @@ class FunctionChecker(_BasicChecker):
         :param node: FunctionDef node to check
         :type node: nodes.FunctionDef
         """
-        pass
+        # if function does not use a Yield statement, it can't be a generator
+        with_nodes = list(node.nodes_of_class(nodes.With))
+        if not with_nodes:
+            return
+        # check for Yield inside the With statement
+        yield_nodes = list(
+            chain.from_iterable(
+                with_node.nodes_of_class(nodes.Yield) for with_node in with_nodes
+            )
+        )
+        if not yield_nodes:
+            return
+
+        # infer the call that yields a value, and check if it is a contextmanager
+        for with_node in with_nodes:
+            for call, held in with_node.items:
+                if held is None:
+                    # if we discard the value, then we can skip checking it
+                    continue
+
+                # safe infer is a generator
+                inferred_node = getattr(utils.safe_infer(call), "parent", None)
+                if not isinstance(inferred_node, nodes.FunctionDef):
+                    continue
+                if self._node_fails_contextmanager_cleanup(inferred_node, yield_nodes):
+                    self.add_message(
+                        "contextmanager-generator-missing-cleanup",
+                        node=with_node,
+                        args=(node.name,),
+                    )

     @staticmethod
-    def _node_fails_contextmanager_cleanup(node: nodes.FunctionDef,
-        yield_nodes: list[nodes.Yield]) ->bool:
+    def _node_fails_contextmanager_cleanup(
+        node: nodes.FunctionDef, yield_nodes: list[nodes.Yield]
+    ) -> bool:
         """Check if a node fails contextmanager cleanup.

         Current checks for a contextmanager:
@@ -42,4 +94,56 @@ class FunctionChecker(_BasicChecker):
         :type yield_nodes: list[nodes.Yield]
         :rtype: bool
         """
-        pass
+
+        def check_handles_generator_exceptions(try_node: nodes.Try) -> bool:
+            # needs to handle either GeneratorExit, Exception, or bare except
+            for handler in try_node.handlers:
+                if handler.type is None:
+                    # handles all exceptions (bare except)
+                    return True
+                inferred = utils.safe_infer(handler.type)
+                if inferred and inferred.qname() in {
+                    "builtins.GeneratorExit",
+                    "builtins.Exception",
+                }:
+                    return True
+            return False
+
+        # if context manager yields a non-constant value, then continue checking
+        if any(
+            yield_node.value is None or isinstance(yield_node.value, nodes.Const)
+            for yield_node in yield_nodes
+        ):
+            return False
+
+        # Check if yield expression is last statement
+        yield_nodes = list(node.nodes_of_class(nodes.Yield))
+        if len(yield_nodes) == 1:
+            n = yield_nodes[0].parent
+            while n is not node:
+                if n.next_sibling() is not None:
+                    break
+                n = n.parent
+            else:
+                # No next statement found
+                return False
+
+        # if function body has multiple Try, filter down to the ones that have a yield node
+        try_with_yield_nodes = [
+            try_node
+            for try_node in node.nodes_of_class(nodes.Try)
+            if any(try_node.nodes_of_class(nodes.Yield))
+        ]
+        if not try_with_yield_nodes:
+            # no try blocks at all, so checks after this line do not apply
+            return True
+        # if the contextmanager has a finally block, then it is fine
+        if all(try_node.finalbody for try_node in try_with_yield_nodes):
+            return False
+        # if the contextmanager catches GeneratorExit, then it is fine
+        if all(
+            check_handles_generator_exceptions(try_node)
+            for try_node in try_with_yield_nodes
+        ):
+            return False
+        return True
diff --git a/pylint/checkers/base/name_checker/checker.py b/pylint/checkers/base/name_checker/checker.py
index 1ef68f6ca..3514829fb 100644
--- a/pylint/checkers/base/name_checker/checker.py
+++ b/pylint/checkers/base/name_checker/checker.py
@@ -1,5 +1,11 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Basic checker for Python code."""
+
 from __future__ import annotations
+
 import argparse
 import collections
 import itertools
@@ -9,23 +15,44 @@ from collections.abc import Iterable
 from enum import Enum, auto
 from re import Pattern
 from typing import TYPE_CHECKING, Tuple
+
 import astroid
 from astroid import nodes
+
 from pylint import constants, interfaces
 from pylint.checkers import utils
 from pylint.checkers.base.basic_checker import _BasicChecker
-from pylint.checkers.base.name_checker.naming_style import KNOWN_NAME_TYPES, KNOWN_NAME_TYPES_WITH_STYLE, NAMING_STYLES, _create_naming_options
+from pylint.checkers.base.name_checker.naming_style import (
+    KNOWN_NAME_TYPES,
+    KNOWN_NAME_TYPES_WITH_STYLE,
+    NAMING_STYLES,
+    _create_naming_options,
+)
 from pylint.checkers.utils import is_property_deleter, is_property_setter
 from pylint.typing import Options
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter
+
 _BadNamesTuple = Tuple[nodes.NodeNG, str, str, interfaces.Confidence]
-DEFAULT_PATTERNS = {'typevar': re.compile(
-    '^_{0,2}(?!T[A-Z])(?:[A-Z]+|(?:[A-Z]+[a-z]+)+T?(?<!Type))(?:_co(?:ntra)?)?$'
-    ), 'typealias': re.compile(
-    '^_{0,2}(?!T[A-Z]|Type)[A-Z]+[a-z0-9]+(?:[A-Z][a-z0-9]+)*$')}
-BUILTIN_PROPERTY = 'builtins.property'
-TYPE_VAR_QNAME = frozenset(('typing.TypeVar', 'typing_extensions.TypeVar'))
+
+# Default patterns for name types that do not have styles
+DEFAULT_PATTERNS = {
+    "typevar": re.compile(
+        r"^_{0,2}(?!T[A-Z])(?:[A-Z]+|(?:[A-Z]+[a-z]+)+T?(?<!Type))(?:_co(?:ntra)?)?$"
+    ),
+    "typealias": re.compile(
+        r"^_{0,2}(?!T[A-Z]|Type)[A-Z]+[a-z0-9]+(?:[A-Z][a-z0-9]+)*$"
+    ),
+}
+
+BUILTIN_PROPERTY = "builtins.property"
+TYPE_VAR_QNAME = frozenset(
+    (
+        "typing.TypeVar",
+        "typing_extensions.TypeVar",
+    )
+)


 class TypeVarVariance(Enum):
@@ -36,26 +63,47 @@ class TypeVarVariance(Enum):
     inferred = auto()


-def _get_properties(config: argparse.Namespace) ->tuple[set[str], set[str]]:
+def _get_properties(config: argparse.Namespace) -> tuple[set[str], set[str]]:
     """Returns a tuple of property classes and names.

     Property classes are fully qualified, such as 'abc.abstractproperty' and
     property names are the actual names, such as 'abstract_property'.
     """
-    pass
+    property_classes = {BUILTIN_PROPERTY}
+    property_names: set[str] = set()  # Not returning 'property', it has its own check.
+    if config is not None:
+        property_classes.update(config.property_classes)
+        property_names.update(
+            prop.rsplit(".", 1)[-1] for prop in config.property_classes
+        )
+    return property_classes, property_names


-def _redefines_import(node: nodes.AssignName) ->bool:
+def _redefines_import(node: nodes.AssignName) -> bool:
     """Detect that the given node (AssignName) is inside an
     exception handler and redefines an import from the tryexcept body.

     Returns True if the node redefines an import, False otherwise.
     """
-    pass
+    current = node
+    while current and not isinstance(current.parent, nodes.ExceptHandler):
+        current = current.parent
+    if not current or not utils.error_of_type(current.parent, ImportError):
+        return False
+    try_block = current.parent.parent
+    for import_node in try_block.nodes_of_class((nodes.ImportFrom, nodes.Import)):
+        for name, alias in import_node.names:
+            if alias:
+                if alias == node.name:
+                    return True
+            elif name == node.name:
+                return True
+    return False


-def _determine_function_name_type(node: nodes.FunctionDef, config: argparse
-    .Namespace) ->str:
+def _determine_function_name_type(
+    node: nodes.FunctionDef, config: argparse.Namespace
+) -> str:
     """Determine the name type whose regex the function's name should match.

     :param node: A function node.
@@ -63,54 +111,169 @@ def _determine_function_name_type(node: nodes.FunctionDef, config: argparse

     :returns: One of ('function', 'method', 'attr')
     """
-    pass
+    property_classes, property_names = _get_properties(config)
+    if not node.is_method():
+        return "function"
+
+    if is_property_setter(node) or is_property_deleter(node):
+        # If the function is decorated using the prop_method.{setter,getter}
+        # form, treat it like an attribute as well.
+        return "attr"
+
+    decorators = node.decorators.nodes if node.decorators else []
+    for decorator in decorators:
+        # If the function is a property (decorated with @property
+        # or @abc.abstractproperty), the name type is 'attr'.
+        if isinstance(decorator, nodes.Name) or (
+            isinstance(decorator, nodes.Attribute)
+            and decorator.attrname in property_names
+        ):
+            inferred = utils.safe_infer(decorator)
+            if (
+                inferred
+                and hasattr(inferred, "qname")
+                and inferred.qname() in property_classes
+            ):
+                return "attr"
+    return "method"


-EXEMPT_NAME_CATEGORIES = {'exempt', 'ignore'}
+# Name categories that are always consistent with all naming conventions.
+EXEMPT_NAME_CATEGORIES = {"exempt", "ignore"}
+
+
+def _is_multi_naming_match(
+    match: re.Match[str] | None, node_type: str, confidence: interfaces.Confidence
+) -> bool:
+    return (
+        match is not None
+        and match.lastgroup is not None
+        and match.lastgroup not in EXEMPT_NAME_CATEGORIES
+        and (node_type != "method" or confidence != interfaces.INFERENCE_FAILURE)
+    )


 class NameChecker(_BasicChecker):
-    msgs = {'C0103': ('%s name "%s" doesn\'t conform to %s', 'invalid-name',
-        "Used when the name doesn't conform to naming rules associated to its type (constant, variable, class...)."
-        ), 'C0104': ('Disallowed name "%s"', 'disallowed-name',
-        'Used when the name matches bad-names or bad-names-rgxs- (unauthorized names).'
-        , {'old_names': [('C0102', 'blacklisted-name')]}), 'C0105': (
-        'Type variable name does not reflect variance%s',
-        'typevar-name-incorrect-variance',
-        "Emitted when a TypeVar name doesn't reflect its type variance. According to PEP8, it is recommended to add suffixes '_co' and '_contra' to the variables used to declare covariant or contravariant behaviour respectively. Invariant (default) variables do not require a suffix. The message is also emitted when invariant variables do have a suffix."
-        ), 'C0131': ('TypeVar cannot be both covariant and contravariant',
-        'typevar-double-variance',
-        'Emitted when both the "covariant" and "contravariant" keyword arguments are set to "True" in a TypeVar.'
-        ), 'C0132': (
-        'TypeVar name "%s" does not match assigned variable name "%s"',
-        'typevar-name-mismatch',
-        'Emitted when a TypeVar is assigned to a variable that does not match its name argument.'
-        )}
-    _options: Options = (('good-names', {'default': ('i', 'j', 'k', 'ex',
-        'Run', '_'), 'type': 'csv', 'metavar': '<names>', 'help':
-        'Good variable names which should always be accepted, separated by a comma.'
-        }), ('good-names-rgxs', {'default': '', 'type': 'regexp_csv',
-        'metavar': '<names>', 'help':
-        'Good variable names regexes, separated by a comma. If names match any regex, they will always be accepted'
-        }), ('bad-names', {'default': ('foo', 'bar', 'baz', 'toto', 'tutu',
-        'tata'), 'type': 'csv', 'metavar': '<names>', 'help':
-        'Bad variable names which should always be refused, separated by a comma.'
-        }), ('bad-names-rgxs', {'default': '', 'type': 'regexp_csv',
-        'metavar': '<names>', 'help':
-        'Bad variable names regexes, separated by a comma. If names match any regex, they will always be refused'
-        }), ('name-group', {'default': (), 'type': 'csv', 'metavar':
-        '<name1:name2>', 'help':
-        "Colon-delimited sets of names that determine each other's naming style when the name regexes allow several styles."
-        }), ('include-naming-hint', {'default': False, 'type': 'yn',
-        'metavar': '<y or n>', 'help':
-        'Include a hint for the correct naming format with invalid-name.'}),
-        ('property-classes', {'default': ('abc.abstractproperty',), 'type':
-        'csv', 'metavar': '<decorator names>', 'help':
-        'List of decorators that produce properties, such as abc.abstractproperty. Add to this list to register other decorators that produce valid properties. These decorators are taken in consideration only for invalid-name.'
-        }))
+    msgs = {
+        "C0103": (
+            '%s name "%s" doesn\'t conform to %s',
+            "invalid-name",
+            "Used when the name doesn't conform to naming rules "
+            "associated to its type (constant, variable, class...).",
+        ),
+        "C0104": (
+            'Disallowed name "%s"',
+            "disallowed-name",
+            "Used when the name matches bad-names or bad-names-rgxs- (unauthorized names).",
+            {
+                "old_names": [
+                    ("C0102", "blacklisted-name"),
+                ]
+            },
+        ),
+        "C0105": (
+            "Type variable name does not reflect variance%s",
+            "typevar-name-incorrect-variance",
+            "Emitted when a TypeVar name doesn't reflect its type variance. "
+            "According to PEP8, it is recommended to add suffixes '_co' and "
+            "'_contra' to the variables used to declare covariant or "
+            "contravariant behaviour respectively. Invariant (default) variables "
+            "do not require a suffix. The message is also emitted when invariant "
+            "variables do have a suffix.",
+        ),
+        "C0131": (
+            "TypeVar cannot be both covariant and contravariant",
+            "typevar-double-variance",
+            'Emitted when both the "covariant" and "contravariant" '
+            'keyword arguments are set to "True" in a TypeVar.',
+        ),
+        "C0132": (
+            'TypeVar name "%s" does not match assigned variable name "%s"',
+            "typevar-name-mismatch",
+            "Emitted when a TypeVar is assigned to a variable "
+            "that does not match its name argument.",
+        ),
+    }
+
+    _options: Options = (
+        (
+            "good-names",
+            {
+                "default": ("i", "j", "k", "ex", "Run", "_"),
+                "type": "csv",
+                "metavar": "<names>",
+                "help": "Good variable names which should always be accepted,"
+                " separated by a comma.",
+            },
+        ),
+        (
+            "good-names-rgxs",
+            {
+                "default": "",
+                "type": "regexp_csv",
+                "metavar": "<names>",
+                "help": "Good variable names regexes, separated by a comma. If names match any regex,"
+                " they will always be accepted",
+            },
+        ),
+        (
+            "bad-names",
+            {
+                "default": ("foo", "bar", "baz", "toto", "tutu", "tata"),
+                "type": "csv",
+                "metavar": "<names>",
+                "help": "Bad variable names which should always be refused, "
+                "separated by a comma.",
+            },
+        ),
+        (
+            "bad-names-rgxs",
+            {
+                "default": "",
+                "type": "regexp_csv",
+                "metavar": "<names>",
+                "help": "Bad variable names regexes, separated by a comma. If names match any regex,"
+                " they will always be refused",
+            },
+        ),
+        (
+            "name-group",
+            {
+                "default": (),
+                "type": "csv",
+                "metavar": "<name1:name2>",
+                "help": (
+                    "Colon-delimited sets of names that determine each"
+                    " other's naming style when the name regexes"
+                    " allow several styles."
+                ),
+            },
+        ),
+        (
+            "include-naming-hint",
+            {
+                "default": False,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Include a hint for the correct naming format with invalid-name.",
+            },
+        ),
+        (
+            "property-classes",
+            {
+                "default": ("abc.abstractproperty",),
+                "type": "csv",
+                "metavar": "<decorator names>",
+                "help": "List of decorators that produce properties, such as "
+                "abc.abstractproperty. Add to this list to register "
+                "other decorators that produce valid properties. "
+                "These decorators are taken in consideration only for invalid-name.",
+            },
+        ),
+    )
     options: Options = _options + _create_naming_options()

-    def __init__(self, linter: PyLinter) ->None:
+    def __init__(self, linter: PyLinter) -> None:
         super().__init__(linter)
         self._name_group: dict[str, str] = {}
         self._bad_names: dict[str, dict[str, list[_BadNamesTuple]]] = {}
@@ -118,35 +281,436 @@ class NameChecker(_BasicChecker):
         self._name_hints: dict[str, str] = {}
         self._good_names_rgxs_compiled: list[re.Pattern[str]] = []
         self._bad_names_rgxs_compiled: list[re.Pattern[str]] = []
+
+    def open(self) -> None:
+        self.linter.stats.reset_bad_names()
+        for group in self.linter.config.name_group:
+            for name_type in group.split(":"):
+                self._name_group[name_type] = f"group_{group}"
+
+        regexps, hints = self._create_naming_rules()
+        self._name_regexps = regexps
+        self._name_hints = hints
+        self._good_names_rgxs_compiled = [
+            re.compile(rgxp) for rgxp in self.linter.config.good_names_rgxs
+        ]
+        self._bad_names_rgxs_compiled = [
+            re.compile(rgxp) for rgxp in self.linter.config.bad_names_rgxs
+        ]
+
+    def _create_naming_rules(self) -> tuple[dict[str, Pattern[str]], dict[str, str]]:
+        regexps: dict[str, Pattern[str]] = {}
+        hints: dict[str, str] = {}
+
+        for name_type in KNOWN_NAME_TYPES:
+            if name_type in KNOWN_NAME_TYPES_WITH_STYLE:
+                naming_style_name = getattr(
+                    self.linter.config, f"{name_type}_naming_style"
+                )
+                regexps[name_type] = NAMING_STYLES[naming_style_name].get_regex(
+                    name_type
+                )
+            else:
+                naming_style_name = "predefined"
+                regexps[name_type] = DEFAULT_PATTERNS[name_type]
+
+            custom_regex_setting_name = f"{name_type}_rgx"
+            custom_regex = getattr(self.linter.config, custom_regex_setting_name, None)
+            if custom_regex is not None:
+                regexps[name_type] = custom_regex
+
+            if custom_regex is not None:
+                hints[name_type] = f"{custom_regex.pattern!r} pattern"
+            else:
+                hints[name_type] = f"{naming_style_name} naming style"
+
+        return regexps, hints
+
+    @utils.only_required_for_messages("disallowed-name", "invalid-name")
+    def visit_module(self, node: nodes.Module) -> None:
+        self._check_name("module", node.name.split(".")[-1], node)
+        self._bad_names = {}
+
+    def leave_module(self, _: nodes.Module) -> None:
+        for all_groups in self._bad_names.values():
+            if len(all_groups) < 2:
+                continue
+            groups: collections.defaultdict[int, list[list[_BadNamesTuple]]] = (
+                collections.defaultdict(list)
+            )
+            min_warnings = sys.maxsize
+            prevalent_group, _ = max(all_groups.items(), key=lambda item: len(item[1]))
+            for group in all_groups.values():
+                groups[len(group)].append(group)
+                min_warnings = min(len(group), min_warnings)
+            if len(groups[min_warnings]) > 1:
+                by_line = sorted(
+                    groups[min_warnings],
+                    key=lambda group: min(
+                        warning[0].lineno
+                        for warning in group
+                        if warning[0].lineno is not None
+                    ),
+                )
+                warnings: Iterable[_BadNamesTuple] = itertools.chain(*by_line[1:])
+            else:
+                warnings = groups[min_warnings][0]
+            for args in warnings:
+                self._raise_name_warning(prevalent_group, *args)
+
+    @utils.only_required_for_messages("disallowed-name", "invalid-name")
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
+        self._check_name("class", node.name, node)
+        for attr, anodes in node.instance_attrs.items():
+            if not any(node.instance_attr_ancestors(attr)):
+                self._check_name("attr", attr, anodes[0])
+
+    @utils.only_required_for_messages("disallowed-name", "invalid-name")
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
+        # Do not emit any warnings if the method is just an implementation
+        # of a base class method.
+        confidence = interfaces.HIGH
+        if node.is_method():
+            if utils.overrides_a_method(node.parent.frame(), node.name):
+                return
+            confidence = (
+                interfaces.INFERENCE
+                if utils.has_known_bases(node.parent.frame())
+                else interfaces.INFERENCE_FAILURE
+            )
+
+        self._check_name(
+            _determine_function_name_type(node, config=self.linter.config),
+            node.name,
+            node,
+            confidence,
+        )
+        # Check argument names
+        args = node.args.args
+        if args is not None:
+            self._recursive_check_names(args)
+
     visit_asyncfunctiondef = visit_functiondef

-    @utils.only_required_for_messages('disallowed-name', 'invalid-name',
-        'typevar-name-incorrect-variance', 'typevar-double-variance',
-        'typevar-name-mismatch')
-    def visit_assignname(self, node: nodes.AssignName) ->None:
+    @utils.only_required_for_messages(
+        "disallowed-name",
+        "invalid-name",
+        "typevar-name-incorrect-variance",
+        "typevar-double-variance",
+        "typevar-name-mismatch",
+    )
+    def visit_assignname(  # pylint: disable=too-many-branches
+        self, node: nodes.AssignName
+    ) -> None:
         """Check module level assigned names."""
-        pass
+        frame = node.frame()
+        assign_type = node.assign_type()

-    def _recursive_check_names(self, args: list[nodes.AssignName]) ->None:
+        # Check names defined in comprehensions
+        if isinstance(assign_type, nodes.Comprehension):
+            self._check_name("inlinevar", node.name, node)
+
+        elif isinstance(assign_type, nodes.TypeVar):
+            self._check_name("typevar", node.name, node)
+
+        elif isinstance(assign_type, nodes.TypeAlias):
+            self._check_name("typealias", node.name, node)
+
+        # Check names defined in module scope
+        elif isinstance(frame, nodes.Module):
+            # Check names defined in Assign nodes
+            if isinstance(assign_type, nodes.Assign):
+                inferred_assign_type = utils.safe_infer(assign_type.value)
+
+                # Check TypeVar's and TypeAliases assigned alone or in tuple assignment
+                if isinstance(node.parent, nodes.Assign):
+                    if self._assigns_typevar(assign_type.value):
+                        self._check_name("typevar", assign_type.targets[0].name, node)
+                        return
+                    if self._assigns_typealias(assign_type.value):
+                        self._check_name("typealias", assign_type.targets[0].name, node)
+                        return
+
+                if (
+                    isinstance(node.parent, nodes.Tuple)
+                    and isinstance(assign_type.value, nodes.Tuple)
+                    # protect against unbalanced tuple unpacking
+                    and node.parent.elts.index(node) < len(assign_type.value.elts)
+                ):
+                    assigner = assign_type.value.elts[node.parent.elts.index(node)]
+                    if self._assigns_typevar(assigner):
+                        self._check_name(
+                            "typevar",
+                            assign_type.targets[0]
+                            .elts[node.parent.elts.index(node)]
+                            .name,
+                            node,
+                        )
+                        return
+                    if self._assigns_typealias(assigner):
+                        self._check_name(
+                            "typealias",
+                            assign_type.targets[0]
+                            .elts[node.parent.elts.index(node)]
+                            .name,
+                            node,
+                        )
+                        return
+
+                # Check classes (TypeVar's are classes so they need to be excluded first)
+                elif isinstance(inferred_assign_type, nodes.ClassDef):
+                    self._check_name("class", node.name, node)
+
+                # Don't emit if the name redefines an import in an ImportError except handler.
+                elif not _redefines_import(node) and isinstance(
+                    inferred_assign_type, nodes.Const
+                ):
+                    self._check_name("const", node.name, node)
+                else:
+                    self._check_name(
+                        "variable", node.name, node, disallowed_check_only=True
+                    )
+
+            # Check names defined in AnnAssign nodes
+            elif isinstance(assign_type, nodes.AnnAssign):
+                if utils.is_assign_name_annotated_with(node, "Final"):
+                    self._check_name("const", node.name, node)
+                elif self._assigns_typealias(assign_type.annotation):
+                    self._check_name("typealias", node.name, node)
+
+        # Check names defined in function scopes
+        elif isinstance(frame, nodes.FunctionDef):
+            # global introduced variable aren't in the function locals
+            if node.name in frame and node.name not in frame.argnames():
+                if not _redefines_import(node):
+                    if isinstance(
+                        assign_type, nodes.AnnAssign
+                    ) and self._assigns_typealias(assign_type.annotation):
+                        self._check_name("typealias", node.name, node)
+                    else:
+                        self._check_name("variable", node.name, node)
+
+        # Check names defined in class scopes
+        elif isinstance(frame, nodes.ClassDef) and not any(
+            frame.local_attr_ancestors(node.name)
+        ):
+            if utils.is_enum_member(node) or utils.is_assign_name_annotated_with(
+                node, "Final"
+            ):
+                self._check_name("class_const", node.name, node)
+            else:
+                self._check_name("class_attribute", node.name, node)
+
+    def _recursive_check_names(self, args: list[nodes.AssignName]) -> None:
         """Check names in a possibly recursive list <arg>."""
-        pass
+        for arg in args:
+            self._check_name("argument", arg.name, arg)
+
+    def _find_name_group(self, node_type: str) -> str:
+        return self._name_group.get(node_type, node_type)
+
+    def _raise_name_warning(
+        self,
+        prevalent_group: str | None,
+        node: nodes.NodeNG,
+        node_type: str,
+        name: str,
+        confidence: interfaces.Confidence,
+        warning: str = "invalid-name",
+    ) -> None:
+        type_label = constants.HUMAN_READABLE_TYPES[node_type]
+        hint = self._name_hints[node_type]
+        if prevalent_group:
+            # This happens in the multi naming match case. The expected
+            # prevalent group needs to be spelled out to make the message
+            # correct.
+            hint = f"the `{prevalent_group}` group in the {hint}"
+        if self.linter.config.include_naming_hint:
+            hint += f" ({self._name_regexps[node_type].pattern!r} pattern)"
+        args = (
+            (type_label.capitalize(), name, hint)
+            if warning == "invalid-name"
+            else (type_label.capitalize(), name)
+        )

-    def _check_name(self, node_type: str, name: str, node: nodes.NodeNG,
-        confidence: interfaces.Confidence=interfaces.HIGH,
-        disallowed_check_only: bool=False) ->None:
+        self.add_message(warning, node=node, args=args, confidence=confidence)
+        self.linter.stats.increase_bad_name(node_type, 1)
+
+    def _name_allowed_by_regex(self, name: str) -> bool:
+        return name in self.linter.config.good_names or any(
+            pattern.match(name) for pattern in self._good_names_rgxs_compiled
+        )
+
+    def _name_disallowed_by_regex(self, name: str) -> bool:
+        return name in self.linter.config.bad_names or any(
+            pattern.match(name) for pattern in self._bad_names_rgxs_compiled
+        )
+
+    def _check_name(
+        self,
+        node_type: str,
+        name: str,
+        node: nodes.NodeNG,
+        confidence: interfaces.Confidence = interfaces.HIGH,
+        disallowed_check_only: bool = False,
+    ) -> None:
         """Check for a name using the type's regexp."""
-        pass
+
+        def _should_exempt_from_invalid_name(node: nodes.NodeNG) -> bool:
+            if node_type == "variable":
+                inferred = utils.safe_infer(node)
+                if isinstance(inferred, nodes.ClassDef):
+                    return True
+            return False
+
+        if self._name_allowed_by_regex(name=name):
+            return
+        if self._name_disallowed_by_regex(name=name):
+            self.linter.stats.increase_bad_name(node_type, 1)
+            self.add_message(
+                "disallowed-name", node=node, args=name, confidence=interfaces.HIGH
+            )
+            return
+        regexp = self._name_regexps[node_type]
+        match = regexp.match(name)
+
+        if _is_multi_naming_match(match, node_type, confidence):
+            name_group = self._find_name_group(node_type)
+            bad_name_group = self._bad_names.setdefault(name_group, {})
+            # Ignored because this is checked by the if statement
+            warnings = bad_name_group.setdefault(match.lastgroup, [])  # type: ignore[union-attr, arg-type]
+            warnings.append((node, node_type, name, confidence))
+
+        if (
+            match is None
+            and not disallowed_check_only
+            and not _should_exempt_from_invalid_name(node)
+        ):
+            self._raise_name_warning(None, node, node_type, name, confidence)
+
+        # Check TypeVar names for variance suffixes
+        if node_type == "typevar":
+            self._check_typevar(name, node)

     @staticmethod
-    def _assigns_typevar(node: (nodes.NodeNG | None)) ->bool:
+    def _assigns_typevar(node: nodes.NodeNG | None) -> bool:
         """Check if a node is assigning a TypeVar."""
-        pass
+        if isinstance(node, astroid.Call):
+            inferred = utils.safe_infer(node.func)
+            if (
+                isinstance(inferred, astroid.ClassDef)
+                and inferred.qname() in TYPE_VAR_QNAME
+            ):
+                return True
+        return False

     @staticmethod
-    def _assigns_typealias(node: (nodes.NodeNG | None)) ->bool:
+    def _assigns_typealias(node: nodes.NodeNG | None) -> bool:
         """Check if a node is assigning a TypeAlias."""
-        pass
+        inferred = utils.safe_infer(node)
+        if isinstance(inferred, nodes.ClassDef):
+            qname = inferred.qname()
+            if qname == "typing.TypeAlias":
+                return True
+            if qname == ".Union":
+                # Union is a special case because it can be used as a type alias
+                # or as a type annotation. We only want to check the former.
+                assert node is not None
+                return not isinstance(node.parent, nodes.AnnAssign)
+        elif isinstance(inferred, nodes.FunctionDef):
+            # TODO: when py3.12 is minimum, remove this condition
+            # TypeAlias became a class in python 3.12
+            if inferred.qname() == "typing.TypeAlias":
+                return True
+        return False

-    def _check_typevar(self, name: str, node: nodes.AssignName) ->None:
+    def _check_typevar(self, name: str, node: nodes.AssignName) -> None:
         """Check for TypeVar lint violations."""
-        pass
+        variance: TypeVarVariance = TypeVarVariance.invariant
+        if isinstance(node.parent, nodes.Assign):
+            keywords = node.assign_type().value.keywords
+            args = node.assign_type().value.args
+        elif isinstance(node.parent, nodes.Tuple):
+            keywords = (
+                node.assign_type().value.elts[node.parent.elts.index(node)].keywords
+            )
+            args = node.assign_type().value.elts[node.parent.elts.index(node)].args
+        else:  # PEP 695 generic type nodes
+            keywords = ()
+            args = ()
+            variance = TypeVarVariance.inferred
+
+        name_arg = None
+        for kw in keywords:
+            if variance == TypeVarVariance.double_variant:
+                pass
+            elif kw.arg == "covariant" and kw.value.value:
+                variance = (
+                    TypeVarVariance.covariant
+                    if variance != TypeVarVariance.contravariant
+                    else TypeVarVariance.double_variant
+                )
+            elif kw.arg == "contravariant" and kw.value.value:
+                variance = (
+                    TypeVarVariance.contravariant
+                    if variance != TypeVarVariance.covariant
+                    else TypeVarVariance.double_variant
+                )
+
+            if kw.arg == "name" and isinstance(kw.value, nodes.Const):
+                name_arg = kw.value.value
+
+        if name_arg is None and args and isinstance(args[0], nodes.Const):
+            name_arg = args[0].value
+
+        if variance == TypeVarVariance.inferred:
+            # Ignore variance check for PEP 695 type parameters.
+            # The variance is inferred by the type checker.
+            # Adding _co or _contra suffix can help to reason about TypeVar.
+            pass
+        elif variance == TypeVarVariance.double_variant:
+            self.add_message(
+                "typevar-double-variance",
+                node=node,
+                confidence=interfaces.INFERENCE,
+            )
+            self.add_message(
+                "typevar-name-incorrect-variance",
+                node=node,
+                args=("",),
+                confidence=interfaces.INFERENCE,
+            )
+        elif variance == TypeVarVariance.covariant and not name.endswith("_co"):
+            suggest_name = f"{re.sub('_contra$', '', name)}_co"
+            self.add_message(
+                "typevar-name-incorrect-variance",
+                node=node,
+                args=(f'. "{name}" is covariant, use "{suggest_name}" instead'),
+                confidence=interfaces.INFERENCE,
+            )
+        elif variance == TypeVarVariance.contravariant and not name.endswith("_contra"):
+            suggest_name = f"{re.sub('_co$', '', name)}_contra"
+            self.add_message(
+                "typevar-name-incorrect-variance",
+                node=node,
+                args=(f'. "{name}" is contravariant, use "{suggest_name}" instead'),
+                confidence=interfaces.INFERENCE,
+            )
+        elif variance == TypeVarVariance.invariant and (
+            name.endswith(("_co", "_contra"))
+        ):
+            suggest_name = re.sub("_contra$|_co$", "", name)
+            self.add_message(
+                "typevar-name-incorrect-variance",
+                node=node,
+                args=(f'. "{name}" is invariant, use "{suggest_name}" instead'),
+                confidence=interfaces.INFERENCE,
+            )
+
+        if name_arg is not None and name_arg != name:
+            self.add_message(
+                "typevar-name-mismatch",
+                node=node,
+                args=(name_arg, name),
+                confidence=interfaces.INFERENCE,
+            )
diff --git a/pylint/checkers/base/name_checker/naming_style.py b/pylint/checkers/base/name_checker/naming_style.py
index 91b831502..0198ae7d1 100644
--- a/pylint/checkers/base/name_checker/naming_style.py
+++ b/pylint/checkers/base/name_checker/naming_style.py
@@ -1,6 +1,12 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import re
 from re import Pattern
+
 from pylint import constants
 from pylint.typing import OptionDict, Options

@@ -12,7 +18,8 @@ class NamingStyle:
     forms of regular expressions, but we need to special-case stuff like dunder
     names in method names.
     """
-    ANY: Pattern[str] = re.compile('.*')
+
+    ANY: Pattern[str] = re.compile(".*")
     CLASS_NAME_RGX: Pattern[str] = ANY
     MOD_NAME_RGX: Pattern[str] = ANY
     CONST_NAME_RGX: Pattern[str] = ANY
@@ -20,64 +27,159 @@ class NamingStyle:
     DEFAULT_NAME_RGX: Pattern[str] = ANY
     CLASS_ATTRIBUTE_RGX: Pattern[str] = ANY

+    @classmethod
+    def get_regex(cls, name_type: str) -> Pattern[str]:
+        return {
+            "module": cls.MOD_NAME_RGX,
+            "const": cls.CONST_NAME_RGX,
+            "class": cls.CLASS_NAME_RGX,
+            "function": cls.DEFAULT_NAME_RGX,
+            "method": cls.DEFAULT_NAME_RGX,
+            "attr": cls.DEFAULT_NAME_RGX,
+            "argument": cls.DEFAULT_NAME_RGX,
+            "variable": cls.DEFAULT_NAME_RGX,
+            "class_attribute": cls.CLASS_ATTRIBUTE_RGX,
+            "class_const": cls.CONST_NAME_RGX,
+            "inlinevar": cls.COMP_VAR_RGX,
+        }[name_type]
+

 class SnakeCaseStyle(NamingStyle):
     """Regex rules for snake_case naming style."""
-    CLASS_NAME_RGX = re.compile('[^\\W\\dA-Z][^\\WA-Z]*$')
-    MOD_NAME_RGX = re.compile('[^\\W\\dA-Z][^\\WA-Z]*$')
-    CONST_NAME_RGX = re.compile('([^\\W\\dA-Z][^\\WA-Z]*|__.*__)$')
+
+    CLASS_NAME_RGX = re.compile(r"[^\W\dA-Z][^\WA-Z]*$")
+    MOD_NAME_RGX = re.compile(r"[^\W\dA-Z][^\WA-Z]*$")
+    CONST_NAME_RGX = re.compile(r"([^\W\dA-Z][^\WA-Z]*|__.*__)$")
     COMP_VAR_RGX = CLASS_NAME_RGX
     DEFAULT_NAME_RGX = re.compile(
-        '([^\\W\\dA-Z][^\\WA-Z]*|_[^\\WA-Z]*|__[^\\WA-Z\\d_][^\\WA-Z]+__)$')
-    CLASS_ATTRIBUTE_RGX = re.compile('([^\\W\\dA-Z][^\\WA-Z]*|__.*__)$')
+        r"([^\W\dA-Z][^\WA-Z]*|_[^\WA-Z]*|__[^\WA-Z\d_][^\WA-Z]+__)$"
+    )
+    CLASS_ATTRIBUTE_RGX = re.compile(r"([^\W\dA-Z][^\WA-Z]*|__.*__)$")


 class CamelCaseStyle(NamingStyle):
     """Regex rules for camelCase naming style."""
-    CLASS_NAME_RGX = re.compile('[^\\W\\dA-Z][^\\W_]*$')
-    MOD_NAME_RGX = re.compile('[^\\W\\dA-Z][^\\W_]*$')
-    CONST_NAME_RGX = re.compile('([^\\W\\dA-Z][^\\W_]*|__.*__)$')
+
+    CLASS_NAME_RGX = re.compile(r"[^\W\dA-Z][^\W_]*$")
+    MOD_NAME_RGX = re.compile(r"[^\W\dA-Z][^\W_]*$")
+    CONST_NAME_RGX = re.compile(r"([^\W\dA-Z][^\W_]*|__.*__)$")
     COMP_VAR_RGX = MOD_NAME_RGX
-    DEFAULT_NAME_RGX = re.compile(
-        '([^\\W\\dA-Z][^\\W_]*|__[^\\W\\dA-Z_]\\w+__)$')
-    CLASS_ATTRIBUTE_RGX = re.compile('([^\\W\\dA-Z][^\\W_]*|__.*__)$')
+    DEFAULT_NAME_RGX = re.compile(r"([^\W\dA-Z][^\W_]*|__[^\W\dA-Z_]\w+__)$")
+    CLASS_ATTRIBUTE_RGX = re.compile(r"([^\W\dA-Z][^\W_]*|__.*__)$")


 class PascalCaseStyle(NamingStyle):
     """Regex rules for PascalCase naming style."""
-    CLASS_NAME_RGX = re.compile('[^\\W\\da-z][^\\W_]*$')
+
+    CLASS_NAME_RGX = re.compile(r"[^\W\da-z][^\W_]*$")
     MOD_NAME_RGX = CLASS_NAME_RGX
-    CONST_NAME_RGX = re.compile('([^\\W\\da-z][^\\W_]*|__.*__)$')
+    CONST_NAME_RGX = re.compile(r"([^\W\da-z][^\W_]*|__.*__)$")
     COMP_VAR_RGX = CLASS_NAME_RGX
-    DEFAULT_NAME_RGX = re.compile(
-        '([^\\W\\da-z][^\\W_]*|__[^\\W\\dA-Z_]\\w+__)$')
-    CLASS_ATTRIBUTE_RGX = re.compile('[^\\W\\da-z][^\\W_]*$')
+    DEFAULT_NAME_RGX = re.compile(r"([^\W\da-z][^\W_]*|__[^\W\dA-Z_]\w+__)$")
+    CLASS_ATTRIBUTE_RGX = re.compile(r"[^\W\da-z][^\W_]*$")


 class UpperCaseStyle(NamingStyle):
     """Regex rules for UPPER_CASE naming style."""
-    CLASS_NAME_RGX = re.compile('[^\\W\\da-z][^\\Wa-z]*$')
+
+    CLASS_NAME_RGX = re.compile(r"[^\W\da-z][^\Wa-z]*$")
     MOD_NAME_RGX = CLASS_NAME_RGX
-    CONST_NAME_RGX = re.compile('([^\\W\\da-z][^\\Wa-z]*|__.*__)$')
+    CONST_NAME_RGX = re.compile(r"([^\W\da-z][^\Wa-z]*|__.*__)$")
     COMP_VAR_RGX = CLASS_NAME_RGX
-    DEFAULT_NAME_RGX = re.compile(
-        '([^\\W\\da-z][^\\Wa-z]*|__[^\\W\\dA-Z_]\\w+__)$')
-    CLASS_ATTRIBUTE_RGX = re.compile('[^\\W\\da-z][^\\Wa-z]*$')
+    DEFAULT_NAME_RGX = re.compile(r"([^\W\da-z][^\Wa-z]*|__[^\W\dA-Z_]\w+__)$")
+    CLASS_ATTRIBUTE_RGX = re.compile(r"[^\W\da-z][^\Wa-z]*$")


 class AnyStyle(NamingStyle):
     pass


-NAMING_STYLES = {'snake_case': SnakeCaseStyle, 'camelCase': CamelCaseStyle,
-    'PascalCase': PascalCaseStyle, 'UPPER_CASE': UpperCaseStyle, 'any':
-    AnyStyle}
-KNOWN_NAME_TYPES_WITH_STYLE = {'module', 'const', 'class', 'function',
-    'method', 'attr', 'argument', 'variable', 'class_attribute',
-    'class_const', 'inlinevar'}
-DEFAULT_NAMING_STYLES = {'module': 'snake_case', 'const': 'UPPER_CASE',
-    'class': 'PascalCase', 'function': 'snake_case', 'method': 'snake_case',
-    'attr': 'snake_case', 'argument': 'snake_case', 'variable':
-    'snake_case', 'class_attribute': 'any', 'class_const': 'UPPER_CASE',
-    'inlinevar': 'any'}
-KNOWN_NAME_TYPES = {*KNOWN_NAME_TYPES_WITH_STYLE, 'typevar', 'typealias'}
+NAMING_STYLES = {
+    "snake_case": SnakeCaseStyle,
+    "camelCase": CamelCaseStyle,
+    "PascalCase": PascalCaseStyle,
+    "UPPER_CASE": UpperCaseStyle,
+    "any": AnyStyle,
+}
+
+# Name types that have a style option
+KNOWN_NAME_TYPES_WITH_STYLE = {
+    "module",
+    "const",
+    "class",
+    "function",
+    "method",
+    "attr",
+    "argument",
+    "variable",
+    "class_attribute",
+    "class_const",
+    "inlinevar",
+}
+
+
+DEFAULT_NAMING_STYLES = {
+    "module": "snake_case",
+    "const": "UPPER_CASE",
+    "class": "PascalCase",
+    "function": "snake_case",
+    "method": "snake_case",
+    "attr": "snake_case",
+    "argument": "snake_case",
+    "variable": "snake_case",
+    "class_attribute": "any",
+    "class_const": "UPPER_CASE",
+    "inlinevar": "any",
+}
+
+
+# Name types that have a 'rgx' option
+KNOWN_NAME_TYPES = {
+    *KNOWN_NAME_TYPES_WITH_STYLE,
+    "typevar",
+    "typealias",
+}
+
+
+def _create_naming_options() -> Options:
+    name_options: list[tuple[str, OptionDict]] = []
+    for name_type in sorted(KNOWN_NAME_TYPES):
+        human_readable_name = constants.HUMAN_READABLE_TYPES[name_type]
+        name_type_hyphened = name_type.replace("_", "-")
+
+        help_msg = f"Regular expression matching correct {human_readable_name} names. "
+        if name_type in KNOWN_NAME_TYPES_WITH_STYLE:
+            help_msg += f"Overrides {name_type_hyphened}-naming-style. "
+        help_msg += (
+            f"If left empty, {human_readable_name} names will be checked "
+            "with the set naming style."
+        )
+
+        # Add style option for names that support it
+        if name_type in KNOWN_NAME_TYPES_WITH_STYLE:
+            default_style = DEFAULT_NAMING_STYLES[name_type]
+            name_options.append(
+                (
+                    f"{name_type_hyphened}-naming-style",
+                    {
+                        "default": default_style,
+                        "type": "choice",
+                        "choices": list(NAMING_STYLES.keys()),
+                        "metavar": "<style>",
+                        "help": f"Naming style matching correct {human_readable_name} names.",
+                    },
+                )
+            )
+
+        name_options.append(
+            (
+                f"{name_type_hyphened}-rgx",
+                {
+                    "default": None,
+                    "type": "regexp",
+                    "metavar": "<regexp>",
+                    "help": help_msg,
+                },
+            )
+        )
+    return tuple(name_options)
diff --git a/pylint/checkers/base/pass_checker.py b/pylint/checkers/base/pass_checker.py
index 145946d00..19952ca4f 100644
--- a/pylint/checkers/base/pass_checker.py
+++ b/pylint/checkers/base/pass_checker.py
@@ -1,10 +1,29 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from astroid import nodes
+
 from pylint.checkers import utils
 from pylint.checkers.base.basic_checker import _BasicChecker


 class PassChecker(_BasicChecker):
     """Check if the pass statement is really necessary."""
-    msgs = {'W0107': ('Unnecessary pass statement', 'unnecessary-pass',
-        'Used when a "pass" statement can be removed without affecting the behaviour of the code.'
-        )}
+
+    msgs = {
+        "W0107": (
+            "Unnecessary pass statement",
+            "unnecessary-pass",
+            'Used when a "pass" statement can be removed without affecting '
+            "the behaviour of the code.",
+        )
+    }
+
+    @utils.only_required_for_messages("unnecessary-pass")
+    def visit_pass(self, node: nodes.Pass) -> None:
+        if len(node.parent.child_sequence(node)) > 1 or (
+            isinstance(node.parent, (nodes.ClassDef, nodes.FunctionDef))
+            and node.parent.doc_node
+        ):
+            self.add_message("unnecessary-pass", node=node)
diff --git a/pylint/checkers/base_checker.py b/pylint/checkers/base_checker.py
index 7979baa3b..6d577e0bd 100644
--- a/pylint/checkers/base_checker.py
+++ b/pylint/checkers/base_checker.py
@@ -1,38 +1,57 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import abc
 import functools
 from collections.abc import Iterable, Sequence
 from inspect import cleandoc
 from tokenize import TokenInfo
 from typing import TYPE_CHECKING, Any
+
 from astroid import nodes
+
 from pylint.config.arguments_provider import _ArgumentsProvider
 from pylint.constants import _MSG_ORDER, MAIN_CHECKER_NAME, WarningScope
 from pylint.exceptions import InvalidMessageError
 from pylint.interfaces import Confidence
 from pylint.message.message_definition import MessageDefinition
-from pylint.typing import ExtraMessageOptions, MessageDefinitionTuple, OptionDict, Options, ReportsCallable
+from pylint.typing import (
+    ExtraMessageOptions,
+    MessageDefinitionTuple,
+    OptionDict,
+    Options,
+    ReportsCallable,
+)
 from pylint.utils import get_rst_section, get_rst_title
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 @functools.total_ordering
 class BaseChecker(_ArgumentsProvider):
-    name: str = ''
+    # checker name (you may reuse an existing one)
+    name: str = ""
+    # ordered list of options to control the checker behaviour
     options: Options = ()
+    # messages issued by this checker
     msgs: dict[str, MessageDefinitionTuple] = {}
+    # reports issued by this checker
     reports: tuple[tuple[str, str, ReportsCallable], ...] = ()
+    # mark this checker as enabled or not.
     enabled: bool = True

-    def __init__(self, linter: PyLinter) ->None:
+    def __init__(self, linter: PyLinter) -> None:
         """Checker instances should have the linter as argument."""
         if self.name is not None:
             self.name = self.name.lower()
         self.linter = linter
         _ArgumentsProvider.__init__(self, linter)

-    def __gt__(self, other: Any) ->bool:
+    def __gt__(self, other: Any) -> bool:
         """Permits sorting checkers for stable doc and tests.

         The main checker is always the first one, then builtin checkers in alphabetical
@@ -44,37 +63,98 @@ class BaseChecker(_ArgumentsProvider):
             return False
         if other.name == MAIN_CHECKER_NAME:
             return True
-        self_is_builtin = type(self).__module__.startswith('pylint.checkers')
-        if self_is_builtin ^ type(other).__module__.startswith(
-            'pylint.checkers'):
+        self_is_builtin = type(self).__module__.startswith("pylint.checkers")
+        if self_is_builtin ^ type(other).__module__.startswith("pylint.checkers"):
             return not self_is_builtin
         return self.name > other.name

-    def __eq__(self, other: object) ->bool:
+    def __eq__(self, other: object) -> bool:
         """Permit to assert Checkers are equal."""
         if not isinstance(other, BaseChecker):
             return False
-        return f'{self.name}{self.msgs}' == f'{other.name}{other.msgs}'
+        return f"{self.name}{self.msgs}" == f"{other.name}{other.msgs}"

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         """Make Checker hashable."""
-        return hash(f'{self.name}{self.msgs}')
+        return hash(f"{self.name}{self.msgs}")

-    def __repr__(self) ->str:
-        status = 'Checker' if self.enabled else 'Disabled checker'
+    def __repr__(self) -> str:
+        status = "Checker" if self.enabled else "Disabled checker"
         msgs = "', '".join(self.msgs.keys())
         return f"{status} '{self.name}' (responsible for '{msgs}')"

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         """This might be incomplete because multiple classes inheriting BaseChecker
         can have the same name.

         See: MessageHandlerMixIn.get_full_documentation()
         """
-        return self.get_full_documentation(msgs=self.msgs, options=self.
-            _options_and_values(), reports=self.reports)
-
-    def check_consistency(self) ->None:
+        return self.get_full_documentation(
+            msgs=self.msgs, options=self._options_and_values(), reports=self.reports
+        )
+
+    def get_full_documentation(
+        self,
+        msgs: dict[str, MessageDefinitionTuple],
+        options: Iterable[tuple[str, OptionDict, Any]],
+        reports: Sequence[tuple[str, str, ReportsCallable]],
+        doc: str | None = None,
+        module: str | None = None,
+        show_options: bool = True,
+    ) -> str:
+        result = ""
+        checker_title = f"{self.name.replace('_', ' ').title()} checker"
+        if module:
+            # Provide anchor to link against
+            result += f".. _{module}:\n\n"
+        result += f"{get_rst_title(checker_title, '~')}\n"
+        if module:
+            result += f"This checker is provided by ``{module}``.\n"
+        result += f"Verbatim name of the checker is ``{self.name}``.\n\n"
+        if doc:
+            # Provide anchor to link against
+            result += get_rst_title(f"{checker_title} Documentation", "^")
+            result += f"{cleandoc(doc)}\n\n"
+        # options might be an empty generator and not be False when cast to boolean
+        options_list = list(options)
+        if options_list:
+            if show_options:
+                result += get_rst_title(f"{checker_title} Options", "^")
+                result += f"{get_rst_section(None, options_list)}\n"
+            else:
+                result += f"See also :ref:`{self.name} checker's options' documentation <{self.name}-options>`\n\n"
+        if msgs:
+            result += get_rst_title(f"{checker_title} Messages", "^")
+            for msgid, msg in sorted(
+                msgs.items(), key=lambda kv: (_MSG_ORDER.index(kv[0][0]), kv[1])
+            ):
+                msg_def = self.create_message_definition_from_tuple(msgid, msg)
+                result += f"{msg_def.format_help(checkerref=False)}\n"
+            result += "\n"
+        if reports:
+            result += get_rst_title(f"{checker_title} Reports", "^")
+            for report in reports:
+                result += f":{report[0]}: {report[1]}\n"
+            result += "\n"
+        result += "\n"
+        return result
+
+    def add_message(
+        self,
+        msgid: str,
+        line: int | None = None,
+        node: nodes.NodeNG | None = None,
+        args: Any = None,
+        confidence: Confidence | None = None,
+        col_offset: int | None = None,
+        end_lineno: int | None = None,
+        end_col_offset: int | None = None,
+    ) -> None:
+        self.linter.add_message(
+            msgid, line, node, args, confidence, col_offset, end_lineno, end_col_offset
+        )
+
+    def check_consistency(self) -> None:
         """Check the consistency of msgid.

         msg ids for a checker should be a string of len 4, where the two first
@@ -84,33 +164,85 @@ class BaseChecker(_ArgumentsProvider):
         :raises InvalidMessageError: If the checker id in the messages are not
         always the same.
         """
-        pass
-
-    def open(self) ->None:
+        checker_id = None
+        existing_ids = []
+        for message in self.messages:
+            # Id's for shared messages such as the 'deprecated-*' messages
+            # can be inconsistent with their checker id.
+            if message.shared:
+                continue
+            if checker_id is not None and checker_id != message.msgid[1:3]:
+                error_msg = "Inconsistent checker part in message id "
+                error_msg += f"'{message.msgid}' (expected 'x{checker_id}xx' "
+                error_msg += f"because we already had {existing_ids})."
+                raise InvalidMessageError(error_msg)
+            checker_id = message.msgid[1:3]
+            existing_ids.append(message.msgid)
+
+    def create_message_definition_from_tuple(
+        self, msgid: str, msg_tuple: MessageDefinitionTuple
+    ) -> MessageDefinition:
+        if isinstance(self, (BaseTokenChecker, BaseRawFileChecker)):
+            default_scope = WarningScope.LINE
+        else:
+            default_scope = WarningScope.NODE
+        options: ExtraMessageOptions = {}
+        if len(msg_tuple) == 4:
+            (msg, symbol, descr, msg_options) = msg_tuple
+            options = ExtraMessageOptions(**msg_options)
+        elif len(msg_tuple) == 3:
+            (msg, symbol, descr) = msg_tuple
+        else:
+            error_msg = """Messages should have a msgid, a symbol and a description. Something like this :
+
+"W1234": (
+    "message",
+    "message-symbol",
+    "Message description with detail.",
+    ...
+),
+"""
+            raise InvalidMessageError(error_msg)
+        options.setdefault("scope", default_scope)
+        return MessageDefinition(self, msgid, msg, descr, symbol, **options)
+
+    @property
+    def messages(self) -> list[MessageDefinition]:
+        return [
+            self.create_message_definition_from_tuple(msgid, msg_tuple)
+            for msgid, msg_tuple in sorted(self.msgs.items())
+        ]
+
+    def open(self) -> None:
         """Called before visiting project (i.e. set of modules)."""
-        pass

-    def close(self) ->None:
+    def close(self) -> None:
         """Called after visiting project (i.e set of modules)."""
-        pass
+
+    def get_map_data(self) -> Any:
+        return None
+
+    # pylint: disable-next=unused-argument
+    def reduce_map_data(self, linter: PyLinter, data: list[Any]) -> None:
+        return None


 class BaseTokenChecker(BaseChecker):
     """Base class for checkers that want to have access to the token stream."""

     @abc.abstractmethod
-    def process_tokens(self, tokens: list[TokenInfo]) ->None:
+    def process_tokens(self, tokens: list[TokenInfo]) -> None:
         """Should be overridden by subclasses."""
-        pass
+        raise NotImplementedError()


 class BaseRawFileChecker(BaseChecker):
     """Base class for checkers which need to parse the raw file."""

     @abc.abstractmethod
-    def process_module(self, node: nodes.Module) ->None:
+    def process_module(self, node: nodes.Module) -> None:
         """Process a module.

         The module's content is accessible via ``astroid.stream``
         """
-        pass
+        raise NotImplementedError()
diff --git a/pylint/checkers/classes/class_checker.py b/pylint/checkers/classes/class_checker.py
index 01bad6883..c3e1a081c 100644
--- a/pylint/checkers/classes/class_checker.py
+++ b/pylint/checkers/classes/class_checker.py
@@ -1,30 +1,67 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Classes checker for Python code."""
+
 from __future__ import annotations
+
 from collections import defaultdict
 from collections.abc import Callable, Sequence
 from functools import cached_property
 from itertools import chain, zip_longest
 from re import Pattern
 from typing import TYPE_CHECKING, Any, NamedTuple, Union
+
 import astroid
 from astroid import bases, nodes, util
 from astroid.nodes import LocalsDictNodeNG
 from astroid.typing import SuccessfulInferenceResult
+
 from pylint.checkers import BaseChecker, utils
-from pylint.checkers.utils import PYMETHODS, class_is_abstract, decorated_with, decorated_with_property, get_outer_class, has_known_bases, is_attr_private, is_attr_protected, is_builtin_object, is_comprehension, is_iterable, is_property_setter, is_property_setter_or_deleter, node_frame_class, only_required_for_messages, safe_infer, unimplemented_abstract_methods, uninferable_final_decorators
+from pylint.checkers.utils import (
+    PYMETHODS,
+    class_is_abstract,
+    decorated_with,
+    decorated_with_property,
+    get_outer_class,
+    has_known_bases,
+    is_attr_private,
+    is_attr_protected,
+    is_builtin_object,
+    is_comprehension,
+    is_iterable,
+    is_property_setter,
+    is_property_setter_or_deleter,
+    node_frame_class,
+    only_required_for_messages,
+    safe_infer,
+    unimplemented_abstract_methods,
+    uninferable_final_decorators,
+)
 from pylint.interfaces import HIGH, INFERENCE
 from pylint.typing import MessageDefinitionTuple
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter
+
+
 _AccessNodes = Union[nodes.Attribute, nodes.AssignAttr]
-INVALID_BASE_CLASSES = {'bool', 'range', 'slice', 'memoryview'}
-ALLOWED_PROPERTIES = {'bultins.property', 'functools.cached_property'}
-BUILTIN_DECORATORS = {'builtins.property', 'builtins.classmethod'}
-ASTROID_TYPE_COMPARATORS = {nodes.Const: lambda a, b: a.value == b.value,
-    nodes.ClassDef: lambda a, b: a.qname == b.qname, nodes.Tuple: lambda a,
-    b: a.elts == b.elts, nodes.List: lambda a, b: a.elts == b.elts, nodes.
-    Dict: lambda a, b: a.items == b.items, nodes.Name: lambda a, b: set(a.
-    infer()) == set(b.infer())}
+
+INVALID_BASE_CLASSES = {"bool", "range", "slice", "memoryview"}
+ALLOWED_PROPERTIES = {"builtins.property", "functools.cached_property"}
+BUILTIN_DECORATORS = {"builtins.property", "builtins.classmethod"}
+ASTROID_TYPE_COMPARATORS = {
+    nodes.Const: lambda a, b: a.value == b.value,
+    nodes.ClassDef: lambda a, b: a.qname == b.qname,
+    nodes.Tuple: lambda a, b: a.elts == b.elts,
+    nodes.List: lambda a, b: a.elts == b.elts,
+    nodes.Dict: lambda a, b: a.items == b.items,
+    nodes.Name: lambda a, b: set(a.infer()) == set(b.infer()),
+}
+
+# Dealing with useless override detection, with regard
+# to parameters vs arguments


 class _CallSignature(NamedTuple):
@@ -41,17 +78,131 @@ class _ParameterSignature(NamedTuple):
     kwargs: str


-def _definition_equivalent_to_call(definition: _ParameterSignature, call:
-    _CallSignature) ->bool:
+def _signature_from_call(call: nodes.Call) -> _CallSignature:
+    kws = {}
+    args = []
+    starred_kws = []
+    starred_args = []
+    for keyword in call.keywords or []:
+        arg, value = keyword.arg, keyword.value
+        if arg is None and isinstance(value, nodes.Name):
+            # Starred node, and we are interested only in names,
+            # otherwise some transformation might occur for the parameter.
+            starred_kws.append(value.name)
+        elif isinstance(value, nodes.Name):
+            kws[arg] = value.name
+        else:
+            kws[arg] = None
+
+    for arg in call.args:
+        if isinstance(arg, nodes.Starred) and isinstance(arg.value, nodes.Name):
+            # Positional variadic and a name, otherwise some transformation
+            # might have occurred.
+            starred_args.append(arg.value.name)
+        elif isinstance(arg, nodes.Name):
+            args.append(arg.name)
+        else:
+            args.append(None)
+
+    return _CallSignature(args, kws, starred_args, starred_kws)
+
+
+def _signature_from_arguments(arguments: nodes.Arguments) -> _ParameterSignature:
+    kwarg = arguments.kwarg
+    vararg = arguments.vararg
+    args = [
+        arg.name
+        for arg in chain(arguments.posonlyargs, arguments.args)
+        if arg.name != "self"
+    ]
+    kwonlyargs = [arg.name for arg in arguments.kwonlyargs]
+    return _ParameterSignature(args, kwonlyargs, vararg, kwarg)
+
+
+def _definition_equivalent_to_call(
+    definition: _ParameterSignature, call: _CallSignature
+) -> bool:
     """Check if a definition signature is equivalent to a call."""
-    pass
-
-
-def _is_trivial_super_delegation(function: nodes.FunctionDef) ->bool:
+    if definition.kwargs:
+        if definition.kwargs not in call.starred_kws:
+            return False
+    elif call.starred_kws:
+        return False
+    if definition.varargs:
+        if definition.varargs not in call.starred_args:
+            return False
+    elif call.starred_args:
+        return False
+    if any(kw not in call.kws for kw in definition.kwonlyargs):
+        return False
+    if definition.args != call.args:
+        return False
+
+    # No extra kwargs in call.
+    return all(kw in call.args or kw in definition.kwonlyargs for kw in call.kws)
+
+
+def _is_trivial_super_delegation(function: nodes.FunctionDef) -> bool:
     """Check whether a function definition is a method consisting only of a
     call to the same function on the superclass.
     """
-    pass
+    if (
+        not function.is_method()
+        # Adding decorators to a function changes behavior and
+        # constitutes a non-trivial change.
+        or function.decorators
+    ):
+        return False
+
+    body = function.body
+    if len(body) != 1:
+        # Multiple statements, which means this overridden method
+        # could do multiple things we are not aware of.
+        return False
+
+    statement = body[0]
+    if not isinstance(statement, (nodes.Expr, nodes.Return)):
+        # Doing something else than what we are interested in.
+        return False
+
+    call = statement.value
+    if (
+        not isinstance(call, nodes.Call)
+        # Not a super() attribute access.
+        or not isinstance(call.func, nodes.Attribute)
+    ):
+        return False
+
+    # Anything other than a super call is non-trivial.
+    super_call = safe_infer(call.func.expr)
+    if not isinstance(super_call, astroid.objects.Super):
+        return False
+
+    # The name should be the same.
+    if call.func.attrname != function.name:
+        return False
+
+    # Should be a super call with the MRO pointer being the
+    # current class and the type being the current instance.
+    current_scope = function.parent.scope()
+    if (
+        super_call.mro_pointer != current_scope
+        or not isinstance(super_call.type, astroid.Instance)
+        or super_call.type.name != current_scope.name
+    ):
+        return False
+
+    return True
+
+
+# Deal with parameters overriding in two methods.
+
+
+def _positional_parameters(method: nodes.FunctionDef) -> list[nodes.AssignName]:
+    positional = method.args.args
+    if method.is_bound() and method.type in {"classmethod", "method"}:
+        positional = positional[1:]
+    return positional  # type: ignore[no-any-return]


 class _DefaultMissing:
@@ -61,8 +212,9 @@ class _DefaultMissing:
 _DEFAULT_MISSING = _DefaultMissing()


-def _has_different_parameters_default_value(original: nodes.Arguments,
-    overridden: nodes.Arguments) ->bool:
+def _has_different_parameters_default_value(
+    original: nodes.Arguments, overridden: nodes.Arguments
+) -> bool:
     """Check if original and overridden methods arguments have different default values.

     Return True if one of the overridden arguments has a default
@@ -70,17 +222,101 @@ def _has_different_parameters_default_value(original: nodes.Arguments,
     If one of the method doesn't have argument (.args is None)
     return False
     """
-    pass
+    if original.args is None or overridden.args is None:
+        return False
+
+    for param in chain(original.args, original.kwonlyargs):
+        try:
+            original_default = original.default_value(param.name)
+        except astroid.exceptions.NoDefault:
+            original_default = _DEFAULT_MISSING
+        try:
+            overridden_default = overridden.default_value(param.name)
+            if original_default is _DEFAULT_MISSING:
+                # Only the original has a default.
+                return True
+        except astroid.exceptions.NoDefault:
+            if original_default is _DEFAULT_MISSING:
+                # Both have a default, no difference
+                continue
+            # Only the override has a default.
+            return True
+
+        original_type = type(original_default)
+        if not isinstance(overridden_default, original_type):
+            # Two args with same name but different types
+            return True
+        is_same_fn: Callable[[Any, Any], bool] | None = ASTROID_TYPE_COMPARATORS.get(
+            original_type
+        )
+        if is_same_fn is None:
+            # If the default value comparison is unhandled, assume the value is different
+            return True
+        if not is_same_fn(original_default, overridden_default):
+            # Two args with same type but different values
+            return True
+    return False


-def _has_different_keyword_only_parameters(original: list[nodes.AssignName],
-    overridden: list[nodes.AssignName]) ->list[str]:
+def _has_different_parameters(
+    original: list[nodes.AssignName],
+    overridden: list[nodes.AssignName],
+    dummy_parameter_regex: Pattern[str],
+) -> list[str]:
+    result: list[str] = []
+    zipped = zip_longest(original, overridden)
+    for original_param, overridden_param in zipped:
+        if not overridden_param:
+            return ["Number of parameters "]
+
+        if not original_param:
+            try:
+                overridden_param.parent.default_value(overridden_param.name)
+                continue
+            except astroid.NoDefault:
+                return ["Number of parameters "]
+
+        # check for the arguments' name
+        names = [param.name for param in (original_param, overridden_param)]
+        if any(dummy_parameter_regex.match(name) for name in names):
+            continue
+        if original_param.name != overridden_param.name:
+            result.append(
+                f"Parameter '{original_param.name}' has been renamed "
+                f"to '{overridden_param.name}' in"
+            )
+
+    return result
+
+
+def _has_different_keyword_only_parameters(
+    original: list[nodes.AssignName],
+    overridden: list[nodes.AssignName],
+) -> list[str]:
     """Determine if the two methods have different keyword only parameters."""
-    pass
+    original_names = [i.name for i in original]
+    overridden_names = [i.name for i in overridden]
+
+    if any(name not in overridden_names for name in original_names):
+        return ["Number of parameters "]
+
+    for name in overridden_names:
+        if name in original_names:
+            continue

+        try:
+            overridden[0].parent.default_value(name)
+        except astroid.NoDefault:
+            return ["Number of parameters "]

-def _different_parameters(original: nodes.FunctionDef, overridden: nodes.
-    FunctionDef, dummy_parameter_regex: Pattern[str]) ->list[str]:
+    return []
+
+
+def _different_parameters(
+    original: nodes.FunctionDef,
+    overridden: nodes.FunctionDef,
+    dummy_parameter_regex: Pattern[str],
+) -> list[str]:
     """Determine if the two methods have different parameters.

     They are considered to have different parameters if:
@@ -91,20 +327,125 @@ def _different_parameters(original: nodes.FunctionDef, overridden: nodes.

        * they have different keyword only parameters.
     """
-    pass
+    output_messages = []
+    original_parameters = _positional_parameters(original)
+    overridden_parameters = _positional_parameters(overridden)
+
+    # Copy kwonlyargs list so that we don't affect later function linting
+    original_kwonlyargs = original.args.kwonlyargs
+
+    # Allow positional/keyword variadic in overridden to match against any
+    # positional/keyword argument in original.
+    # Keep any arguments that are found separately in overridden to satisfy
+    # later tests
+    if overridden.args.vararg:
+        overridden_names = [v.name for v in overridden_parameters]
+        original_parameters = [
+            v for v in original_parameters if v.name in overridden_names
+        ]
+
+    if overridden.args.kwarg:
+        overridden_names = [v.name for v in overridden.args.kwonlyargs]
+        original_kwonlyargs = [
+            v for v in original.args.kwonlyargs if v.name in overridden_names
+        ]
+
+    different_positional = _has_different_parameters(
+        original_parameters, overridden_parameters, dummy_parameter_regex
+    )
+    different_kwonly = _has_different_keyword_only_parameters(
+        original_kwonlyargs, overridden.args.kwonlyargs
+    )
+    if different_kwonly and different_positional:
+        if "Number " in different_positional[0] and "Number " in different_kwonly[0]:
+            output_messages.append("Number of parameters ")
+            output_messages += different_positional[1:]
+            output_messages += different_kwonly[1:]
+        else:
+            output_messages += different_positional
+            output_messages += different_kwonly
+    else:
+        if different_positional:
+            output_messages += different_positional
+        if different_kwonly:
+            output_messages += different_kwonly
+
+    # Arguments will only violate LSP if there are variadics in the original
+    # that are then removed from the overridden
+    kwarg_lost = original.args.kwarg and not overridden.args.kwarg
+    vararg_lost = original.args.vararg and not overridden.args.vararg
+
+    if kwarg_lost or vararg_lost:
+        output_messages += ["Variadics removed in"]
+
+    if original.name in PYMETHODS:
+        # Ignore the difference for special methods. If the parameter
+        # numbers are different, then that is going to be caught by
+        # unexpected-special-method-signature.
+        # If the names are different, it doesn't matter, since they can't
+        # be used as keyword arguments anyway.
+        output_messages.clear()
+
+    return output_messages
+
+
+def _is_invalid_base_class(cls: nodes.ClassDef) -> bool:
+    return cls.name in INVALID_BASE_CLASSES and is_builtin_object(cls)
+
+
+def _has_data_descriptor(cls: nodes.ClassDef, attr: str) -> bool:
+    attributes = cls.getattr(attr)
+    for attribute in attributes:
+        try:
+            for inferred in attribute.infer():
+                if isinstance(inferred, astroid.Instance):
+                    try:
+                        inferred.getattr("__get__")
+                        inferred.getattr("__set__")
+                    except astroid.NotFoundError:
+                        continue
+                    else:
+                        return True
+        except astroid.InferenceError:
+            # Can't infer, avoid emitting a false positive in this case.
+            return True
+    return False


-def _called_in_methods(func: LocalsDictNodeNG, klass: nodes.ClassDef,
-    methods: Sequence[str]) ->bool:
+def _called_in_methods(
+    func: LocalsDictNodeNG,
+    klass: nodes.ClassDef,
+    methods: Sequence[str],
+) -> bool:
     """Check if the func was called in any of the given methods,
     belonging to the *klass*.

     Returns True if so, False otherwise.
     """
-    pass
+    if not isinstance(func, nodes.FunctionDef):
+        return False
+    for method in methods:
+        try:
+            inferred = klass.getattr(method)
+        except astroid.NotFoundError:
+            continue
+        for infer_method in inferred:
+            for call in infer_method.nodes_of_class(nodes.Call):
+                try:
+                    bound = next(call.func.infer())
+                except (astroid.InferenceError, StopIteration):
+                    continue
+                if not isinstance(bound, astroid.BoundMethod):
+                    continue
+                func_obj = bound._proxied
+                if isinstance(func_obj, astroid.UnboundMethod):
+                    func_obj = func_obj._proxied
+                if func_obj.name == func.name:
+                    return True
+    return False


-def _is_attribute_property(name: str, klass: nodes.ClassDef) ->bool:
+def _is_attribute_property(name: str, klass: nodes.ClassDef) -> bool:
     """Check if the given attribute *name* is a property in the given *klass*.

     It will look for `property` calls or for functions
@@ -113,147 +454,308 @@ def _is_attribute_property(name: str, klass: nodes.ClassDef) ->bool:
     Returns ``True`` if the name is a property in the given klass,
     ``False`` otherwise.
     """
-    pass
-
-
-MSGS: dict[str, MessageDefinitionTuple] = {'F0202': (
-    'Unable to check methods signature (%s / %s)', 'method-check-failed',
-    "Used when Pylint has been unable to check methods signature compatibility for an unexpected reason. Please report this kind if you don't make sense of it."
-    ), 'E0202': ('An attribute defined in %s line %s hides this method',
-    'method-hidden',
-    'Used when a class defines a method which is hidden by an instance attribute from an ancestor class or set by some client code.'
-    ), 'E0203': ('Access to member %r before its definition line %s',
-    'access-member-before-definition',
-    "Used when an instance member is accessed before it's actually assigned."
-    ), 'W0201': ('Attribute %r defined outside __init__',
-    'attribute-defined-outside-init',
-    'Used when an instance attribute is defined outside the __init__ method.'
-    ), 'W0212': ('Access to a protected member %s of a client class',
-    'protected-access',
-    "Used when a protected member (i.e. class member with a name beginning with an underscore) is access outside the class or a descendant of the class where it's defined."
-    ), 'W0213': (
-    'Flag member %(overlap)s shares bit positions with %(sources)s',
-    'implicit-flag-alias',
-    'Used when multiple integer values declared within an enum.IntFlag class share a common bit position.'
-    ), 'E0211': ('Method %r has no argument', 'no-method-argument',
-    'Used when a method which should have the bound instance as first argument has no argument defined.'
-    ), 'E0213': ('Method %r should have "self" as first argument',
-    'no-self-argument',
-    'Used when a method has an attribute different the "self" as first argument. This is considered as an error since this is a so common convention that you shouldn\'t break it!'
-    ), 'C0202': ('Class method %s should have %s as first argument',
-    'bad-classmethod-argument',
-    'Used when a class method has a first argument named differently than the value specified in valid-classmethod-first-arg option (default to "cls"), recommended to easily differentiate them from regular instance methods.'
-    ), 'C0203': ('Metaclass method %s should have %s as first argument',
-    'bad-mcs-method-argument',
-    'Used when a metaclass method has a first argument named differently than the value specified in valid-classmethod-first-arg option (default to "cls"), recommended to easily differentiate them from regular instance methods.'
-    ), 'C0204': (
-    'Metaclass class method %s should have %s as first argument',
-    'bad-mcs-classmethod-argument',
-    'Used when a metaclass class method has a first argument named differently than the value specified in valid-metaclass-classmethod-first-arg option (default to "mcs"), recommended to easily differentiate them from regular instance methods.'
-    ), 'W0211': ('Static method with %r as first argument',
-    'bad-staticmethod-argument',
-    'Used when a static method has "self" or a value specified in valid-classmethod-first-arg option or valid-metaclass-classmethod-first-arg option as first argument.'
-    ), 'W0221': ('%s %s %r method', 'arguments-differ',
-    'Used when a method has a different number of arguments than in the implemented interface or in an overridden method. Extra arguments with default values are ignored.'
-    ), 'W0222': ('Signature differs from %s %r method', 'signature-differs',
-    'Used when a method signature is different than in the implemented interface or in an overridden method.'
-    ), 'W0223': (
-    'Method %r is abstract in class %r but is not overridden in child class %r'
-    , 'abstract-method',
-    'Used when an abstract method (i.e. raise NotImplementedError) is not overridden in concrete class.'
-    ), 'W0231': ('__init__ method from base class %r is not called',
-    'super-init-not-called',
-    'Used when an ancestor class method has an __init__ method which is not called by a derived class.'
-    ), 'W0233': (
-    '__init__ method from a non direct base class %r is called',
-    'non-parent-init-called',
-    'Used when an __init__ method is called on a class which is not in the direct ancestors for the analysed class.'
-    ), 'W0246': ('Useless parent or super() delegation in method %r',
-    'useless-parent-delegation',
-    'Used whenever we can detect that an overridden method is useless, relying on parent or super() delegation to do the same thing as another method from the MRO.'
-    , {'old_names': [('W0235', 'useless-super-delegation')]}), 'W0236': (
-    'Method %r was expected to be %r, found it instead as %r',
-    'invalid-overridden-method',
-    'Used when we detect that a method was overridden in a way that does not match its base class which could result in potential bugs at runtime.'
-    ), 'W0237': ('%s %s %r method', 'arguments-renamed',
-    'Used when a method parameter has a different name than in the implemented interface or in an overridden method.'
-    ), 'W0238': ('Unused private member `%s.%s`', 'unused-private-member',
-    'Emitted when a private member of a class is defined but not used.'),
-    'W0239': (
-    'Method %r overrides a method decorated with typing.final which is defined in class %r'
-    , 'overridden-final-method',
-    'Used when a method decorated with typing.final has been overridden.'),
-    'W0240': (
-    'Class %r is a subclass of a class decorated with typing.final: %r',
-    'subclassed-final-class',
-    'Used when a class decorated with typing.final has been subclassed.'),
-    'W0244': ('Redefined slots %r in subclass',
-    'redefined-slots-in-subclass',
-    'Used when a slot is re-defined in a subclass.'), 'W0245': (
-    'Super call without brackets', 'super-without-brackets',
-    'Used when a call to super does not have brackets and thus is not an actual call and does not work as expected.'
-    ), 'E0236': (
-    'Invalid object %r in __slots__, must contain only non empty strings',
-    'invalid-slots-object',
-    'Used when an invalid (non-string) object occurs in __slots__.'),
-    'E0237': ('Assigning to attribute %r not defined in class slots',
-    'assigning-non-slot',
-    'Used when assigning to an attribute not defined in the class slots.'),
-    'E0238': ('Invalid __slots__ object', 'invalid-slots',
-    'Used when an invalid __slots__ is found in class. Only a string, an iterable or a sequence is permitted.'
-    ), 'E0239': ('Inheriting %r, which is not a class.',
-    'inherit-non-class',
-    'Used when a class inherits from something which is not a class.'),
-    'E0240': ('Inconsistent method resolution order for class %r',
-    'inconsistent-mro',
-    'Used when a class has an inconsistent method resolution order.'),
-    'E0241': ('Duplicate bases for class %r', 'duplicate-bases',
-    'Duplicate use of base classes in derived classes raise TypeErrors.'),
-    'E0242': ('Value %r in slots conflicts with class variable',
-    'class-variable-slots-conflict',
-    'Used when a value in __slots__ conflicts with a class variable, property or method.'
-    ), 'E0243': (
-    "Invalid assignment to '__class__'. Should be a class definition but got a '%s'"
-    , 'invalid-class-object',
-    'Used when an invalid object is assigned to a __class__ property. Only a class is permitted.'
-    ), 'E0244': ('Extending inherited Enum class "%s"',
-    'invalid-enum-extension',
-    'Used when a class tries to extend an inherited Enum class. Doing so will raise a TypeError at runtime.'
-    ), 'R0202': (
-    'Consider using a decorator instead of calling classmethod',
-    'no-classmethod-decorator',
-    'Used when a class method is defined without using the decorator syntax.'
-    ), 'R0203': (
-    'Consider using a decorator instead of calling staticmethod',
-    'no-staticmethod-decorator',
-    'Used when a static method is defined without using the decorator syntax.'
-    ), 'C0205': ('Class __slots__ should be a non-string iterable',
-    'single-string-used-for-slots',
-    'Used when a class __slots__ is a simple string, rather than an iterable.'
-    ), 'R0205': (
-    'Class %r inherits from object, can be safely removed from bases in python3'
-    , 'useless-object-inheritance',
-    'Used when a class inherit from object, which under python3 is implicit, hence can be safely removed from bases.'
-    ), 'R0206': ('Cannot have defined parameters for properties',
-    'property-with-parameters',
-    'Used when we detect that a property also has parameters, which are useless, given that properties cannot be called with additional arguments.'
-    )}
+    try:
+        attributes = klass.getattr(name)
+    except astroid.NotFoundError:
+        return False
+    property_name = "builtins.property"
+    for attr in attributes:
+        if isinstance(attr, util.UninferableBase):
+            continue
+        try:
+            inferred = next(attr.infer())
+        except astroid.InferenceError:
+            continue
+        if isinstance(inferred, nodes.FunctionDef) and decorated_with_property(
+            inferred
+        ):
+            return True
+        if inferred.pytype() != property_name:
+            continue
+
+        cls = node_frame_class(inferred)
+        if cls == klass.declared_metaclass():
+            continue
+        return True
+    return False
+
+
+def _has_same_layout_slots(
+    slots: list[nodes.Const | None], assigned_value: nodes.Name
+) -> bool:
+    inferred = next(assigned_value.infer())
+    if isinstance(inferred, nodes.ClassDef):
+        other_slots = inferred.slots()
+        if all(
+            first_slot and second_slot and first_slot.value == second_slot.value
+            for (first_slot, second_slot) in zip_longest(slots, other_slots)
+        ):
+            return True
+    return False
+
+
+MSGS: dict[str, MessageDefinitionTuple] = {
+    "F0202": (
+        "Unable to check methods signature (%s / %s)",
+        "method-check-failed",
+        "Used when Pylint has been unable to check methods signature "
+        "compatibility for an unexpected reason. Please report this kind "
+        "if you don't make sense of it.",
+    ),
+    "E0202": (
+        "An attribute defined in %s line %s hides this method",
+        "method-hidden",
+        "Used when a class defines a method which is hidden by an "
+        "instance attribute from an ancestor class or set by some "
+        "client code.",
+    ),
+    "E0203": (
+        "Access to member %r before its definition line %s",
+        "access-member-before-definition",
+        "Used when an instance member is accessed before it's actually assigned.",
+    ),
+    "W0201": (
+        "Attribute %r defined outside __init__",
+        "attribute-defined-outside-init",
+        "Used when an instance attribute is defined outside the __init__ method.",
+    ),
+    "W0212": (
+        "Access to a protected member %s of a client class",  # E0214
+        "protected-access",
+        "Used when a protected member (i.e. class member with a name "
+        "beginning with an underscore) is access outside the class or a "
+        "descendant of the class where it's defined.",
+    ),
+    "W0213": (
+        "Flag member %(overlap)s shares bit positions with %(sources)s",
+        "implicit-flag-alias",
+        "Used when multiple integer values declared within an enum.IntFlag "
+        "class share a common bit position.",
+    ),
+    "E0211": (
+        "Method %r has no argument",
+        "no-method-argument",
+        "Used when a method which should have the bound instance as "
+        "first argument has no argument defined.",
+    ),
+    "E0213": (
+        'Method %r should have "self" as first argument',
+        "no-self-argument",
+        'Used when a method has an attribute different the "self" as '
+        "first argument. This is considered as an error since this is "
+        "a so common convention that you shouldn't break it!",
+    ),
+    "C0202": (
+        "Class method %s should have %s as first argument",
+        "bad-classmethod-argument",
+        "Used when a class method has a first argument named differently "
+        "than the value specified in valid-classmethod-first-arg option "
+        '(default to "cls"), recommended to easily differentiate them '
+        "from regular instance methods.",
+    ),
+    "C0203": (
+        "Metaclass method %s should have %s as first argument",
+        "bad-mcs-method-argument",
+        "Used when a metaclass method has a first argument named "
+        "differently than the value specified in valid-classmethod-first"
+        '-arg option (default to "cls"), recommended to easily '
+        "differentiate them from regular instance methods.",
+    ),
+    "C0204": (
+        "Metaclass class method %s should have %s as first argument",
+        "bad-mcs-classmethod-argument",
+        "Used when a metaclass class method has a first argument named "
+        "differently than the value specified in valid-metaclass-"
+        'classmethod-first-arg option (default to "mcs"), recommended to '
+        "easily differentiate them from regular instance methods.",
+    ),
+    "W0211": (
+        "Static method with %r as first argument",
+        "bad-staticmethod-argument",
+        'Used when a static method has "self" or a value specified in '
+        "valid-classmethod-first-arg option or "
+        "valid-metaclass-classmethod-first-arg option as first argument.",
+    ),
+    "W0221": (
+        "%s %s %r method",
+        "arguments-differ",
+        "Used when a method has a different number of arguments than in "
+        "the implemented interface or in an overridden method. Extra arguments "
+        "with default values are ignored.",
+    ),
+    "W0222": (
+        "Signature differs from %s %r method",
+        "signature-differs",
+        "Used when a method signature is different than in the "
+        "implemented interface or in an overridden method.",
+    ),
+    "W0223": (
+        "Method %r is abstract in class %r but is not overridden in child class %r",
+        "abstract-method",
+        "Used when an abstract method (i.e. raise NotImplementedError) is "
+        "not overridden in concrete class.",
+    ),
+    "W0231": (
+        "__init__ method from base class %r is not called",
+        "super-init-not-called",
+        "Used when an ancestor class method has an __init__ method "
+        "which is not called by a derived class.",
+    ),
+    "W0233": (
+        "__init__ method from a non direct base class %r is called",
+        "non-parent-init-called",
+        "Used when an __init__ method is called on a class which is not "
+        "in the direct ancestors for the analysed class.",
+    ),
+    "W0246": (
+        "Useless parent or super() delegation in method %r",
+        "useless-parent-delegation",
+        "Used whenever we can detect that an overridden method is useless, "
+        "relying on parent or super() delegation to do the same thing as another method "
+        "from the MRO.",
+        {"old_names": [("W0235", "useless-super-delegation")]},
+    ),
+    "W0236": (
+        "Method %r was expected to be %r, found it instead as %r",
+        "invalid-overridden-method",
+        "Used when we detect that a method was overridden in a way "
+        "that does not match its base class "
+        "which could result in potential bugs at runtime.",
+    ),
+    "W0237": (
+        "%s %s %r method",
+        "arguments-renamed",
+        "Used when a method parameter has a different name than in "
+        "the implemented interface or in an overridden method.",
+    ),
+    "W0238": (
+        "Unused private member `%s.%s`",
+        "unused-private-member",
+        "Emitted when a private member of a class is defined but not used.",
+    ),
+    "W0239": (
+        "Method %r overrides a method decorated with typing.final which is defined in class %r",
+        "overridden-final-method",
+        "Used when a method decorated with typing.final has been overridden.",
+    ),
+    "W0240": (
+        "Class %r is a subclass of a class decorated with typing.final: %r",
+        "subclassed-final-class",
+        "Used when a class decorated with typing.final has been subclassed.",
+    ),
+    "W0244": (
+        "Redefined slots %r in subclass",
+        "redefined-slots-in-subclass",
+        "Used when a slot is re-defined in a subclass.",
+    ),
+    "W0245": (
+        "Super call without brackets",
+        "super-without-brackets",
+        "Used when a call to super does not have brackets and thus is not an actual "
+        "call and does not work as expected.",
+    ),
+    "E0236": (
+        "Invalid object %r in __slots__, must contain only non empty strings",
+        "invalid-slots-object",
+        "Used when an invalid (non-string) object occurs in __slots__.",
+    ),
+    "E0237": (
+        "Assigning to attribute %r not defined in class slots",
+        "assigning-non-slot",
+        "Used when assigning to an attribute not defined in the class slots.",
+    ),
+    "E0238": (
+        "Invalid __slots__ object",
+        "invalid-slots",
+        "Used when an invalid __slots__ is found in class. "
+        "Only a string, an iterable or a sequence is permitted.",
+    ),
+    "E0239": (
+        "Inheriting %r, which is not a class.",
+        "inherit-non-class",
+        "Used when a class inherits from something which is not a class.",
+    ),
+    "E0240": (
+        "Inconsistent method resolution order for class %r",
+        "inconsistent-mro",
+        "Used when a class has an inconsistent method resolution order.",
+    ),
+    "E0241": (
+        "Duplicate bases for class %r",
+        "duplicate-bases",
+        "Duplicate use of base classes in derived classes raise TypeErrors.",
+    ),
+    "E0242": (
+        "Value %r in slots conflicts with class variable",
+        "class-variable-slots-conflict",
+        "Used when a value in __slots__ conflicts with a class variable, property or method.",
+    ),
+    "E0243": (
+        "Invalid assignment to '__class__'. Should be a class definition but got a '%s'",
+        "invalid-class-object",
+        "Used when an invalid object is assigned to a __class__ property. "
+        "Only a class is permitted.",
+    ),
+    "E0244": (
+        'Extending inherited Enum class "%s"',
+        "invalid-enum-extension",
+        "Used when a class tries to extend an inherited Enum class. "
+        "Doing so will raise a TypeError at runtime.",
+    ),
+    "R0202": (
+        "Consider using a decorator instead of calling classmethod",
+        "no-classmethod-decorator",
+        "Used when a class method is defined without using the decorator syntax.",
+    ),
+    "R0203": (
+        "Consider using a decorator instead of calling staticmethod",
+        "no-staticmethod-decorator",
+        "Used when a static method is defined without using the decorator syntax.",
+    ),
+    "C0205": (
+        "Class __slots__ should be a non-string iterable",
+        "single-string-used-for-slots",
+        "Used when a class __slots__ is a simple string, rather than an iterable.",
+    ),
+    "R0205": (
+        "Class %r inherits from object, can be safely removed from bases in python3",
+        "useless-object-inheritance",
+        "Used when a class inherit from object, which under python3 is implicit, "
+        "hence can be safely removed from bases.",
+    ),
+    "R0206": (
+        "Cannot have defined parameters for properties",
+        "property-with-parameters",
+        "Used when we detect that a property also has parameters, which are useless, "
+        "given that properties cannot be called with additional arguments.",
+    ),
+}
+
+
+def _scope_default() -> defaultdict[str, list[_AccessNodes]]:
+    # It's impossible to nest defaultdicts so we must use a function
+    return defaultdict(list)


 class ScopeAccessMap:
     """Store the accessed variables per scope."""

-    def __init__(self) ->None:
-        self._scopes: defaultdict[nodes.ClassDef, defaultdict[str, list[
-            _AccessNodes]]] = defaultdict(_scope_default)
+    def __init__(self) -> None:
+        self._scopes: defaultdict[
+            nodes.ClassDef, defaultdict[str, list[_AccessNodes]]
+        ] = defaultdict(_scope_default)

-    def set_accessed(self, node: _AccessNodes) ->None:
+    def set_accessed(self, node: _AccessNodes) -> None:
         """Set the given node as accessed."""
-        pass
+        frame = node_frame_class(node)
+        if frame is None:
+            # The node does not live in a class.
+            return
+        self._scopes[frame][node.attrname].append(node)

-    def accessed(self, scope: nodes.ClassDef) ->dict[str, list[_AccessNodes]]:
+    def accessed(self, scope: nodes.ClassDef) -> dict[str, list[_AccessNodes]]:
         """Get the accessed variables for the given scope."""
-        pass
+        return self._scopes.get(scope, {})


 class ClassChecker(BaseChecker):
@@ -266,78 +768,557 @@ class ClassChecker(BaseChecker):
     * attributes not defined in the __init__ method
     * unreachable code
     """
-    name = 'classes'
+
+    # configuration section name
+    name = "classes"
+    # messages
     msgs = MSGS
-    options = ('defining-attr-methods', {'default': ('__init__', '__new__',
-        'setUp', 'asyncSetUp', '__post_init__'), 'type': 'csv', 'metavar':
-        '<method names>', 'help':
-        'List of method names used to declare (i.e. assign) instance attributes.'
-        }), ('valid-classmethod-first-arg', {'default': ('cls',), 'type':
-        'csv', 'metavar': '<argument names>', 'help':
-        'List of valid names for the first argument in a class method.'}), (
-        'valid-metaclass-classmethod-first-arg', {'default': ('mcs',),
-        'type': 'csv', 'metavar': '<argument names>', 'help':
-        'List of valid names for the first argument in a metaclass class method.'
-        }), ('exclude-protected', {'default': ('_asdict', '_fields',
-        '_replace', '_source', '_make', 'os._exit'), 'type': 'csv',
-        'metavar': '<protected access exclusions>', 'help':
-        'List of member names, which should be excluded from the protected access warning.'
-        }), ('check-protected-access-in-special-methods', {'default': False,
-        'type': 'yn', 'metavar': '<y or n>', 'help':
-        'Warn about protected attribute access inside special methods'})
-
-    def __init__(self, linter: PyLinter) ->None:
+    # configuration options: each entry is an (option-name, option-spec) pair
+    options = (
+        (
+            "defining-attr-methods",
+            {
+                "default": (
+                    "__init__",
+                    "__new__",
+                    "setUp",
+                    "asyncSetUp",
+                    "__post_init__",
+                ),
+                "type": "csv",
+                "metavar": "<method names>",
+                "help": "List of method names used to declare (i.e. assign) \
+instance attributes.",
+            },
+        ),
+        (
+            "valid-classmethod-first-arg",
+            {
+                "default": ("cls",),
+                "type": "csv",
+                "metavar": "<argument names>",
+                "help": "List of valid names for the first argument in \
+a class method.",
+            },
+        ),
+        (
+            "valid-metaclass-classmethod-first-arg",
+            {
+                "default": ("mcs",),
+                "type": "csv",
+                "metavar": "<argument names>",
+                "help": "List of valid names for the first argument in \
+a metaclass class method.",
+            },
+        ),
+        (
+            "exclude-protected",
+            {
+                "default": (
+                    # namedtuple public API.
+                    "_asdict",
+                    "_fields",
+                    "_replace",
+                    "_source",
+                    "_make",
+                    # os._exit: private-named but a documented public function.
+                    "os._exit",
+                ),
+                "type": "csv",
+                "metavar": "<protected access exclusions>",
+                "help": (
+                    "List of member names, which should be excluded "
+                    "from the protected access warning."
+                ),
+            },
+        ),
+        (
+            "check-protected-access-in-special-methods",
+            {
+                "default": False,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Warn about protected attribute access inside special methods",
+            },
+        ),
+    )
+
+    def __init__(self, linter: PyLinter) -> None:
         super().__init__(linter)
+        # attribute accesses recorded per class scope (see ScopeAccessMap)
         self._accessed = ScopeAccessMap()
         self._first_attrs: list[str | None] = []

-    @only_required_for_messages('abstract-method', 'invalid-slots',
-        'single-string-used-for-slots', 'invalid-slots-object',
-        'class-variable-slots-conflict', 'inherit-non-class',
-        'useless-object-inheritance', 'inconsistent-mro', 'duplicate-bases',
-        'redefined-slots-in-subclass', 'invalid-enum-extension',
-        'subclassed-final-class', 'implicit-flag-alias')
-    def visit_classdef(self, node: nodes.ClassDef) ->None:
+    def open(self) -> None:
+        """Cache frequently used configuration values once per run."""
+        self._mixin_class_rgx = self.linter.config.mixin_class_rgx
+        py_version = self.linter.config.py_version
+        self._py38_plus = py_version >= (3, 8)
+
+    @cached_property
+    def _dummy_rgx(self) -> Pattern[str]:
+        # Regex matching names configured as "dummy" (ignorable) variables.
+        return self.linter.config.dummy_variables_rgx  # type: ignore[no-any-return]
+
+    @only_required_for_messages(
+        "abstract-method",
+        "invalid-slots",
+        "single-string-used-for-slots",
+        "invalid-slots-object",
+        "class-variable-slots-conflict",
+        "inherit-non-class",
+        "useless-object-inheritance",
+        "inconsistent-mro",
+        "duplicate-bases",
+        "redefined-slots-in-subclass",
+        "invalid-enum-extension",
+        "subclassed-final-class",
+        "implicit-flag-alias",
+    )
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
         """Init visit variable _accessed."""
-        pass
+        # Delegate each class-level check to its dedicated helper.
+        self._check_bases_classes(node)
+        self._check_slots(node)
+        self._check_proper_bases(node)
+        self._check_typing_final(node)
+        self._check_consistent_mro(node)
 
-    def _check_consistent_mro(self, node: nodes.ClassDef) ->None:
+    def _check_consistent_mro(self, node: nodes.ClassDef) -> None:
         """Detect that a class has a consistent mro or duplicate bases."""
-        pass
-
-    def _check_proper_bases(self, node: nodes.ClassDef) ->None:
+        # mro() raises when linearization fails; map each failure to a message.
+        try:
+            node.mro()
+        except astroid.InconsistentMroError:
+            self.add_message("inconsistent-mro", args=node.name, node=node)
+        except astroid.DuplicateBasesError:
+            self.add_message("duplicate-bases", args=node.name, node=node)
+
+    def _check_enum_base(self, node: nodes.ClassDef, ancestor: nodes.ClassDef) -> None:
+        """Emit invalid-enum-extension when *node* extends an enum ancestor that
+        already defines members, and implicit-flag-alias for IntFlag values
+        whose bits overlap with lower-valued flags.
+        """
+        members = ancestor.getattr("__members__")
+        if members and isinstance(members[0], nodes.Dict) and members[0].items:
+            for _, name_node in members[0].items:
+                # Exempt type annotations without value assignments
+                if all(
+                    isinstance(item.parent, nodes.AnnAssign)
+                    and item.parent.value is None
+                    for item in ancestor.getattr(name_node.name)
+                ):
+                    continue
+                self.add_message(
+                    "invalid-enum-extension",
+                    args=ancestor.name,
+                    node=node,
+                    confidence=INFERENCE,
+                )
+                break
+
+        if ancestor.is_subtype_of("enum.IntFlag"):
+            # Collect integer flag assignments present on the class
+            assignments = defaultdict(list)
+            for assign_name in node.nodes_of_class(nodes.AssignName):
+                if isinstance(assign_name.parent, nodes.Assign):
+                    value = getattr(assign_name.parent.value, "value", None)
+                    if isinstance(value, int):
+                        assignments[value].append(assign_name)
+
+            # For each bit position, collect all the flags that set the bit
+            bit_flags = defaultdict(set)
+            for flag in assignments:
+                flag_bits = (i for i, c in enumerate(reversed(bin(flag))) if c == "1")
+                for bit in flag_bits:
+                    bit_flags[bit].add(flag)
+
+            # Collect the minimum, unique values that each flag overlaps with
+            overlaps = defaultdict(list)
+            for flags in bit_flags.values():
+                source, *conflicts = sorted(flags)
+                for conflict in conflicts:
+                    overlaps[conflict].append(source)
+
+            # Report the overlapping values
+            for overlap in overlaps:
+                for assignment_node in assignments[overlap]:
+                    self.add_message(
+                        "implicit-flag-alias",
+                        node=assignment_node,
+                        args={
+                            "overlap": f"<{node.name}.{assignment_node.name}: {overlap}>",
+                            "sources": ", ".join(
+                                f"<{node.name}.{assignments[source][0].name}: {source}> "
+                                f"({overlap} & {source} = {overlap & source})"
+                                for source in overlaps[overlap]
+                            ),
+                        },
+                        confidence=INFERENCE,
+                    )
+
+    def _check_proper_bases(self, node: nodes.ClassDef) -> None:
         """Detect that a class inherits something which is not
         a class or a type.
         """
-        pass
-
-    def _check_typing_final(self, node: nodes.ClassDef) ->None:
+        for base in node.bases:
+            ancestor = safe_infer(base)
+            if not ancestor:
+                continue
+            # Instances of type (metaclass instances) or Protocol are valid bases.
+            if isinstance(ancestor, astroid.Instance) and (
+                ancestor.is_subtype_of("builtins.type")
+                or ancestor.is_subtype_of(".Protocol")
+            ):
+                continue
+
+            if not isinstance(ancestor, nodes.ClassDef) or _is_invalid_base_class(
+                ancestor
+            ):
+                self.add_message("inherit-non-class", args=base.as_string(), node=node)
+
+            if isinstance(ancestor, nodes.ClassDef) and ancestor.is_subtype_of(
+                "enum.Enum"
+            ):
+                self._check_enum_base(node, ancestor)
+
+            if ancestor.name == object.__name__:
+                self.add_message(
+                    "useless-object-inheritance", args=node.name, node=node
+                )
+
+    def _check_typing_final(self, node: nodes.ClassDef) -> None:
         """Detect that a class does not subclass a class decorated with
         `typing.final`.
         """
-        pass
-
-    @only_required_for_messages('unused-private-member',
-        'attribute-defined-outside-init', 'access-member-before-definition')
-    def leave_classdef(self, node: nodes.ClassDef) ->None:
+        # typing.final is only meaningful from Python 3.8 onwards.
+        if not self._py38_plus:
+            return
+        for base in node.bases:
+            ancestor = safe_infer(base)
+            if not ancestor:
+                continue
+
+            if isinstance(ancestor, nodes.ClassDef) and (
+                decorated_with(ancestor, ["typing.final"])
+                or uninferable_final_decorators(ancestor.decorators)
+            ):
+                self.add_message(
+                    "subclassed-final-class",
+                    args=(node.name, ancestor.name),
+                    node=node,
+                )
+
+    @only_required_for_messages(
+        "unused-private-member",
+        "attribute-defined-outside-init",
+        "access-member-before-definition",
+    )
+    def leave_classdef(self, node: nodes.ClassDef) -> None:
         """Checker for Class nodes.
 
         check that instance attributes are defined in __init__ and check
         access to existent members
         """
-        pass
-
-    def _check_unused_private_variables(self, node: nodes.ClassDef) ->None:
+        # Run the per-class checks once the whole class body has been visited.
+        self._check_unused_private_functions(node)
+        self._check_unused_private_variables(node)
+        self._check_unused_private_attributes(node)
+        self._check_attribute_defined_outside_init(node)
+
+    def _check_unused_private_functions(self, node: nodes.ClassDef) -> None:
+        """Emit unused-private-member for private methods never used in the class."""
+        for function_def in node.nodes_of_class(nodes.FunctionDef):
+            if not is_attr_private(function_def.name):
+                continue
+            parent_scope = function_def.parent.scope()
+            if isinstance(parent_scope, nodes.FunctionDef):
+                # Handle nested functions
+                if function_def.name in (
+                    n.name for n in parent_scope.nodes_of_class(nodes.Name)
+                ):
+                    continue
+            for child in node.nodes_of_class((nodes.Name, nodes.Attribute)):
+                # Check for cases where the functions are used as a variable instead of as a
+                # method call
+                if isinstance(child, nodes.Name) and child.name == function_def.name:
+                    break
+                if isinstance(child, nodes.Attribute):
+                    # Ignore recursive calls
+                    if (
+                        child.attrname != function_def.name
+                        or child.scope() == function_def
+                    ):
+                        continue
+
+                    # Check self.__attrname, cls.__attrname, node_name.__attrname
+                    if isinstance(child.expr, nodes.Name) and child.expr.name in {
+                        "self",
+                        "cls",
+                        node.name,
+                    }:
+                        break
+
+                    # Check type(self).__attrname
+                    if isinstance(child.expr, nodes.Call):
+                        inferred = safe_infer(child.expr)
+                        if (
+                            isinstance(inferred, nodes.ClassDef)
+                            and inferred.name == node.name
+                        ):
+                            break
+            else:
+                name_stack = []
+                curr = parent_scope
+                # Generate proper names for nested functions
+                while curr != node:
+                    name_stack.append(curr.name)
+                    curr = curr.parent.scope()
+
+                outer_level_names = f"{'.'.join(reversed(name_stack))}"
+                function_repr = f"{outer_level_names}.{function_def.name}({function_def.args.as_string()})"
+                self.add_message(
+                    "unused-private-member",
+                    node=function_def,
+                    args=(node.name, function_repr.lstrip(".")),
+                )
+
+    def _check_unused_private_variables(self, node: nodes.ClassDef) -> None:
         """Check if private variables are never used within a class."""
-        pass
-
-    def visit_functiondef(self, node: nodes.FunctionDef) ->None:
+        for assign_name in node.nodes_of_class(nodes.AssignName):
+            if isinstance(assign_name.parent, nodes.Arguments):
+                continue  # Ignore function arguments
+            if not is_attr_private(assign_name.name):
+                continue
+            for child in node.nodes_of_class((nodes.Name, nodes.Attribute)):
+                if isinstance(child, nodes.Name) and child.name == assign_name.name:
+                    break
+                if isinstance(child, nodes.Attribute):
+                    # Non-Name receivers are treated (conservatively) as a use.
+                    if not isinstance(child.expr, nodes.Name):
+                        break
+                    if child.attrname == assign_name.name and child.expr.name in (
+                        "self",
+                        "cls",
+                        node.name,
+                    ):
+                        break
+            else:
+                # No use found anywhere in the class body.
+                args = (node.name, assign_name.name)
+                self.add_message("unused-private-member", node=assign_name, args=args)
+
+    def _check_unused_private_attributes(self, node: nodes.ClassDef) -> None:
+        """Flag private instance/class attributes assigned but never read."""
+        for assign_attr in node.nodes_of_class(nodes.AssignAttr):
+            if not is_attr_private(assign_attr.attrname) or not isinstance(
+                assign_attr.expr, nodes.Name
+            ):
+                continue
+
+            # Logic for checking false positive when using __new__,
+            # Get the returned object names of the __new__ magic function
+            # Then check if the attribute was consumed in other instance methods
+            acceptable_obj_names: list[str] = ["self"]
+            scope = assign_attr.scope()
+            if isinstance(scope, nodes.FunctionDef) and scope.name == "__new__":
+                acceptable_obj_names.extend(
+                    [
+                        return_node.value.name
+                        for return_node in scope.nodes_of_class(nodes.Return)
+                        if isinstance(return_node.value, nodes.Name)
+                    ]
+                )
+
+            for attribute in node.nodes_of_class(nodes.Attribute):
+                if attribute.attrname != assign_attr.attrname:
+                    continue
+
+                if not isinstance(attribute.expr, nodes.Name):
+                    continue
+
+                if assign_attr.expr.name in {
+                    "cls",
+                    node.name,
+                } and attribute.expr.name in {"cls", "self", node.name}:
+                    # If assigned to cls or class name, can be accessed by cls/self/class name
+                    break
+
+                if (
+                    assign_attr.expr.name in acceptable_obj_names
+                    and attribute.expr.name == "self"
+                ):
+                    # If assigned to self.attrib, can only be accessed by self
+                    # Or if __new__ was used, the returned object names are acceptable
+                    break
+
+                if assign_attr.expr.name == attribute.expr.name == node.name:
+                    # Recognise attributes which are accessed via the class name
+                    break
+
+            else:
+                args = (node.name, assign_attr.attrname)
+                self.add_message("unused-private-member", node=assign_attr, args=args)
+
+    def _check_attribute_defined_outside_init(self, cnode: nodes.ClassDef) -> None:
+        """Check that instance attributes are defined in the allowed defining methods."""
+        # check access to existent members on non metaclass classes
+        if (
+            "attribute-defined-outside-init"
+            in self.linter.config.ignored_checks_for_mixins
+            and self._mixin_class_rgx.match(cnode.name)
+        ):
+            # We are in a mixin class. No need to try to figure out if
+            # something is missing, since it is most likely that it will
+            # miss.
+            return
+
+        accessed = self._accessed.accessed(cnode)
+        if cnode.type != "metaclass":
+            self._check_accessed_members(cnode, accessed)
+        # checks attributes are defined in an allowed method such as __init__
+        if not self.linter.is_message_enabled("attribute-defined-outside-init"):
+            return
+        defining_methods = self.linter.config.defining_attr_methods
+        current_module = cnode.root()
+        for attr, nodes_lst in cnode.instance_attrs.items():
+            # Exclude `__dict__` as it is already defined.
+            if attr == "__dict__":
+                continue
+
+            # Skip nodes which are not in the current module and it may screw up
+            # the output, while it's not worth it
+            nodes_lst = [
+                n
+                for n in nodes_lst
+                if not isinstance(n.statement(), (nodes.Delete, nodes.AugAssign))
+                and n.root() is current_module
+            ]
+            if not nodes_lst:
+                continue  # error detected by typechecking
+
+            # Check if any method attr is defined in is a defining method
+            # or if we have the attribute defined in a setter.
+            frames = (node.frame() for node in nodes_lst)
+            if any(
+                frame.name in defining_methods or is_property_setter(frame)
+                for frame in frames
+            ):
+                continue
+
+            # check attribute is defined in a parent's __init__
+            for parent in cnode.instance_attr_ancestors(attr):
+                attr_defined = False
+                # check if any parent method attr is defined in is a defining method
+                for node in parent.instance_attrs[attr]:
+                    if node.frame().name in defining_methods:
+                        attr_defined = True
+                if attr_defined:
+                    # we're done :)
+                    break
+            else:
+                # check attribute is defined as a class attribute
+                try:
+                    cnode.local_attr(attr)
+                except astroid.NotFoundError:
+                    for node in nodes_lst:
+                        if node.frame().name not in defining_methods:
+                            # If the attribute was set by a call in any
+                            # of the defining methods, then don't emit
+                            # the warning.
+                            if _called_in_methods(
+                                node.frame(), cnode, defining_methods
+                            ):
+                                continue
+                            self.add_message(
+                                "attribute-defined-outside-init", args=attr, node=node
+                            )
+
+    # pylint: disable = too-many-branches
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
         """Check method arguments, overriding."""
-        pass
+        # ignore actual functions
+        if not node.is_method():
+            return
+
+        self._check_useless_super_delegation(node)
+        self._check_property_with_parameters(node)
+
+        # 'is_method()' is called and makes sure that this is a 'nodes.ClassDef'
+        klass: nodes.ClassDef = node.parent.frame()
+        # check first argument is self if this is actually a method
+        self._check_first_arg_for_type(node, klass.type == "metaclass")
+        if node.name == "__init__":
+            self._check_init(node, klass)
+            return
+        # check signature if the method overloads inherited method
+        for overridden in klass.local_attr_ancestors(node.name):
+            # get astroid for the searched method
+            try:
+                parent_function = overridden[node.name]
+            except KeyError:
+                # we have found the method but it's not in the local
+                # dictionary.
+                # This may happen with astroid build from living objects
+                continue
+            if not isinstance(parent_function, nodes.FunctionDef):
+                continue
+            self._check_signature(node, parent_function, klass)
+            self._check_invalid_overridden_method(node, parent_function)
+            break
+
+        # Decorated methods may be descriptors/properties; those exit early
+        # because attribute assignment then cannot hide them.
+        if node.decorators:
+            for decorator in node.decorators.nodes:
+                if isinstance(decorator, nodes.Attribute) and decorator.attrname in {
+                    "getter",
+                    "setter",
+                    "deleter",
+                }:
+                    # attribute affectation will call this method, not hiding it
+                    return
+                if isinstance(decorator, nodes.Name):
+                    if decorator.name in ALLOWED_PROPERTIES:
+                        # attribute affectation will either call a setter or raise
+                        # an attribute error, anyway not hiding the function
+                        return
+
+                if isinstance(decorator, nodes.Attribute):
+                    if self._check_functools_or_not(decorator):
+                        return
+
+                # Infer the decorator and see if it returns something useful
+                inferred = safe_infer(decorator)
+                if not inferred:
+                    return
+                if isinstance(inferred, nodes.FunctionDef):
+                    # Okay, it's a decorator, let's see what it can infer.
+                    try:
+                        inferred = next(inferred.infer_call_result(inferred))
+                    except astroid.InferenceError:
+                        return
+                try:
+                    if (
+                        isinstance(inferred, (astroid.Instance, nodes.ClassDef))
+                        and inferred.getattr("__get__")
+                        and inferred.getattr("__set__")
+                    ):
+                        return
+                except astroid.AttributeInferenceError:
+                    pass
+
+        # check if the method is hidden by an attribute
+        # pylint: disable = too-many-try-statements
+        try:
+            overridden = klass.instance_attr(node.name)[0]
+            overridden_frame = overridden.frame()
+            if (
+                isinstance(overridden_frame, nodes.FunctionDef)
+                and overridden_frame.type == "method"
+            ):
+                overridden_frame = overridden_frame.parent.frame()
+            if not (
+                isinstance(overridden_frame, nodes.ClassDef)
+                and klass.is_subtype_of(overridden_frame.qname())
+            ):
+                return
+
+            # If a subclass defined the method then it's not our fault.
+            for ancestor in klass.ancestors():
+                if node.name in ancestor.instance_attrs and is_attr_private(node.name):
+                    return
+                for obj in ancestor.lookup(node.name)[1]:
+                    if isinstance(obj, nodes.FunctionDef):
+                        return
+            args = (overridden.root().name, overridden.fromlineno)
+            self.add_message("method-hidden", args=args, node=node)
+        except astroid.NotFoundError:
+            pass
+
     visit_asyncfunctiondef = visit_functiondef

-    def _check_useless_super_delegation(self, function: nodes.FunctionDef
-        ) ->None:
+    def _check_useless_super_delegation(self, function: nodes.FunctionDef) -> None:
         """Check if the given function node is an useless method override.

         We consider it *useless* if it uses the super() builtin, but having
@@ -348,23 +1329,279 @@ class ClassChecker(BaseChecker):
         this method, then the method could be removed altogether, by letting
         other implementation to take precedence.
         """
-        pass
-
-    def _check_redefined_slots(self, node: nodes.ClassDef, slots_node:
-        nodes.NodeNG, slots_list: list[nodes.NodeNG]) ->None:
+        if not _is_trivial_super_delegation(function):
+            return
+
+        call: nodes.Call = function.body[0].value
+
+        # Classes that override __eq__ should also override
+        # __hash__, even a trivial override is meaningful
+        if function.name == "__hash__":
+            for other_method in function.parent.mymethods():
+                if other_method.name == "__eq__":
+                    return
+
+        # Check values of default args
+        klass = function.parent.frame()
+        meth_node = None
+        for overridden in klass.local_attr_ancestors(function.name):
+            # get astroid for the searched method
+            try:
+                meth_node = overridden[function.name]
+            except KeyError:
+                # we have found the method but it's not in the local
+                # dictionary.
+                # This may happen with astroid build from living objects
+                continue
+            if (
+                not isinstance(meth_node, nodes.FunctionDef)
+                # If the method has an ancestor which is not a
+                # function then it is legitimate to redefine it
+                or _has_different_parameters_default_value(
+                    meth_node.args, function.args
+                )
+                # arguments to builtins such as Exception.__init__() cannot be inspected
+                or (meth_node.args.args is None and function.argnames() != ["self"])
+            ):
+                return
+            break
+
+        # Detect if the parameters are the same as the call's arguments.
+        params = _signature_from_arguments(function.args)
+        args = _signature_from_call(call)
+
+        if meth_node is not None:
+            # Detect if the super method uses varargs and the function doesn't or makes some of those explicit
+            if meth_node.args.vararg and (
+                not function.args.vararg
+                or len(function.args.args) > len(meth_node.args.args)
+            ):
+                return
+
+            def form_annotations(arguments: nodes.Arguments) -> list[str]:
+                annotations = chain(
+                    (arguments.posonlyargs_annotations or []), arguments.annotations
+                )
+                return [ann.as_string() for ann in annotations if ann is not None]
+
+            called_annotations = form_annotations(function.args)
+            overridden_annotations = form_annotations(meth_node.args)
+            if called_annotations and overridden_annotations:
+                if called_annotations != overridden_annotations:
+                    return
+
+            if (
+                function.returns is not None
+                and meth_node.returns is not None
+                and meth_node.returns.as_string() != function.returns.as_string()
+            ):
+                # Override adds typing information to the return type
+                return
+
+        if _definition_equivalent_to_call(params, args):
+            self.add_message(
+                "useless-parent-delegation",
+                node=function,
+                args=(function.name,),
+                confidence=INFERENCE,
+            )
+
+    def _check_property_with_parameters(self, node: nodes.FunctionDef) -> None:
+        if (
+            len(node.args.arguments) > 1
+            and decorated_with_property(node)
+            and not is_property_setter(node)
+        ):
+            self.add_message("property-with-parameters", node=node, confidence=HIGH)
+
+    def _check_invalid_overridden_method(
+        self,
+        function_node: nodes.FunctionDef,
+        parent_function_node: nodes.FunctionDef,
+    ) -> None:
+        parent_is_property = decorated_with_property(
+            parent_function_node
+        ) or is_property_setter_or_deleter(parent_function_node)
+        current_is_property = decorated_with_property(
+            function_node
+        ) or is_property_setter_or_deleter(function_node)
+        if parent_is_property and not current_is_property:
+            self.add_message(
+                "invalid-overridden-method",
+                args=(function_node.name, "property", function_node.type),
+                node=function_node,
+            )
+        elif not parent_is_property and current_is_property:
+            self.add_message(
+                "invalid-overridden-method",
+                args=(function_node.name, "method", "property"),
+                node=function_node,
+            )
+
+        parent_is_async = isinstance(parent_function_node, nodes.AsyncFunctionDef)
+        current_is_async = isinstance(function_node, nodes.AsyncFunctionDef)
+
+        if parent_is_async and not current_is_async:
+            self.add_message(
+                "invalid-overridden-method",
+                args=(function_node.name, "async", "non-async"),
+                node=function_node,
+            )
+
+        elif not parent_is_async and current_is_async:
+            self.add_message(
+                "invalid-overridden-method",
+                args=(function_node.name, "non-async", "async"),
+                node=function_node,
+            )
+        if (
+            decorated_with(parent_function_node, ["typing.final"])
+            or uninferable_final_decorators(parent_function_node.decorators)
+        ) and self._py38_plus:
+            self.add_message(
+                "overridden-final-method",
+                args=(function_node.name, parent_function_node.parent.frame().name),
+                node=function_node,
+            )
+
+    def _check_functools_or_not(self, decorator: nodes.Attribute) -> bool:
+        if decorator.attrname != "cached_property":
+            return False
+
+        if not isinstance(decorator.expr, nodes.Name):
+            return False
+
+        _, import_nodes = decorator.expr.lookup(decorator.expr.name)
+
+        if not import_nodes:
+            return False
+        import_node = import_nodes[0]
+
+        if not isinstance(import_node, (astroid.Import, astroid.ImportFrom)):
+            return False
+
+        return "functools" in dict(import_node.names)
+
+    def _check_slots(self, node: nodes.ClassDef) -> None:
+        if "__slots__" not in node.locals:
+            return
+
+        try:
+            inferred_slots = tuple(node.ilookup("__slots__"))
+        except astroid.InferenceError:
+            return
+        for slots in inferred_slots:
+            # check if __slots__ is a valid type
+            if isinstance(slots, util.UninferableBase):
+                continue
+            if not is_iterable(slots) and not is_comprehension(slots):
+                self.add_message("invalid-slots", node=node)
+                continue
+
+            if isinstance(slots, nodes.Const):
+                # a string, ignore the following checks
+                self.add_message("single-string-used-for-slots", node=node)
+                continue
+            if not hasattr(slots, "itered"):
+                # we can't obtain the values, maybe a .deque?
+                continue
+
+            if isinstance(slots, nodes.Dict):
+                values = [item[0] for item in slots.items]
+            else:
+                values = slots.itered()
+            if isinstance(values, util.UninferableBase):
+                continue
+            for elt in values:
+                try:
+                    self._check_slots_elt(elt, node)
+                except astroid.InferenceError:
+                    continue
+            self._check_redefined_slots(node, slots, values)
+
+    def _check_redefined_slots(
+        self,
+        node: nodes.ClassDef,
+        slots_node: nodes.NodeNG,
+        slots_list: list[nodes.NodeNG],
+    ) -> None:
         """Check if `node` redefines a slot which is defined in an ancestor class."""
-        pass
-
-    def leave_functiondef(self, node: nodes.FunctionDef) ->None:
+        slots_names: list[str] = []
+        for slot in slots_list:
+            if isinstance(slot, nodes.Const):
+                slots_names.append(slot.value)
+            else:
+                inferred_slot = safe_infer(slot)
+                inferred_slot_value = getattr(inferred_slot, "value", None)
+                if isinstance(inferred_slot_value, str):
+                    slots_names.append(inferred_slot_value)
+
+        # Slots of all parent classes
+        ancestors_slots_names = {
+            slot.value
+            for ancestor in node.local_attr_ancestors("__slots__")
+            for slot in ancestor.slots() or []
+        }
+
+        # Slots which are common to `node` and its parent classes
+        redefined_slots = ancestors_slots_names.intersection(slots_names)
+
+        if redefined_slots:
+            self.add_message(
+                "redefined-slots-in-subclass",
+                args=([name for name in slots_names if name in redefined_slots],),
+                node=slots_node,
+            )
+
+    def _check_slots_elt(
+        self, elt: SuccessfulInferenceResult, node: nodes.ClassDef
+    ) -> None:
+        for inferred in elt.infer():
+            if isinstance(inferred, util.UninferableBase):
+                continue
+            if not isinstance(inferred, nodes.Const) or not isinstance(
+                inferred.value, str
+            ):
+                self.add_message(
+                    "invalid-slots-object",
+                    args=elt.as_string(),
+                    node=elt,
+                    confidence=INFERENCE,
+                )
+                continue
+            if not inferred.value:
+                self.add_message(
+                    "invalid-slots-object",
+                    args=elt.as_string(),
+                    node=elt,
+                    confidence=INFERENCE,
+                )
+
+            # Check if we have a conflict with a class variable.
+            class_variable = node.locals.get(inferred.value)
+            if class_variable:
+                # Skip annotated assignments which don't conflict at all with slots.
+                if len(class_variable) == 1:
+                    parent = class_variable[0].parent
+                    if isinstance(parent, nodes.AnnAssign) and parent.value is None:
+                        return
+                self.add_message(
+                    "class-variable-slots-conflict", args=(inferred.value,), node=elt
+                )
+
+    def leave_functiondef(self, node: nodes.FunctionDef) -> None:
         """On method node, check if this method couldn't be a function.

         ignore class, static and abstract methods, initializer,
         methods overridden from a parent class.
         """
-        pass
+        if node.is_method():
+            if node.args.args is not None:
+                self._first_attrs.pop()
+
     leave_asyncfunctiondef = leave_functiondef

-    def visit_attribute(self, node: nodes.Attribute) ->None:
+    def visit_attribute(self, node: nodes.Attribute) -> None:
         """Check if the getattr is an access to a class member
         if so, register it.

@@ -372,19 +1609,160 @@ class ClassChecker(BaseChecker):
         class member from outside its class (but ignore __special__
         methods)
         """
-        pass
+        self._check_super_without_brackets(node)

-    def _check_super_without_brackets(self, node: nodes.Attribute) ->None:
-        """Check if there is a function call on a super call without brackets."""
-        pass
+        # Check self
+        if self._uses_mandatory_method_param(node):
+            self._accessed.set_accessed(node)
+            return
+        if not self.linter.is_message_enabled("protected-access"):
+            return

-    def _check_in_slots(self, node: nodes.AssignAttr) ->None:
+        self._check_protected_attribute_access(node)
+
+    def _check_super_without_brackets(self, node: nodes.Attribute) -> None:
+        """Check if there is a function call on a super call without brackets."""
+        # Check if attribute call is in frame definition in class definition
+        frame = node.frame()
+        if not isinstance(frame, nodes.FunctionDef):
+            return
+        if not isinstance(frame.parent.frame(), nodes.ClassDef):
+            return
+        if not isinstance(node.parent, nodes.Call):
+            return
+        if not isinstance(node.expr, nodes.Name):
+            return
+        if node.expr.name == "super":
+            self.add_message("super-without-brackets", node=node.expr, confidence=HIGH)
+
+    @only_required_for_messages(
+        "assigning-non-slot", "invalid-class-object", "access-member-before-definition"
+    )
+    def visit_assignattr(self, node: nodes.AssignAttr) -> None:
+        if isinstance(
+            node.assign_type(), nodes.AugAssign
+        ) and self._uses_mandatory_method_param(node):
+            self._accessed.set_accessed(node)
+        self._check_in_slots(node)
+        self._check_invalid_class_object(node)
+
+    def _check_invalid_class_object(self, node: nodes.AssignAttr) -> None:
+        if not node.attrname == "__class__":
+            return
+        if isinstance(node.parent, nodes.Tuple):
+            class_index = -1
+            for i, elt in enumerate(node.parent.elts):
+                if hasattr(elt, "attrname") and elt.attrname == "__class__":
+                    class_index = i
+            if class_index == -1:
+                # This should not happen because we checked that the node name
+                # is '__class__' earlier, but let's not be too confident here
+                return  # pragma: no cover
+            inferred = safe_infer(node.parent.parent.value.elts[class_index])
+        else:
+            inferred = safe_infer(node.parent.value)
+        if (
+            isinstance(inferred, (nodes.ClassDef, util.UninferableBase))
+            or inferred is None
+        ):
+            # If is uninferable, we allow it to prevent false positives
+            return
+        self.add_message(
+            "invalid-class-object",
+            node=node,
+            args=inferred.__class__.__name__,
+            confidence=INFERENCE,
+        )
+
+    def _check_in_slots(self, node: nodes.AssignAttr) -> None:
         """Check that the given AssignAttr node
         is defined in the class slots.
         """
-        pass
-
-    def _check_classmethod_declaration(self, node: nodes.Assign) ->None:
+        inferred = safe_infer(node.expr)
+        if not isinstance(inferred, astroid.Instance):
+            return
+
+        klass = inferred._proxied
+        if not has_known_bases(klass):
+            return
+        if "__slots__" not in klass.locals or not klass.newstyle:
+            return
+        # If `__setattr__` is defined on the class, then we can't reason about
+        # what will happen when assigning to an attribute.
+        if any(
+            base.locals.get("__setattr__")
+            for base in klass.mro()
+            if base.qname() != "builtins.object"
+        ):
+            return
+
+        # If 'typing.Generic' is a base of bases of klass, the cached version
+        # of 'slots()' might have been evaluated incorrectly, thus deleted cache entry.
+        if any(base.qname() == "typing.Generic" for base in klass.mro()):
+            cache = getattr(klass, "__cache", None)
+            if cache and cache.get(klass.slots) is not None:
+                del cache[klass.slots]
+
+        slots = klass.slots()
+        if slots is None:
+            return
+        # If any ancestor doesn't use slots, the slots
+        # defined for this class are superfluous.
+        if any(
+            "__slots__" not in ancestor.locals
+            and ancestor.name not in ("Generic", "object")
+            for ancestor in klass.ancestors()
+        ):
+            return
+
+        if not any(slot.value == node.attrname for slot in slots):
+            # If we have a '__dict__' in slots, then
+            # assigning any name is valid.
+            if not any(slot.value == "__dict__" for slot in slots):
+                if _is_attribute_property(node.attrname, klass):
+                    # Properties circumvent the slots mechanism,
+                    # so we should not emit a warning for them.
+                    return
+                if node.attrname != "__class__" and utils.is_class_attr(
+                    node.attrname, klass
+                ):
+                    return
+                if node.attrname in klass.locals:
+                    for local_name in klass.locals.get(node.attrname):
+                        statement = local_name.statement()
+                        if (
+                            isinstance(statement, nodes.AnnAssign)
+                            and not statement.value
+                        ):
+                            return
+                    if _has_data_descriptor(klass, node.attrname):
+                        # Descriptors circumvent the slots mechanism as well.
+                        return
+                if node.attrname == "__class__" and _has_same_layout_slots(
+                    slots, node.parent.value
+                ):
+                    return
+                self.add_message(
+                    "assigning-non-slot",
+                    args=(node.attrname,),
+                    node=node,
+                    confidence=INFERENCE,
+                )
+
+    @only_required_for_messages(
+        "protected-access", "no-classmethod-decorator", "no-staticmethod-decorator"
+    )
+    def visit_assign(self, assign_node: nodes.Assign) -> None:
+        self._check_classmethod_declaration(assign_node)
+        node = assign_node.targets[0]
+        if not isinstance(node, nodes.AssignAttr):
+            return
+
+        if self._uses_mandatory_method_param(node):
+            return
+        self._check_protected_attribute_access(node)
+
+    def _check_classmethod_declaration(self, node: nodes.Assign) -> None:
         """Checks for uses of classmethod() or staticmethod().

         When a @classmethod or @staticmethod decorator should be used instead.
@@ -393,10 +1771,39 @@ class ClassChecker(BaseChecker):
         is defined.
         `node` is an assign node.
         """
-        pass
-
-    def _check_protected_attribute_access(self, node: (nodes.Attribute |
-        nodes.AssignAttr)) ->None:
+        if not isinstance(node.value, nodes.Call):
+            return
+
+        # check the function called is "classmethod" or "staticmethod"
+        func = node.value.func
+        if not isinstance(func, nodes.Name) or func.name not in (
+            "classmethod",
+            "staticmethod",
+        ):
+            return
+
+        msg = (
+            "no-classmethod-decorator"
+            if func.name == "classmethod"
+            else "no-staticmethod-decorator"
+        )
+        # assignment must be at a class scope
+        parent_class = node.scope()
+        if not isinstance(parent_class, nodes.ClassDef):
+            return
+
+        # Check if the arg passed to classmethod is a class member
+        classmeth_arg = node.value.args[0]
+        if not isinstance(classmeth_arg, nodes.Name):
+            return
+
+        method_name = classmeth_arg.name
+        if any(method_name == member.name for member in parent_class.mymethods()):
+            self.add_message(msg, node=node.targets[0])
+
+    def _check_protected_attribute_access(
+        self, node: nodes.Attribute | nodes.AssignAttr
+    ) -> None:
         """Given an attribute access node (set or get), check if attribute
         access is legitimate.

@@ -408,44 +1815,213 @@ class ClassChecker(BaseChecker):
         * Klass2._attr inside "Klass" class when Klass2 is a base class of
             Klass.
         """
-        pass
+        attrname = node.attrname
+
+        if (
+            not is_attr_protected(attrname)
+            or attrname in self.linter.config.exclude_protected
+        ):
+            return
+
+        # Typing annotations in function definitions can include protected members
+        if utils.is_node_in_type_annotation_context(node):
+            return
+
+        # Return if `attrname` is defined at the module-level or as a class attribute
+        # and is listed in `exclude-protected`.
+        inferred = safe_infer(node.expr)
+        if (
+            inferred
+            and isinstance(inferred, (nodes.ClassDef, nodes.Module))
+            and f"{inferred.name}.{attrname}" in self.linter.config.exclude_protected
+        ):
+            return
+
+        klass = node_frame_class(node)
+        if klass is None:
+            # We are not in a class, no remaining valid case
+            self.add_message("protected-access", node=node, args=attrname)
+            return
+
+        # In classes, check we are not getting a parent method
+        # through the class object or through super
+
+        # If the expression begins with a call to super, that's ok.
+        if (
+            isinstance(node.expr, nodes.Call)
+            and isinstance(node.expr.func, nodes.Name)
+            and node.expr.func.name == "super"
+        ):
+            return
+
+        # If the expression begins with a call to type(self), that's ok.
+        if self._is_type_self_call(node.expr):
+            return
+
+        # Check if we are inside the scope of a class or nested inner class
+        inside_klass = True
+        outer_klass = klass
+        callee = node.expr.as_string()
+        parents_callee = callee.split(".")
+        parents_callee.reverse()
+        for callee in parents_callee:
+            if not outer_klass or callee != outer_klass.name:
+                inside_klass = False
+                break
+
+            # Move up one level within the nested classes
+            outer_klass = get_outer_class(outer_klass)
+
+        # We are in a class, one remaining valid case, Klass._attr inside
+        # Klass
+        if not (inside_klass or callee in klass.basenames):
+            # Detect property assignments in the body of the class.
+            # This is acceptable:
+            #
+            # class A:
+            #     b = property(lambda: self._b)
+
+            stmt = node.parent.statement()
+            if (
+                isinstance(stmt, nodes.Assign)
+                and len(stmt.targets) == 1
+                and isinstance(stmt.targets[0], nodes.AssignName)
+            ):
+                name = stmt.targets[0].name
+                if _is_attribute_property(name, klass):
+                    return
+
+            if (
+                self._is_classmethod(node.frame())
+                and self._is_inferred_instance(node.expr, klass)
+                and self._is_class_or_instance_attribute(attrname, klass)
+            ):
+                return
+
+            licit_protected_member = not attrname.startswith("__")
+            if (
+                not self.linter.config.check_protected_access_in_special_methods
+                and licit_protected_member
+                and self._is_called_inside_special_method(node)
+            ):
+                return
+
+            self.add_message("protected-access", node=node, args=attrname)

     @staticmethod
-    def _is_called_inside_special_method(node: nodes.NodeNG) ->bool:
+    def _is_called_inside_special_method(node: nodes.NodeNG) -> bool:
         """Returns true if the node is located inside a special (aka dunder) method."""
-        pass
+        frame_name = node.frame().name
+        return frame_name and frame_name in PYMETHODS
+
+    def _is_type_self_call(self, expr: nodes.NodeNG) -> bool:
+        return (
+            isinstance(expr, nodes.Call)
+            and isinstance(expr.func, nodes.Name)
+            and expr.func.name == "type"
+            and len(expr.args) == 1
+            and self._is_mandatory_method_param(expr.args[0])
+        )

     @staticmethod
-    def _is_classmethod(func: LocalsDictNodeNG) ->bool:
+    def _is_classmethod(func: LocalsDictNodeNG) -> bool:
         """Check if the given *func* node is a class method."""
-        pass
+        return isinstance(func, nodes.FunctionDef) and (
+            func.type == "classmethod" or func.name == "__class_getitem__"
+        )

     @staticmethod
-    def _is_inferred_instance(expr: nodes.NodeNG, klass: nodes.ClassDef
-        ) ->bool:
+    def _is_inferred_instance(expr: nodes.NodeNG, klass: nodes.ClassDef) -> bool:
         """Check if the inferred value of the given *expr* is an instance of
         *klass*.
         """
-        pass
+        inferred = safe_infer(expr)
+        if not isinstance(inferred, astroid.Instance):
+            return False
+        return inferred._proxied is klass

     @staticmethod
-    def _is_class_or_instance_attribute(name: str, klass: nodes.ClassDef
-        ) ->bool:
+    def _is_class_or_instance_attribute(name: str, klass: nodes.ClassDef) -> bool:
         """Check if the given attribute *name* is a class or instance member of the
         given *klass*.

         Returns ``True`` if the name is a property in the given klass,
         ``False`` otherwise.
         """
-        pass
-
-    def _check_accessed_members(self, node: nodes.ClassDef, accessed: dict[
-        str, list[_AccessNodes]]) ->None:
+        if utils.is_class_attr(name, klass):
+            return True
+
+        try:
+            klass.instance_attr(name)
+            return True
+        except astroid.NotFoundError:
+            return False
+
+    def _check_accessed_members(
+        self, node: nodes.ClassDef, accessed: dict[str, list[_AccessNodes]]
+    ) -> None:
         """Check that accessed members are defined."""
-        pass
-
-    def _check_first_arg_for_type(self, node: nodes.FunctionDef, metaclass:
-        bool) ->None:
+        excs = ("AttributeError", "Exception", "BaseException")
+        for attr, nodes_lst in accessed.items():
+            try:
+                # is it a class attribute ?
+                node.local_attr(attr)
+                # yes, stop here
+                continue
+            except astroid.NotFoundError:
+                pass
+            # is it an instance attribute of a parent class ?
+            try:
+                next(node.instance_attr_ancestors(attr))
+                # yes, stop here
+                continue
+            except StopIteration:
+                pass
+            # is it an instance attribute ?
+            try:
+                defstmts = node.instance_attr(attr)
+            except astroid.NotFoundError:
+                pass
+            else:
+                # filter out augment assignment nodes
+                defstmts = [stmt for stmt in defstmts if stmt not in nodes_lst]
+                if not defstmts:
+                    # only augment assignment for this node, no-member should be
+                    # triggered by the typecheck checker
+                    continue
+                # filter defstmts to only pick the first one when there are
+                # several assignments in the same scope
+                scope = defstmts[0].scope()
+                defstmts = [
+                    stmt
+                    for i, stmt in enumerate(defstmts)
+                    if i == 0 or stmt.scope() is not scope
+                ]
+                # if there are still more than one, don't attempt to be smarter
+                # than we can be
+                if len(defstmts) == 1:
+                    defstmt = defstmts[0]
+                    # check that if the node is accessed in the same method as
+                    # it's defined, it's accessed after the initial assignment
+                    frame = defstmt.frame()
+                    lno = defstmt.fromlineno
+                    for _node in nodes_lst:
+                        if (
+                            _node.frame() is frame
+                            and _node.fromlineno < lno
+                            and not astroid.are_exclusive(
+                                _node.statement(), defstmt, excs
+                            )
+                        ):
+                            self.add_message(
+                                "access-member-before-definition",
+                                node=_node,
+                                args=(attr, lno),
+                            )
+
+    def _check_first_arg_for_type(
+        self, node: nodes.FunctionDef, metaclass: bool
+    ) -> None:
         """Check the name of first argument, expect:.

         * 'self' for a regular method
@@ -455,46 +2031,327 @@ class ClassChecker(BaseChecker):
           valid-metaclass-classmethod-first-arg)
         * not one of the above for a static method
         """
-        pass
-
-    def _check_bases_classes(self, node: nodes.ClassDef) ->None:
+        # don't care about functions with unknown argument (builtins)
+        if node.args.args is None:
+            return
+        if node.args.posonlyargs:
+            first_arg = node.args.posonlyargs[0].name
+        elif node.args.args:
+            first_arg = node.argnames()[0]
+        else:
+            first_arg = None
+        self._first_attrs.append(first_arg)
+        first = self._first_attrs[-1]
+        # static method
+        if node.type == "staticmethod":
+            if (
+                first_arg == "self"
+                or first_arg in self.linter.config.valid_classmethod_first_arg
+                or first_arg in self.linter.config.valid_metaclass_classmethod_first_arg
+            ):
+                self.add_message("bad-staticmethod-argument", args=first, node=node)
+                return
+            self._first_attrs[-1] = None
+        elif "builtins.staticmethod" in node.decoratornames():
+            # Check if there is a decorator which is not named `staticmethod`
+            # but is assigned to one.
+            return
+        # class / regular method with no args
+        elif not (
+            node.args.args
+            or node.args.posonlyargs
+            or node.args.vararg
+            or node.args.kwarg
+        ):
+            self.add_message("no-method-argument", node=node, args=node.name)
+        # metaclass
+        elif metaclass:
+            # metaclass __new__ or classmethod
+            if node.type == "classmethod":
+                self._check_first_arg_config(
+                    first,
+                    self.linter.config.valid_metaclass_classmethod_first_arg,
+                    node,
+                    "bad-mcs-classmethod-argument",
+                    node.name,
+                )
+            # metaclass regular method
+            else:
+                self._check_first_arg_config(
+                    first,
+                    self.linter.config.valid_classmethod_first_arg,
+                    node,
+                    "bad-mcs-method-argument",
+                    node.name,
+                )
+        # regular class with class method
+        elif node.type == "classmethod" or node.name == "__class_getitem__":
+            self._check_first_arg_config(
+                first,
+                self.linter.config.valid_classmethod_first_arg,
+                node,
+                "bad-classmethod-argument",
+                node.name,
+            )
+        # regular class with regular method without self as argument
+        elif first != "self":
+            self.add_message("no-self-argument", node=node, args=node.name)
+
+    def _check_first_arg_config(
+        self,
+        first: str | None,
+        config: Sequence[str],
+        node: nodes.FunctionDef,
+        message: str,
+        method_name: str,
+    ) -> None:
+        if first not in config:
+            if len(config) == 1:
+                valid = repr(config[0])
+            else:
+                valid = ", ".join(repr(v) for v in config[:-1])
+                valid = f"{valid} or {config[-1]!r}"
+            self.add_message(message, args=(method_name, valid), node=node)
+
+    def _check_bases_classes(self, node: nodes.ClassDef) -> None:
         """Check that the given class node implements abstract methods from
         base classes.
         """
-        pass

-    def _check_init(self, node: nodes.FunctionDef, klass_node: nodes.ClassDef
-        ) ->None:
+        def is_abstract(method: nodes.FunctionDef) -> bool:
+            return method.is_abstract(pass_is_abstract=False)  # type: ignore[no-any-return]
+
+        # check if this class abstract
+        if class_is_abstract(node):
+            return
+
+        methods = sorted(
+            unimplemented_abstract_methods(node, is_abstract).items(),
+            key=lambda item: item[0],
+        )
+        for name, method in methods:
+            owner = method.parent.frame()
+            if owner is node:
+                continue
+            # owner is not this class, it must be a parent class
+            # check that the ancestor's method is not abstract
+            if name in node.locals:
+                # it is redefined as an attribute or with a descriptor
+                continue
+
+            self.add_message(
+                "abstract-method",
+                node=node,
+                args=(name, owner.name, node.name),
+                confidence=INFERENCE,
+            )
+
+    def _check_init(self, node: nodes.FunctionDef, klass_node: nodes.ClassDef) -> None:
         """Check that the __init__ method call super or ancestors'__init__
         method (unless it is used for type hinting with `typing.overload`).
         """
-        pass
-
-    def _check_signature(self, method1: nodes.FunctionDef, refmethod: nodes
-        .FunctionDef, cls: nodes.ClassDef) ->None:
+        if not self.linter.is_message_enabled(
+            "super-init-not-called"
+        ) and not self.linter.is_message_enabled("non-parent-init-called"):
+            return
+        to_call = _ancestors_to_call(klass_node)
+        not_called_yet = dict(to_call)
+        parents_with_called_inits: set[bases.UnboundMethod] = set()
+        for stmt in node.nodes_of_class(nodes.Call):
+            expr = stmt.func
+            if not isinstance(expr, nodes.Attribute) or expr.attrname != "__init__":
+                continue
+            # skip the test if using super
+            if (
+                isinstance(expr.expr, nodes.Call)
+                and isinstance(expr.expr.func, nodes.Name)
+                and expr.expr.func.name == "super"
+            ):
+                return
+            # pylint: disable = too-many-try-statements
+            try:
+                for klass in expr.expr.infer():
+                    if isinstance(klass, util.UninferableBase):
+                        continue
+                    # The inferred klass can be super(), which was
+                    # assigned to a variable and the `__init__`
+                    # was called later.
+                    #
+                    # base = super()
+                    # base.__init__(...)
+
+                    if (
+                        isinstance(klass, astroid.Instance)
+                        and isinstance(klass._proxied, nodes.ClassDef)
+                        and is_builtin_object(klass._proxied)
+                        and klass._proxied.name == "super"
+                    ):
+                        return
+                    if isinstance(klass, astroid.objects.Super):
+                        return
+                    try:
+                        method = not_called_yet.pop(klass)
+                        # Record that the class' init has been called
+                        parents_with_called_inits.add(node_frame_class(method))
+                    except KeyError:
+                        if klass not in klass_node.ancestors(recurs=False):
+                            self.add_message(
+                                "non-parent-init-called", node=expr, args=klass.name
+                            )
+            except astroid.InferenceError:
+                continue
+        for klass, method in not_called_yet.items():
+            # Check if the init of the class that defines this init has already
+            # been called.
+            if node_frame_class(method) in parents_with_called_inits:
+                return
+
+            if utils.is_protocol_class(klass):
+                return
+
+            if decorated_with(node, ["typing.overload"]):
+                continue
+            self.add_message(
+                "super-init-not-called",
+                args=klass.name,
+                node=node,
+                confidence=INFERENCE,
+            )
+
+    def _check_signature(
+        self,
+        method1: nodes.FunctionDef,
+        refmethod: nodes.FunctionDef,
+        cls: nodes.ClassDef,
+    ) -> None:
         """Check that the signature of the two given methods match."""
-        pass
-
-    def _uses_mandatory_method_param(self, node: (nodes.Attribute | nodes.
-        Assign | nodes.AssignAttr)) ->bool:
+        if not (
+            isinstance(method1, nodes.FunctionDef)
+            and isinstance(refmethod, nodes.FunctionDef)
+        ):
+            self.add_message(
+                "method-check-failed", args=(method1, refmethod), node=method1
+            )
+            return
+
+        instance = cls.instantiate_class()
+        method1 = astroid.scoped_nodes.function_to_method(method1, instance)
+        refmethod = astroid.scoped_nodes.function_to_method(refmethod, instance)
+
+        # Don't care about functions with unknown argument (builtins).
+        if method1.args.args is None or refmethod.args.args is None:
+            return
+
+        # Ignore private to class methods.
+        if is_attr_private(method1.name):
+            return
+        # Ignore setters, they have an implicit extra argument,
+        # which shouldn't be taken in consideration.
+        if is_property_setter(method1):
+            return
+
+        arg_differ_output = _different_parameters(
+            refmethod, method1, dummy_parameter_regex=self._dummy_rgx
+        )
+
+        class_type = "overriding"
+
+        if len(arg_differ_output) > 0:
+            for msg in arg_differ_output:
+                if "Number" in msg:
+                    total_args_method1 = len(method1.args.args)
+                    if method1.args.vararg:
+                        total_args_method1 += 1
+                    if method1.args.kwarg:
+                        total_args_method1 += 1
+                    if method1.args.kwonlyargs:
+                        total_args_method1 += len(method1.args.kwonlyargs)
+                    total_args_refmethod = len(refmethod.args.args)
+                    if refmethod.args.vararg:
+                        total_args_refmethod += 1
+                    if refmethod.args.kwarg:
+                        total_args_refmethod += 1
+                    if refmethod.args.kwonlyargs:
+                        total_args_refmethod += len(refmethod.args.kwonlyargs)
+                    error_type = "arguments-differ"
+                    msg_args = (
+                        msg
+                        + f"was {total_args_refmethod} in '{refmethod.parent.frame().name}.{refmethod.name}' and "
+                        f"is now {total_args_method1} in",
+                        class_type,
+                        f"{method1.parent.frame().name}.{method1.name}",
+                    )
+                elif "renamed" in msg:
+                    error_type = "arguments-renamed"
+                    msg_args = (
+                        msg,
+                        class_type,
+                        f"{method1.parent.frame().name}.{method1.name}",
+                    )
+                else:
+                    error_type = "arguments-differ"
+                    msg_args = (
+                        msg,
+                        class_type,
+                        f"{method1.parent.frame().name}.{method1.name}",
+                    )
+                self.add_message(error_type, args=msg_args, node=method1)
+        elif (
+            len(method1.args.defaults) < len(refmethod.args.defaults)
+            and not method1.args.vararg
+        ):
+            class_type = "overridden"
+            self.add_message(
+                "signature-differs", args=(class_type, method1.name), node=method1
+            )
+
+    def _uses_mandatory_method_param(
+        self, node: nodes.Attribute | nodes.Assign | nodes.AssignAttr
+    ) -> bool:
         """Check that attribute lookup name use first attribute variable name.

         Name is `self` for method, `cls` for classmethod and `mcs` for metaclass.
         """
-        pass
+        return self._is_mandatory_method_param(node.expr)

-    def _is_mandatory_method_param(self, node: nodes.NodeNG) ->bool:
+    def _is_mandatory_method_param(self, node: nodes.NodeNG) -> bool:
         """Check if nodes.Name corresponds to first attribute variable name.

         Name is `self` for method, `cls` for classmethod and `mcs` for metaclass.
         Static methods return False.
         """
-        pass
-
-
-def _ancestors_to_call(klass_node: nodes.ClassDef, method_name: str='__init__'
-    ) ->dict[nodes.ClassDef, bases.UnboundMethod]:
+        if self._first_attrs:
+            first_attr = self._first_attrs[-1]
+        else:
+            # It's possible the function was already unregistered.
+            closest_func = utils.get_node_first_ancestor_of_type(
+                node, nodes.FunctionDef
+            )
+            if closest_func is None:
+                return False
+            if not closest_func.is_bound():
+                return False
+            if not closest_func.args.args:
+                return False
+            first_attr = closest_func.args.args[0].name
+        return isinstance(node, nodes.Name) and node.name == first_attr
+
+
+def _ancestors_to_call(
+    klass_node: nodes.ClassDef, method_name: str = "__init__"
+) -> dict[nodes.ClassDef, bases.UnboundMethod]:
     """Return a dictionary where keys are the list of base classes providing
     the queried method, and so that should/may be called from the method node.
     """
-    pass
+    to_call: dict[nodes.ClassDef, bases.UnboundMethod] = {}
+    for base_node in klass_node.ancestors(recurs=False):
+        try:
+            init_node = next(base_node.igetattr(method_name))
+            if not isinstance(init_node, astroid.UnboundMethod):
+                continue
+            if init_node.is_abstract():
+                continue
+            to_call[base_node] = init_node
+        except astroid.InferenceError:
+            continue
+    return to_call
diff --git a/pylint/checkers/classes/special_methods_checker.py b/pylint/checkers/classes/special_methods_checker.py
index 98a772892..025f28562 100644
--- a/pylint/checkers/classes/special_methods_checker.py
+++ b/pylint/checkers/classes/special_methods_checker.py
@@ -1,85 +1,403 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Special methods checker and helper function's module."""
+
 from __future__ import annotations
+
 from collections.abc import Callable
+
 import astroid
 from astroid import bases, nodes, util
 from astroid.context import InferenceContext
 from astroid.typing import InferenceResult
+
 from pylint.checkers import BaseChecker
-from pylint.checkers.utils import PYMETHODS, SPECIAL_METHODS_PARAMS, decorated_with, is_function_body_ellipsis, only_required_for_messages, safe_infer
+from pylint.checkers.utils import (
+    PYMETHODS,
+    SPECIAL_METHODS_PARAMS,
+    decorated_with,
+    is_function_body_ellipsis,
+    only_required_for_messages,
+    safe_infer,
+)
 from pylint.lint.pylinter import PyLinter
-NEXT_METHOD = '__next__'
+
+NEXT_METHOD = "__next__"


-def _safe_infer_call_result(node: nodes.FunctionDef, caller: nodes.
-    FunctionDef, context: (InferenceContext | None)=None) ->(InferenceResult |
-    None):
+def _safe_infer_call_result(
+    node: nodes.FunctionDef,
+    caller: nodes.FunctionDef,
+    context: InferenceContext | None = None,
+) -> InferenceResult | None:
     """Safely infer the return value of a function.

     Returns None if inference failed or if there is some ambiguity (more than
     one node has been inferred). Otherwise, returns inferred value.
     """
-    pass
+    try:
+        inferit = node.infer_call_result(caller, context=context)
+        value = next(inferit)
+    except astroid.InferenceError:
+        return None  # inference failed
+    except StopIteration:
+        return None  # no values inferred
+    try:
+        next(inferit)
+        return None  # there is ambiguity on the inferred node
+    except astroid.InferenceError:
+        return None  # there is some kind of ambiguity
+    except StopIteration:
+        return value


 class SpecialMethodsChecker(BaseChecker):
     """Checker which verifies that special methods
     are implemented correctly.
     """
-    name = 'classes'
-    msgs = {'E0301': ('__iter__ returns non-iterator',
-        'non-iterator-returned',
-        f'Used when an __iter__ method returns something which is not an iterable (i.e. has no `{NEXT_METHOD}` method)'
-        , {'old_names': [('W0234', 'old-non-iterator-returned-1'), ('E0234',
-        'old-non-iterator-returned-2')]}), 'E0302': (
-        'The special method %r expects %s param(s), %d %s given',
-        'unexpected-special-method-signature',
-        'Emitted when a special method was defined with an invalid number of parameters. If it has too few or too many, it might not work at all.'
-        , {'old_names': [('E0235', 'bad-context-manager')]}), 'E0303': (
-        '__len__ does not return non-negative integer',
-        'invalid-length-returned',
-        'Used when a __len__ method returns something which is not a non-negative integer'
-        ), 'E0304': ('__bool__ does not return bool',
-        'invalid-bool-returned',
-        'Used when a __bool__ method returns something which is not a bool'
-        ), 'E0305': ('__index__ does not return int',
-        'invalid-index-returned',
-        'Used when an __index__ method returns something which is not an integer'
-        ), 'E0306': ('__repr__ does not return str',
-        'invalid-repr-returned',
-        'Used when a __repr__ method returns something which is not a string'
-        ), 'E0307': ('__str__ does not return str', 'invalid-str-returned',
-        'Used when a __str__ method returns something which is not a string'
-        ), 'E0308': ('__bytes__ does not return bytes',
-        'invalid-bytes-returned',
-        'Used when a __bytes__ method returns something which is not bytes'
-        ), 'E0309': ('__hash__ does not return int',
-        'invalid-hash-returned',
-        'Used when a __hash__ method returns something which is not an integer'
-        ), 'E0310': ('__length_hint__ does not return non-negative integer',
-        'invalid-length-hint-returned',
-        'Used when a __length_hint__ method returns something which is not a non-negative integer'
-        ), 'E0311': ('__format__ does not return str',
-        'invalid-format-returned',
-        'Used when a __format__ method returns something which is not a string'
-        ), 'E0312': ('__getnewargs__ does not return a tuple',
-        'invalid-getnewargs-returned',
-        'Used when a __getnewargs__ method returns something which is not a tuple'
-        ), 'E0313': (
-        '__getnewargs_ex__ does not return a tuple containing (tuple, dict)',
-        'invalid-getnewargs-ex-returned',
-        'Used when a __getnewargs_ex__ method returns something which is not of the form tuple(tuple, dict)'
-        )}
-
-    def __init__(self, linter: PyLinter) ->None:
+
+    name = "classes"
+    msgs = {
+        "E0301": (
+            "__iter__ returns non-iterator",
+            "non-iterator-returned",
+            "Used when an __iter__ method returns something which is not an "
+            f"iterable (i.e. has no `{NEXT_METHOD}` method)",
+            {
+                "old_names": [
+                    ("W0234", "old-non-iterator-returned-1"),
+                    ("E0234", "old-non-iterator-returned-2"),
+                ]
+            },
+        ),
+        "E0302": (
+            "The special method %r expects %s param(s), %d %s given",
+            "unexpected-special-method-signature",
+            "Emitted when a special method was defined with an "
+            "invalid number of parameters. If it has too few or "
+            "too many, it might not work at all.",
+            {"old_names": [("E0235", "bad-context-manager")]},
+        ),
+        "E0303": (
+            "__len__ does not return non-negative integer",
+            "invalid-length-returned",
+            "Used when a __len__ method returns something which is not a "
+            "non-negative integer",
+        ),
+        "E0304": (
+            "__bool__ does not return bool",
+            "invalid-bool-returned",
+            "Used when a __bool__ method returns something which is not a bool",
+        ),
+        "E0305": (
+            "__index__ does not return int",
+            "invalid-index-returned",
+            "Used when an __index__ method returns something which is not "
+            "an integer",
+        ),
+        "E0306": (
+            "__repr__ does not return str",
+            "invalid-repr-returned",
+            "Used when a __repr__ method returns something which is not a string",
+        ),
+        "E0307": (
+            "__str__ does not return str",
+            "invalid-str-returned",
+            "Used when a __str__ method returns something which is not a string",
+        ),
+        "E0308": (
+            "__bytes__ does not return bytes",
+            "invalid-bytes-returned",
+            "Used when a __bytes__ method returns something which is not bytes",
+        ),
+        "E0309": (
+            "__hash__ does not return int",
+            "invalid-hash-returned",
+            "Used when a __hash__ method returns something which is not an integer",
+        ),
+        "E0310": (
+            "__length_hint__ does not return non-negative integer",
+            "invalid-length-hint-returned",
+            "Used when a __length_hint__ method returns something which is not a "
+            "non-negative integer",
+        ),
+        "E0311": (
+            "__format__ does not return str",
+            "invalid-format-returned",
+            "Used when a __format__ method returns something which is not a string",
+        ),
+        "E0312": (
+            "__getnewargs__ does not return a tuple",
+            "invalid-getnewargs-returned",
+            "Used when a __getnewargs__ method returns something which is not "
+            "a tuple",
+        ),
+        "E0313": (
+            "__getnewargs_ex__ does not return a tuple containing (tuple, dict)",
+            "invalid-getnewargs-ex-returned",
+            "Used when a __getnewargs_ex__ method returns something which is not "
+            "of the form tuple(tuple, dict)",
+        ),
+    }
+
+    def __init__(self, linter: PyLinter) -> None:
         super().__init__(linter)
-        self._protocol_map: dict[str, Callable[[nodes.FunctionDef,
-            InferenceResult], None]] = {'__iter__': self._check_iter,
-            '__len__': self._check_len, '__bool__': self._check_bool,
-            '__index__': self._check_index, '__repr__': self._check_repr,
-            '__str__': self._check_str, '__bytes__': self._check_bytes,
-            '__hash__': self._check_hash, '__length_hint__': self.
-            _check_length_hint, '__format__': self._check_format,
-            '__getnewargs__': self._check_getnewargs, '__getnewargs_ex__':
-            self._check_getnewargs_ex}
+        self._protocol_map: dict[
+            str, Callable[[nodes.FunctionDef, InferenceResult], None]
+        ] = {
+            "__iter__": self._check_iter,
+            "__len__": self._check_len,
+            "__bool__": self._check_bool,
+            "__index__": self._check_index,
+            "__repr__": self._check_repr,
+            "__str__": self._check_str,
+            "__bytes__": self._check_bytes,
+            "__hash__": self._check_hash,
+            "__length_hint__": self._check_length_hint,
+            "__format__": self._check_format,
+            "__getnewargs__": self._check_getnewargs,
+            "__getnewargs_ex__": self._check_getnewargs_ex,
+        }
+
+    @only_required_for_messages(
+        "unexpected-special-method-signature",
+        "non-iterator-returned",
+        "invalid-length-returned",
+        "invalid-bool-returned",
+        "invalid-index-returned",
+        "invalid-repr-returned",
+        "invalid-str-returned",
+        "invalid-bytes-returned",
+        "invalid-hash-returned",
+        "invalid-length-hint-returned",
+        "invalid-format-returned",
+        "invalid-getnewargs-returned",
+        "invalid-getnewargs-ex-returned",
+    )
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
+        if not node.is_method():
+            return
+
+        inferred = _safe_infer_call_result(node, node)
+        # Only want to check types that we are able to infer
+        if (
+            inferred
+            and node.name in self._protocol_map
+            and not is_function_body_ellipsis(node)
+        ):
+            self._protocol_map[node.name](node, inferred)
+
+        if node.name in PYMETHODS:
+            self._check_unexpected_method_signature(node)
+
     visit_asyncfunctiondef = visit_functiondef
+
+    def _check_unexpected_method_signature(self, node: nodes.FunctionDef) -> None:
+        expected_params = SPECIAL_METHODS_PARAMS[node.name]
+
+        if expected_params is None:
+            # This can support a variable number of parameters.
+            return
+        if not node.args.args and not node.args.vararg:
+            # Method has no parameter, will be caught
+            # by no-method-argument.
+            return
+
+        if decorated_with(node, ["builtins.staticmethod"]):
+            # We expect to not take in consideration self.
+            all_args = node.args.args
+        else:
+            all_args = node.args.args[1:]
+        mandatory = len(all_args) - len(node.args.defaults)
+        optional = len(node.args.defaults)
+        current_params = mandatory + optional
+
+        emit = False  # If we don't know we choose a false negative
+        if isinstance(expected_params, tuple):
+            # The expected number of parameters can be any value from this
+            # tuple, although the user should implement the method
+            # to take all of them in consideration.
+            emit = mandatory not in expected_params
+            # mypy thinks that expected_params has type tuple[int, int] | int | None
+            # But at this point it must be 'tuple[int, int]' because of the type check
+            expected_params = f"between {expected_params[0]} or {expected_params[1]}"  # type: ignore[assignment]
+        else:
+            # If the number of mandatory parameters doesn't
+            # suffice, the expected parameters for this
+            # function will be deduced from the optional
+            # parameters.
+            rest = expected_params - mandatory
+            if rest == 0:
+                emit = False
+            elif rest < 0:
+                emit = True
+            elif rest > 0:
+                emit = not ((optional - rest) >= 0 or node.args.vararg)
+
+        if emit:
+            verb = "was" if current_params <= 1 else "were"
+            self.add_message(
+                "unexpected-special-method-signature",
+                args=(node.name, expected_params, current_params, verb),
+                node=node,
+            )
+
+    @staticmethod
+    def _is_wrapped_type(node: InferenceResult, type_: str) -> bool:
+        return (
+            isinstance(node, bases.Instance)
+            and node.name == type_
+            and not isinstance(node, nodes.Const)
+        )
+
+    @staticmethod
+    def _is_int(node: InferenceResult) -> bool:
+        if SpecialMethodsChecker._is_wrapped_type(node, "int"):
+            return True
+
+        return isinstance(node, nodes.Const) and isinstance(node.value, int)
+
+    @staticmethod
+    def _is_str(node: InferenceResult) -> bool:
+        if SpecialMethodsChecker._is_wrapped_type(node, "str"):
+            return True
+
+        return isinstance(node, nodes.Const) and isinstance(node.value, str)
+
+    @staticmethod
+    def _is_bool(node: InferenceResult) -> bool:
+        if SpecialMethodsChecker._is_wrapped_type(node, "bool"):
+            return True
+
+        return isinstance(node, nodes.Const) and isinstance(node.value, bool)
+
+    @staticmethod
+    def _is_bytes(node: InferenceResult) -> bool:
+        if SpecialMethodsChecker._is_wrapped_type(node, "bytes"):
+            return True
+
+        return isinstance(node, nodes.Const) and isinstance(node.value, bytes)
+
+    @staticmethod
+    def _is_tuple(node: InferenceResult) -> bool:
+        if SpecialMethodsChecker._is_wrapped_type(node, "tuple"):
+            return True
+
+        return isinstance(node, nodes.Const) and isinstance(node.value, tuple)
+
+    @staticmethod
+    def _is_dict(node: InferenceResult) -> bool:
+        if SpecialMethodsChecker._is_wrapped_type(node, "dict"):
+            return True
+
+        return isinstance(node, nodes.Const) and isinstance(node.value, dict)
+
+    @staticmethod
+    def _is_iterator(node: InferenceResult) -> bool:
+        if isinstance(node, bases.Generator):
+            # Generators can be iterated.
+            return True
+        if isinstance(node, nodes.ComprehensionScope):
+            # Comprehensions can be iterated.
+            return True
+
+        if isinstance(node, bases.Instance):
+            try:
+                node.local_attr(NEXT_METHOD)
+                return True
+            except astroid.NotFoundError:
+                pass
+        elif isinstance(node, nodes.ClassDef):
+            metaclass = node.metaclass()
+            if metaclass and isinstance(metaclass, nodes.ClassDef):
+                try:
+                    metaclass.local_attr(NEXT_METHOD)
+                    return True
+                except astroid.NotFoundError:
+                    pass
+        return False
+
+    def _check_iter(self, node: nodes.FunctionDef, inferred: InferenceResult) -> None:
+        if not self._is_iterator(inferred):
+            self.add_message("non-iterator-returned", node=node)
+
+    def _check_len(self, node: nodes.FunctionDef, inferred: InferenceResult) -> None:
+        if not self._is_int(inferred):
+            self.add_message("invalid-length-returned", node=node)
+        elif isinstance(inferred, nodes.Const) and inferred.value < 0:
+            self.add_message("invalid-length-returned", node=node)
+
+    def _check_bool(self, node: nodes.FunctionDef, inferred: InferenceResult) -> None:
+        if not self._is_bool(inferred):
+            self.add_message("invalid-bool-returned", node=node)
+
+    def _check_index(self, node: nodes.FunctionDef, inferred: InferenceResult) -> None:
+        if not self._is_int(inferred):
+            self.add_message("invalid-index-returned", node=node)
+
+    def _check_repr(self, node: nodes.FunctionDef, inferred: InferenceResult) -> None:
+        if not self._is_str(inferred):
+            self.add_message("invalid-repr-returned", node=node)
+
+    def _check_str(self, node: nodes.FunctionDef, inferred: InferenceResult) -> None:
+        if not self._is_str(inferred):
+            self.add_message("invalid-str-returned", node=node)
+
+    def _check_bytes(self, node: nodes.FunctionDef, inferred: InferenceResult) -> None:
+        if not self._is_bytes(inferred):
+            self.add_message("invalid-bytes-returned", node=node)
+
+    def _check_hash(self, node: nodes.FunctionDef, inferred: InferenceResult) -> None:
+        if not self._is_int(inferred):
+            self.add_message("invalid-hash-returned", node=node)
+
+    def _check_length_hint(
+        self, node: nodes.FunctionDef, inferred: InferenceResult
+    ) -> None:
+        if not self._is_int(inferred):
+            self.add_message("invalid-length-hint-returned", node=node)
+        elif isinstance(inferred, nodes.Const) and inferred.value < 0:
+            self.add_message("invalid-length-hint-returned", node=node)
+
+    def _check_format(self, node: nodes.FunctionDef, inferred: InferenceResult) -> None:
+        if not self._is_str(inferred):
+            self.add_message("invalid-format-returned", node=node)
+
+    def _check_getnewargs(
+        self, node: nodes.FunctionDef, inferred: InferenceResult
+    ) -> None:
+        if not self._is_tuple(inferred):
+            self.add_message("invalid-getnewargs-returned", node=node)
+
+    def _check_getnewargs_ex(
+        self, node: nodes.FunctionDef, inferred: InferenceResult
+    ) -> None:
+        if not self._is_tuple(inferred):
+            self.add_message("invalid-getnewargs-ex-returned", node=node)
+            return
+
+        if not isinstance(inferred, nodes.Tuple):
+            # If it's not an astroid.Tuple we can't analyze it further
+            return
+
+        found_error = False
+
+        if len(inferred.elts) != 2:
+            found_error = True
+        else:
+            for arg, check in (
+                (inferred.elts[0], self._is_tuple),
+                (inferred.elts[1], self._is_dict),
+            ):
+                if isinstance(arg, nodes.Call):
+                    arg = safe_infer(arg)
+
+                if arg and not isinstance(arg, util.UninferableBase):
+                    if not check(arg):
+                        found_error = True
+                        break
+
+        if found_error:
+            self.add_message("invalid-getnewargs-ex-returned", node=node)
diff --git a/pylint/checkers/dataclass_checker.py b/pylint/checkers/dataclass_checker.py
index bf68dc00a..60b1b23cd 100644
--- a/pylint/checkers/dataclass_checker.py
+++ b/pylint/checkers/dataclass_checker.py
@@ -1,25 +1,37 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Dataclass checkers for Python code."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
 from astroid.brain.brain_dataclasses import DATACLASS_MODULES
+
 from pylint.checkers import BaseChecker, utils
 from pylint.interfaces import INFERENCE
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


-def _is_dataclasses_module(node: nodes.Module) ->bool:
+def _is_dataclasses_module(node: nodes.Module) -> bool:
     """Utility function to check if node is from dataclasses_module."""
-    pass
+    return node.name in DATACLASS_MODULES


-def _check_name_or_attrname_eq_to(node: (nodes.Name | nodes.Attribute),
-    check_with: str) ->bool:
+def _check_name_or_attrname_eq_to(
+    node: nodes.Name | nodes.Attribute, check_with: str
+) -> bool:
     """Utility function to check either a Name/Attribute node's name/attrname with a
     given string.
     """
-    pass
+    if isinstance(node, nodes.Name):
+        return str(node.name) == check_with
+    return str(node.attrname) == check_with


 class DataclassChecker(BaseChecker):
@@ -28,12 +40,22 @@ class DataclassChecker(BaseChecker):
     Checks for
     * invalid-field-call
     """
-    name = 'dataclass'
-    msgs = {'E3701': ('Invalid usage of field(), %s', 'invalid-field-call',
-        'The dataclasses.field() specifier should only be used as the value of an assignment within a dataclass, or within the make_dataclass() function.'
-        )}

-    def _check_invalid_field_call(self, node: nodes.Call) ->None:
+    name = "dataclass"
+    msgs = {
+        "E3701": (
+            "Invalid usage of field(), %s",
+            "invalid-field-call",
+            "The dataclasses.field() specifier should only be used as the value of "
+            "an assignment within a dataclass, or within the make_dataclass() function.",
+        ),
+    }
+
+    @utils.only_required_for_messages("invalid-field-call")
+    def visit_call(self, node: nodes.Call) -> None:
+        self._check_invalid_field_call(node)
+
+    def _check_invalid_field_call(self, node: nodes.Call) -> None:
         """Checks for correct usage of the dataclasses.field() specifier in
         dataclasses or within the make_dataclass() function.

@@ -42,11 +64,66 @@ class DataclassChecker(BaseChecker):
         @dataclass decorator and outside make_dataclass() function, or when it
         is used improperly within a dataclass.
         """
-        pass
+        if not isinstance(node.func, (nodes.Name, nodes.Attribute)):
+            return
+        if not _check_name_or_attrname_eq_to(node.func, "field"):
+            return
+        inferred_func = utils.safe_infer(node.func)
+        if not (
+            isinstance(inferred_func, nodes.FunctionDef)
+            and _is_dataclasses_module(inferred_func.root())
+        ):
+            return
+        scope_node = node.parent
+        while scope_node and not isinstance(scope_node, (nodes.ClassDef, nodes.Call)):
+            scope_node = scope_node.parent
+
+        if isinstance(scope_node, nodes.Call):
+            self._check_invalid_field_call_within_call(node, scope_node)
+            return
+
+        if not scope_node or not scope_node.is_dataclass:
+            self.add_message(
+                "invalid-field-call",
+                node=node,
+                args=(
+                    "it should be used within a dataclass or the make_dataclass() function.",
+                ),
+                confidence=INFERENCE,
+            )
+            return

-    def _check_invalid_field_call_within_call(self, node: nodes.Call,
-        scope_node: nodes.Call) ->None:
+        if not (isinstance(node.parent, nodes.AnnAssign) and node == node.parent.value):
+            self.add_message(
+                "invalid-field-call",
+                node=node,
+                args=("it should be the value of an assignment within a dataclass.",),
+                confidence=INFERENCE,
+            )
+
+    def _check_invalid_field_call_within_call(
+        self, node: nodes.Call, scope_node: nodes.Call
+    ) -> None:
         """Checks for special case where calling field is valid as an argument of the
         make_dataclass() function.
         """
-        pass
+        inferred_func = utils.safe_infer(scope_node.func)
+        if (
+            isinstance(scope_node.func, (nodes.Name, nodes.AssignName))
+            and scope_node.func.name == "make_dataclass"
+            and isinstance(inferred_func, nodes.FunctionDef)
+            and _is_dataclasses_module(inferred_func.root())
+        ):
+            return
+        self.add_message(
+            "invalid-field-call",
+            node=node,
+            args=(
+                "it should be used within a dataclass or the make_dataclass() function.",
+            ),
+            confidence=INFERENCE,
+        )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(DataclassChecker(linter))
diff --git a/pylint/checkers/deprecated.py b/pylint/checkers/deprecated.py
index fbe3222fa..028dc13f3 100644
--- a/pylint/checkers/deprecated.py
+++ b/pylint/checkers/deprecated.py
@@ -1,17 +1,31 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Checker mixin for deprecated functionality."""
+
 from __future__ import annotations
+
 from collections.abc import Container, Iterable
 from itertools import chain
+
 import astroid
 from astroid import nodes
 from astroid.bases import Instance
+
 from pylint.checkers import utils
 from pylint.checkers.base_checker import BaseChecker
 from pylint.checkers.utils import get_import_name, infer_all, safe_infer
 from pylint.interfaces import INFERENCE
 from pylint.typing import MessageDefinitionTuple
-ACCEPTABLE_NODES = (astroid.BoundMethod, astroid.UnboundMethod, nodes.
-    FunctionDef, nodes.ClassDef, astroid.Attribute)
+
+ACCEPTABLE_NODES = (
+    astroid.BoundMethod,
+    astroid.UnboundMethod,
+    nodes.FunctionDef,
+    nodes.ClassDef,
+    astroid.Attribute,
+)


 class DeprecatedMixin(BaseChecker):
@@ -19,78 +33,134 @@ class DeprecatedMixin(BaseChecker):

     A class implementing mixin must define "deprecated-method" Message.
     """
-    DEPRECATED_ATTRIBUTE_MESSAGE: dict[str, MessageDefinitionTuple] = {'W4906':
-        ('Using deprecated attribute %r', 'deprecated-attribute',
-        'The attribute is marked as deprecated and will be removed in the future.'
-        , {'shared': True})}
-    DEPRECATED_MODULE_MESSAGE: dict[str, MessageDefinitionTuple] = {'W4901':
-        ('Deprecated module %r', 'deprecated-module',
-        'A module marked as deprecated is imported.', {'old_names': [(
-        'W0402', 'old-deprecated-module')], 'shared': True})}
-    DEPRECATED_METHOD_MESSAGE: dict[str, MessageDefinitionTuple] = {'W4902':
-        ('Using deprecated method %s()', 'deprecated-method',
-        'The method is marked as deprecated and will be removed in the future.'
-        , {'old_names': [('W1505', 'old-deprecated-method')], 'shared': True})}
-    DEPRECATED_ARGUMENT_MESSAGE: dict[str, MessageDefinitionTuple] = {'W4903':
-        ('Using deprecated argument %s of method %s()',
-        'deprecated-argument',
-        'The argument is marked as deprecated and will be removed in the future.'
-        , {'old_names': [('W1511', 'old-deprecated-argument')], 'shared': 
-        True})}
-    DEPRECATED_CLASS_MESSAGE: dict[str, MessageDefinitionTuple] = {'W4904':
-        ('Using deprecated class %s of module %s', 'deprecated-class',
-        'The class is marked as deprecated and will be removed in the future.',
-        {'old_names': [('W1512', 'old-deprecated-class')], 'shared': True})}
-    DEPRECATED_DECORATOR_MESSAGE: dict[str, MessageDefinitionTuple] = {'W4905':
-        ('Using deprecated decorator %s()', 'deprecated-decorator',
-        'The decorator is marked as deprecated and will be removed in the future.'
-        , {'old_names': [('W1513', 'old-deprecated-decorator')], 'shared': 
-        True})}
-
-    @utils.only_required_for_messages('deprecated-attribute')
-    def visit_attribute(self, node: astroid.Attribute) ->None:
+
+    DEPRECATED_ATTRIBUTE_MESSAGE: dict[str, MessageDefinitionTuple] = {
+        "W4906": (
+            "Using deprecated attribute %r",
+            "deprecated-attribute",
+            "The attribute is marked as deprecated and will be removed in the future.",
+            {"shared": True},
+        ),
+    }
+
+    DEPRECATED_MODULE_MESSAGE: dict[str, MessageDefinitionTuple] = {
+        "W4901": (
+            "Deprecated module %r",
+            "deprecated-module",
+            "A module marked as deprecated is imported.",
+            {"old_names": [("W0402", "old-deprecated-module")], "shared": True},
+        ),
+    }
+
+    DEPRECATED_METHOD_MESSAGE: dict[str, MessageDefinitionTuple] = {
+        "W4902": (
+            "Using deprecated method %s()",
+            "deprecated-method",
+            "The method is marked as deprecated and will be removed in the future.",
+            {"old_names": [("W1505", "old-deprecated-method")], "shared": True},
+        ),
+    }
+
+    DEPRECATED_ARGUMENT_MESSAGE: dict[str, MessageDefinitionTuple] = {
+        "W4903": (
+            "Using deprecated argument %s of method %s()",
+            "deprecated-argument",
+            "The argument is marked as deprecated and will be removed in the future.",
+            {"old_names": [("W1511", "old-deprecated-argument")], "shared": True},
+        ),
+    }
+
+    DEPRECATED_CLASS_MESSAGE: dict[str, MessageDefinitionTuple] = {
+        "W4904": (
+            "Using deprecated class %s of module %s",
+            "deprecated-class",
+            "The class is marked as deprecated and will be removed in the future.",
+            {"old_names": [("W1512", "old-deprecated-class")], "shared": True},
+        ),
+    }
+
+    DEPRECATED_DECORATOR_MESSAGE: dict[str, MessageDefinitionTuple] = {
+        "W4905": (
+            "Using deprecated decorator %s()",
+            "deprecated-decorator",
+            "The decorator is marked as deprecated and will be removed in the future.",
+            {"old_names": [("W1513", "old-deprecated-decorator")], "shared": True},
+        ),
+    }
+
+    @utils.only_required_for_messages("deprecated-attribute")
+    def visit_attribute(self, node: astroid.Attribute) -> None:
         """Called when an `astroid.Attribute` node is visited."""
-        pass
+        self.check_deprecated_attribute(node)

-    @utils.only_required_for_messages('deprecated-method',
-        'deprecated-argument', 'deprecated-class')
-    def visit_call(self, node: nodes.Call) ->None:
+    @utils.only_required_for_messages(
+        "deprecated-method",
+        "deprecated-argument",
+        "deprecated-class",
+    )
+    def visit_call(self, node: nodes.Call) -> None:
         """Called when a :class:`nodes.Call` node is visited."""
-        pass
+        self.check_deprecated_class_in_call(node)
+        for inferred in infer_all(node.func):
+            # Calling entry point for deprecation check logic.
+            self.check_deprecated_method(node, inferred)

-    @utils.only_required_for_messages('deprecated-module', 'deprecated-class')
-    def visit_import(self, node: nodes.Import) ->None:
+    @utils.only_required_for_messages(
+        "deprecated-module",
+        "deprecated-class",
+    )
+    def visit_import(self, node: nodes.Import) -> None:
         """Triggered when an import statement is seen."""
-        pass
+        for name in (name for name, _ in node.names):
+            self.check_deprecated_module(node, name)
+            if "." in name:
+                # Checking deprecation for import module with class
+                mod_name, class_name = name.split(".", 1)
+                self.check_deprecated_class(node, mod_name, (class_name,))

-    def deprecated_decorators(self) ->Iterable[str]:
+    def deprecated_decorators(self) -> Iterable[str]:
         """Callback returning the deprecated decorators.

         Returns:
             collections.abc.Container of deprecated decorator names.
         """
-        pass
+        return ()

-    @utils.only_required_for_messages('deprecated-decorator')
-    def visit_decorators(self, node: nodes.Decorators) ->None:
+    @utils.only_required_for_messages("deprecated-decorator")
+    def visit_decorators(self, node: nodes.Decorators) -> None:
         """Triggered when a decorator statement is seen."""
-        pass
+        children = list(node.get_children())
+        if not children:
+            return
+        if isinstance(children[0], nodes.Call):
+            inf = safe_infer(children[0].func)
+        else:
+            inf = safe_infer(children[0])
+        qname = inf.qname() if inf else None
+        if qname in self.deprecated_decorators():
+            self.add_message("deprecated-decorator", node=node, args=qname)

-    @utils.only_required_for_messages('deprecated-module', 'deprecated-class')
-    def visit_importfrom(self, node: nodes.ImportFrom) ->None:
+    @utils.only_required_for_messages(
+        "deprecated-module",
+        "deprecated-class",
+    )
+    def visit_importfrom(self, node: nodes.ImportFrom) -> None:
         """Triggered when a from statement is seen."""
-        pass
+        basename = node.modname
+        basename = get_import_name(node, basename)
+        self.check_deprecated_module(node, basename)
+        class_names = (name for name, _ in node.names)
+        self.check_deprecated_class(node, basename, class_names)

-    def deprecated_methods(self) ->Container[str]:
+    def deprecated_methods(self) -> Container[str]:
         """Callback returning the deprecated methods/functions.

         Returns:
             collections.abc.Container of deprecated function/method names.
         """
-        pass
+        return ()

-    def deprecated_arguments(self, method: str) ->Iterable[tuple[int | None,
-        str]]:
+    def deprecated_arguments(self, method: str) -> Iterable[tuple[int | None, str]]:
         """Callback returning the deprecated arguments of method/function.

         Args:
@@ -113,17 +183,18 @@ class DeprecatedMixin(BaseChecker):
             .. code-block:: python
                 ((1, 'arg2'), (3, 'arg4'))
         """
-        pass
+        # pylint: disable=unused-argument
+        return ()

-    def deprecated_modules(self) ->Iterable[str]:
+    def deprecated_modules(self) -> Iterable[str]:
         """Callback returning the deprecated modules.

         Returns:
             collections.abc.Container of deprecated module names.
         """
-        pass
+        return ()

-    def deprecated_classes(self, module: str) ->Iterable[str]:
+    def deprecated_classes(self, module: str) -> Iterable[str]:
         """Callback returning the deprecated classes of module.

         Args:
@@ -132,34 +203,85 @@ class DeprecatedMixin(BaseChecker):
         Returns:
             collections.abc.Container of deprecated class names.
         """
-        pass
+        # pylint: disable=unused-argument
+        return ()

-    def deprecated_attributes(self) ->Iterable[str]:
+    def deprecated_attributes(self) -> Iterable[str]:
         """Callback returning the deprecated attributes."""
-        pass
+        return ()

-    def check_deprecated_attribute(self, node: astroid.Attribute) ->None:
+    def check_deprecated_attribute(self, node: astroid.Attribute) -> None:
         """Checks if the attribute is deprecated."""
-        pass
+        inferred_expr = safe_infer(node.expr)
+        if not isinstance(inferred_expr, (nodes.ClassDef, Instance, nodes.Module)):
+            return
+        attribute_qname = ".".join((inferred_expr.qname(), node.attrname))
+        for deprecated_name in self.deprecated_attributes():
+            if attribute_qname == deprecated_name:
+                self.add_message(
+                    "deprecated-attribute",
+                    node=node,
+                    args=(attribute_qname,),
+                    confidence=INFERENCE,
+                )

-    def check_deprecated_module(self, node: nodes.Import, mod_path: (str |
-        None)) ->None:
+    def check_deprecated_module(self, node: nodes.Import, mod_path: str | None) -> None:
         """Checks if the module is deprecated."""
-        pass
+        for mod_name in self.deprecated_modules():
+            if mod_path == mod_name or mod_path and mod_path.startswith(mod_name + "."):
+                self.add_message("deprecated-module", node=node, args=mod_path)

-    def check_deprecated_method(self, node: nodes.Call, inferred: nodes.NodeNG
-        ) ->None:
+    def check_deprecated_method(self, node: nodes.Call, inferred: nodes.NodeNG) -> None:
         """Executes the checker for the given node.

         This method should be called from the checker implementing this mixin.
         """
-        pass
+        # Reject nodes which aren't of interest to us.
+        if not isinstance(inferred, ACCEPTABLE_NODES):
+            return
+
+        if isinstance(node.func, nodes.Attribute):
+            func_name = node.func.attrname
+        elif isinstance(node.func, nodes.Name):
+            func_name = node.func.name
+        else:
+            # Not interested in other nodes.
+            return
+
+        qnames = {inferred.qname(), func_name}
+        if any(name in self.deprecated_methods() for name in qnames):
+            self.add_message("deprecated-method", node=node, args=(func_name,))
+            return
+        num_of_args = len(node.args)
+        kwargs = {kw.arg for kw in node.keywords} if node.keywords else {}
+        deprecated_arguments = (self.deprecated_arguments(qn) for qn in qnames)
+        for position, arg_name in chain(*deprecated_arguments):
+            if arg_name in kwargs:
+                # function was called with deprecated argument as keyword argument
+                self.add_message(
+                    "deprecated-argument", node=node, args=(arg_name, func_name)
+                )
+            elif position is not None and position < num_of_args:
+                # function was called with deprecated argument as positional argument
+                self.add_message(
+                    "deprecated-argument", node=node, args=(arg_name, func_name)
+                )

-    def check_deprecated_class(self, node: nodes.NodeNG, mod_name: str,
-        class_names: Iterable[str]) ->None:
+    def check_deprecated_class(
+        self, node: nodes.NodeNG, mod_name: str, class_names: Iterable[str]
+    ) -> None:
         """Checks if the class is deprecated."""
-        pass
+        for class_name in class_names:
+            if class_name in self.deprecated_classes(mod_name):
+                self.add_message(
+                    "deprecated-class", node=node, args=(class_name, mod_name)
+                )

-    def check_deprecated_class_in_call(self, node: nodes.Call) ->None:
+    def check_deprecated_class_in_call(self, node: nodes.Call) -> None:
         """Checks if call the deprecated class."""
-        pass
+        if isinstance(node.func, nodes.Attribute) and isinstance(
+            node.func.expr, nodes.Name
+        ):
+            mod_name = node.func.expr.name
+            class_name = node.func.attrname
+            self.check_deprecated_class(node, mod_name, (class_name,))
diff --git a/pylint/checkers/design_analysis.py b/pylint/checkers/design_analysis.py
index 0c785dc35..78378e92c 100644
--- a/pylint/checkers/design_analysis.py
+++ b/pylint/checkers/design_analysis.py
@@ -1,117 +1,291 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Check for signs of poor design."""
+
 from __future__ import annotations
+
 import re
 from collections import defaultdict
 from collections.abc import Iterator
 from typing import TYPE_CHECKING
+
 import astroid
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.checkers.utils import is_enum, only_required_for_messages
 from pylint.typing import MessageDefinitionTuple
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-MSGS: dict[str, MessageDefinitionTuple] = {'R0901': (
-    'Too many ancestors (%s/%s)', 'too-many-ancestors',
-    'Used when class has too many parent classes, try to reduce this to get a simpler (and so easier to use) class.'
-    ), 'R0902': ('Too many instance attributes (%s/%s)',
-    'too-many-instance-attributes',
-    'Used when class has too many instance attributes, try to reduce this to get a simpler (and so easier to use) class.'
-    ), 'R0903': ('Too few public methods (%s/%s)', 'too-few-public-methods',
-    "Used when class has too few public methods, so be sure it's really worth it."
-    ), 'R0904': ('Too many public methods (%s/%s)',
-    'too-many-public-methods',
-    'Used when class has too many public methods, try to reduce this to get a simpler (and so easier to use) class.'
-    ), 'R0911': ('Too many return statements (%s/%s)',
-    'too-many-return-statements',
-    'Used when a function or method has too many return statement, making it hard to follow.'
-    ), 'R0912': ('Too many branches (%s/%s)', 'too-many-branches',
-    'Used when a function or method has too many branches, making it hard to follow.'
-    ), 'R0913': ('Too many arguments (%s/%s)', 'too-many-arguments',
-    'Used when a function or method takes too many arguments.'), 'R0914': (
-    'Too many local variables (%s/%s)', 'too-many-locals',
-    'Used when a function or method has too many local variables.'),
-    'R0915': ('Too many statements (%s/%s)', 'too-many-statements',
-    'Used when a function or method has too many statements. You should then split it in smaller functions / methods.'
-    ), 'R0916': ('Too many boolean expressions in if statement (%s/%s)',
-    'too-many-boolean-expressions',
-    'Used when an if statement contains too many boolean expressions.'),
-    'R0917': ('Too many positional arguments in a function call.',
-    'too-many-positional',
-    'Will be implemented in https://github.com/pylint-dev/pylint/issues/9099,msgid/symbol pair reserved for compatibility with ruff, see https://github.com/astral-sh/ruff/issues/8946.'
-    )}
-SPECIAL_OBJ = re.compile('^_{2}[a-z]+_{2}$')
-DATACLASSES_DECORATORS = frozenset({'dataclass', 'attrs'})
-DATACLASS_IMPORT = 'dataclasses'
-ATTRS_DECORATORS = frozenset({'define', 'frozen'})
-ATTRS_IMPORT = 'attrs'
-TYPING_NAMEDTUPLE = 'typing.NamedTuple'
-TYPING_TYPEDDICT = 'typing.TypedDict'
-TYPING_EXTENSIONS_TYPEDDICT = 'typing_extensions.TypedDict'
-STDLIB_CLASSES_IGNORE_ANCESTOR = frozenset(('builtins.object',
-    'builtins.tuple', 'builtins.dict', 'builtins.list', 'builtins.set',
-    'bulitins.frozenset', 'collections.ChainMap', 'collections.Counter',
-    'collections.OrderedDict', 'collections.UserDict',
-    'collections.UserList', 'collections.UserString',
-    'collections.defaultdict', 'collections.deque',
-    'collections.namedtuple', '_collections_abc.Awaitable',
-    '_collections_abc.Coroutine', '_collections_abc.AsyncIterable',
-    '_collections_abc.AsyncIterator', '_collections_abc.AsyncGenerator',
-    '_collections_abc.Hashable', '_collections_abc.Iterable',
-    '_collections_abc.Iterator', '_collections_abc.Generator',
-    '_collections_abc.Reversible', '_collections_abc.Sized',
-    '_collections_abc.Container', '_collections_abc.Collection',
-    '_collections_abc.Set', '_collections_abc.MutableSet',
-    '_collections_abc.Mapping', '_collections_abc.MutableMapping',
-    '_collections_abc.MappingView', '_collections_abc.KeysView',
-    '_collections_abc.ItemsView', '_collections_abc.ValuesView',
-    '_collections_abc.Sequence', '_collections_abc.MutableSequence',
-    '_collections_abc.ByteString', 'typing.Tuple', 'typing.List',
-    'typing.Dict', 'typing.Set', 'typing.FrozenSet', 'typing.Deque',
-    'typing.DefaultDict', 'typing.OrderedDict', 'typing.Counter',
-    'typing.ChainMap', 'typing.Awaitable', 'typing.Coroutine',
-    'typing.AsyncIterable', 'typing.AsyncIterator', 'typing.AsyncGenerator',
-    'typing.Iterable', 'typing.Iterator', 'typing.Generator',
-    'typing.Reversible', 'typing.Container', 'typing.Collection',
-    'typing.AbstractSet', 'typing.MutableSet', 'typing.Mapping',
-    'typing.MutableMapping', 'typing.Sequence', 'typing.MutableSequence',
-    'typing.ByteString', 'typing.MappingView', 'typing.KeysView',
-    'typing.ItemsView', 'typing.ValuesView', 'typing.ContextManager',
-    'typing.AsyncContextManager', 'typing.Hashable', 'typing.Sized',
-    TYPING_NAMEDTUPLE, TYPING_TYPEDDICT, TYPING_EXTENSIONS_TYPEDDICT))
-
-
-def _is_exempt_from_public_methods(node: astroid.ClassDef) ->bool:
+
+MSGS: dict[str, MessageDefinitionTuple] = (
+    {  # pylint: disable=consider-using-namedtuple-or-dataclass
+        "R0901": (
+            "Too many ancestors (%s/%s)",
+            "too-many-ancestors",
+            "Used when class has too many parent classes, try to reduce "
+            "this to get a simpler (and so easier to use) class.",
+        ),
+        "R0902": (
+            "Too many instance attributes (%s/%s)",
+            "too-many-instance-attributes",
+            "Used when class has too many instance attributes, try to reduce "
+            "this to get a simpler (and so easier to use) class.",
+        ),
+        "R0903": (
+            "Too few public methods (%s/%s)",
+            "too-few-public-methods",
+            "Used when class has too few public methods, so be sure it's "
+            "really worth it.",
+        ),
+        "R0904": (
+            "Too many public methods (%s/%s)",
+            "too-many-public-methods",
+            "Used when class has too many public methods, try to reduce "
+            "this to get a simpler (and so easier to use) class.",
+        ),
+        "R0911": (
+            "Too many return statements (%s/%s)",
+            "too-many-return-statements",
+            "Used when a function or method has too many return statement, "
+            "making it hard to follow.",
+        ),
+        "R0912": (
+            "Too many branches (%s/%s)",
+            "too-many-branches",
+            "Used when a function or method has too many branches, "
+            "making it hard to follow.",
+        ),
+        "R0913": (
+            "Too many arguments (%s/%s)",
+            "too-many-arguments",
+            "Used when a function or method takes too many arguments.",
+        ),
+        "R0914": (
+            "Too many local variables (%s/%s)",
+            "too-many-locals",
+            "Used when a function or method has too many local variables.",
+        ),
+        "R0915": (
+            "Too many statements (%s/%s)",
+            "too-many-statements",
+            "Used when a function or method has too many statements. You "
+            "should then split it in smaller functions / methods.",
+        ),
+        "R0916": (
+            "Too many boolean expressions in if statement (%s/%s)",
+            "too-many-boolean-expressions",
+            "Used when an if statement contains too many boolean expressions.",
+        ),
+        "R0917": (
+            "Too many positional arguments in a function call.",
+            "too-many-positional",
+            "Will be implemented in https://github.com/pylint-dev/pylint/issues/9099,"
+            "msgid/symbol pair reserved for compatibility with ruff, "
+            "see https://github.com/astral-sh/ruff/issues/8946.",
+        ),
+    }
+)
+SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
+DATACLASSES_DECORATORS = frozenset({"dataclass", "attrs"})
+DATACLASS_IMPORT = "dataclasses"
+ATTRS_DECORATORS = frozenset({"define", "frozen"})
+ATTRS_IMPORT = "attrs"
+TYPING_NAMEDTUPLE = "typing.NamedTuple"
+TYPING_TYPEDDICT = "typing.TypedDict"
+TYPING_EXTENSIONS_TYPEDDICT = "typing_extensions.TypedDict"
+
+# Set of stdlib classes to ignore when calculating number of ancestors
+STDLIB_CLASSES_IGNORE_ANCESTOR = frozenset(
+    (
+        "builtins.object",
+        "builtins.tuple",
+        "builtins.dict",
+        "builtins.list",
+        "builtins.set",
+        "bulitins.frozenset",
+        "collections.ChainMap",
+        "collections.Counter",
+        "collections.OrderedDict",
+        "collections.UserDict",
+        "collections.UserList",
+        "collections.UserString",
+        "collections.defaultdict",
+        "collections.deque",
+        "collections.namedtuple",
+        "_collections_abc.Awaitable",
+        "_collections_abc.Coroutine",
+        "_collections_abc.AsyncIterable",
+        "_collections_abc.AsyncIterator",
+        "_collections_abc.AsyncGenerator",
+        "_collections_abc.Hashable",
+        "_collections_abc.Iterable",
+        "_collections_abc.Iterator",
+        "_collections_abc.Generator",
+        "_collections_abc.Reversible",
+        "_collections_abc.Sized",
+        "_collections_abc.Container",
+        "_collections_abc.Collection",
+        "_collections_abc.Set",
+        "_collections_abc.MutableSet",
+        "_collections_abc.Mapping",
+        "_collections_abc.MutableMapping",
+        "_collections_abc.MappingView",
+        "_collections_abc.KeysView",
+        "_collections_abc.ItemsView",
+        "_collections_abc.ValuesView",
+        "_collections_abc.Sequence",
+        "_collections_abc.MutableSequence",
+        "_collections_abc.ByteString",
+        "typing.Tuple",
+        "typing.List",
+        "typing.Dict",
+        "typing.Set",
+        "typing.FrozenSet",
+        "typing.Deque",
+        "typing.DefaultDict",
+        "typing.OrderedDict",
+        "typing.Counter",
+        "typing.ChainMap",
+        "typing.Awaitable",
+        "typing.Coroutine",
+        "typing.AsyncIterable",
+        "typing.AsyncIterator",
+        "typing.AsyncGenerator",
+        "typing.Iterable",
+        "typing.Iterator",
+        "typing.Generator",
+        "typing.Reversible",
+        "typing.Container",
+        "typing.Collection",
+        "typing.AbstractSet",
+        "typing.MutableSet",
+        "typing.Mapping",
+        "typing.MutableMapping",
+        "typing.Sequence",
+        "typing.MutableSequence",
+        "typing.ByteString",
+        "typing.MappingView",
+        "typing.KeysView",
+        "typing.ItemsView",
+        "typing.ValuesView",
+        "typing.ContextManager",
+        "typing.AsyncContextManager",
+        "typing.Hashable",
+        "typing.Sized",
+        TYPING_NAMEDTUPLE,
+        TYPING_TYPEDDICT,
+        TYPING_EXTENSIONS_TYPEDDICT,
+    )
+)
+
+
+def _is_exempt_from_public_methods(node: astroid.ClassDef) -> bool:
     """Check if a class is exempt from too-few-public-methods."""
-    pass
+    # If it's a typing.Namedtuple, typing.TypedDict or an Enum
+    for ancestor in node.ancestors():
+        if is_enum(ancestor):
+            return True
+        if ancestor.qname() in (
+            TYPING_NAMEDTUPLE,
+            TYPING_TYPEDDICT,
+            TYPING_EXTENSIONS_TYPEDDICT,
+        ):
+            return True
+
+    # Or if it's a dataclass
+    if not node.decorators:
+        return False
+
+    root_locals = set(node.root().locals)
+    for decorator in node.decorators.nodes:
+        if isinstance(decorator, astroid.Call):
+            decorator = decorator.func
+        if not isinstance(decorator, (astroid.Name, astroid.Attribute)):
+            continue
+        if isinstance(decorator, astroid.Name):
+            name = decorator.name
+        else:
+            name = decorator.attrname
+        if name in DATACLASSES_DECORATORS and (
+            root_locals.intersection(DATACLASSES_DECORATORS)
+            or DATACLASS_IMPORT in root_locals
+        ):
+            return True
+        if name in ATTRS_DECORATORS and (
+            root_locals.intersection(ATTRS_DECORATORS) or ATTRS_IMPORT in root_locals
+        ):
+            return True
+    return False


-def _count_boolean_expressions(bool_op: nodes.BoolOp) ->int:
+def _count_boolean_expressions(bool_op: nodes.BoolOp) -> int:
     """Counts the number of boolean expressions in BoolOp `bool_op` (recursive).

     example: a and (b or c or (d and e)) ==> 5 boolean expressions
     """
-    pass
+    nb_bool_expr = 0
+    for bool_expr in bool_op.get_children():
+        if isinstance(bool_expr, astroid.BoolOp):
+            nb_bool_expr += _count_boolean_expressions(bool_expr)
+        else:
+            nb_bool_expr += 1
+    return nb_bool_expr


-def _get_parents_iter(node: nodes.ClassDef, ignored_parents: frozenset[str]
-    ) ->Iterator[nodes.ClassDef]:
-    """Get parents of ``node``, excluding ancestors of ``ignored_parents``.
+def _count_methods_in_class(node: nodes.ClassDef) -> int:
+    all_methods = sum(1 for method in node.methods() if not method.name.startswith("_"))
+    # Special methods count towards the number of public methods,
+    # but don't count towards there being too many methods.
+    for method in node.mymethods():
+        if SPECIAL_OBJ.search(method.name) and method.name != "__init__":
+            all_methods += 1
+    return all_methods
+
+
+def _get_parents_iter(
+    node: nodes.ClassDef, ignored_parents: frozenset[str]
+) -> Iterator[nodes.ClassDef]:
+    r"""Get parents of ``node``, excluding ancestors of ``ignored_parents``.

     If we have the following inheritance diagram:

              F
             /
         D  E
-         \\/
+         \/
           B  C
-           \\/
+           \/
             A      # class A(B, C): ...

     And ``ignored_parents`` is ``{"E"}``, then this function will return
     ``{A, B, C, D}`` -- both ``E`` and its ancestors are excluded.
     """
-    pass
+    parents: set[nodes.ClassDef] = set()
+    to_explore = list(node.ancestors(recurs=False))
+    while to_explore:
+        parent = to_explore.pop()
+        if parent.qname() in ignored_parents:
+            continue
+        if parent not in parents:
+            # This guard might appear to be performing the same function as
+            # adding the resolved parents to a set to eliminate duplicates
+            # (legitimate due to diamond inheritance patterns), but its
+            # additional purpose is to prevent cycles (not normally possible,
+            # but potential due to inference) and thus guarantee termination
+            # of the while-loop
+            yield parent
+            parents.add(parent)
+            to_explore.extend(parent.ancestors(recurs=False))
+
+
+def _get_parents(
+    node: nodes.ClassDef, ignored_parents: frozenset[str]
+) -> set[nodes.ClassDef]:
+    return set(_get_parents_iter(node, ignored_parents))


 class MisdesignChecker(BaseChecker):
@@ -121,115 +295,382 @@ class MisdesignChecker(BaseChecker):
     * number of methods, attributes, local variables...
     * size, complexity of functions, methods
     """
-    name = 'design'
+
+    # configuration section name
+    name = "design"
+    # messages
     msgs = MSGS
-    options = ('max-args', {'default': 5, 'type': 'int', 'metavar': '<int>',
-        'help': 'Maximum number of arguments for function / method.'}), (
-        'max-locals', {'default': 15, 'type': 'int', 'metavar': '<int>',
-        'help': 'Maximum number of locals for function / method body.'}), (
-        'max-returns', {'default': 6, 'type': 'int', 'metavar': '<int>',
-        'help': 'Maximum number of return / yield for function / method body.'}
-        ), ('max-branches', {'default': 12, 'type': 'int', 'metavar':
-        '<int>', 'help':
-        'Maximum number of branch for function / method body.'}), (
-        'max-statements', {'default': 50, 'type': 'int', 'metavar': '<int>',
-        'help': 'Maximum number of statements in function / method body.'}), (
-        'max-parents', {'default': 7, 'type': 'int', 'metavar': '<num>',
-        'help': 'Maximum number of parents for a class (see R0901).'}), (
-        'ignored-parents', {'default': (), 'type': 'csv', 'metavar':
-        '<comma separated list of class names>', 'help':
-        'List of qualified class names to ignore when counting class parents (see R0901)'
-        }), ('max-attributes', {'default': 7, 'type': 'int', 'metavar':
-        '<num>', 'help':
-        'Maximum number of attributes for a class (see R0902).'}), (
-        'min-public-methods', {'default': 2, 'type': 'int', 'metavar':
-        '<num>', 'help':
-        'Minimum number of public methods for a class (see R0903).'}), (
-        'max-public-methods', {'default': 20, 'type': 'int', 'metavar':
-        '<num>', 'help':
-        'Maximum number of public methods for a class (see R0904).'}), (
-        'max-bool-expr', {'default': 5, 'type': 'int', 'metavar': '<num>',
-        'help':
-        'Maximum number of boolean expressions in an if statement (see R0916).'
-        }), ('exclude-too-few-public-methods', {'default': [], 'type':
-        'regexp_csv', 'metavar': '<pattern>[,<pattern>...]', 'help':
-        'List of regular expressions of class ancestor names to ignore when counting public methods (see R0903)'
-        })
-
-    def __init__(self, linter: PyLinter) ->None:
+    # configuration options
+    options = (
+        (
+            "max-args",
+            {
+                "default": 5,
+                "type": "int",
+                "metavar": "<int>",
+                "help": "Maximum number of arguments for function / method.",
+            },
+        ),
+        (
+            "max-locals",
+            {
+                "default": 15,
+                "type": "int",
+                "metavar": "<int>",
+                "help": "Maximum number of locals for function / method body.",
+            },
+        ),
+        (
+            "max-returns",
+            {
+                "default": 6,
+                "type": "int",
+                "metavar": "<int>",
+                "help": "Maximum number of return / yield for function / "
+                "method body.",
+            },
+        ),
+        (
+            "max-branches",
+            {
+                "default": 12,
+                "type": "int",
+                "metavar": "<int>",
+                "help": "Maximum number of branch for function / method body.",
+            },
+        ),
+        (
+            "max-statements",
+            {
+                "default": 50,
+                "type": "int",
+                "metavar": "<int>",
+                "help": "Maximum number of statements in function / method body.",
+            },
+        ),
+        (
+            "max-parents",
+            {
+                "default": 7,
+                "type": "int",
+                "metavar": "<num>",
+                "help": "Maximum number of parents for a class (see R0901).",
+            },
+        ),
+        (
+            "ignored-parents",
+            {
+                "default": (),
+                "type": "csv",
+                "metavar": "<comma separated list of class names>",
+                "help": "List of qualified class names to ignore when counting class parents (see R0901)",
+            },
+        ),
+        (
+            "max-attributes",
+            {
+                "default": 7,
+                "type": "int",
+                "metavar": "<num>",
+                "help": "Maximum number of attributes for a class \
+(see R0902).",
+            },
+        ),
+        (
+            "min-public-methods",
+            {
+                "default": 2,
+                "type": "int",
+                "metavar": "<num>",
+                "help": "Minimum number of public methods for a class \
+(see R0903).",
+            },
+        ),
+        (
+            "max-public-methods",
+            {
+                "default": 20,
+                "type": "int",
+                "metavar": "<num>",
+                "help": "Maximum number of public methods for a class \
+(see R0904).",
+            },
+        ),
+        (
+            "max-bool-expr",
+            {
+                "default": 5,
+                "type": "int",
+                "metavar": "<num>",
+                "help": "Maximum number of boolean expressions in an if "
+                "statement (see R0916).",
+            },
+        ),
+        (
+            "exclude-too-few-public-methods",
+            {
+                "default": [],
+                "type": "regexp_csv",
+                "metavar": "<pattern>[,<pattern>...]",
+                "help": "List of regular expressions of class ancestor names "
+                "to ignore when counting public methods (see R0903)",
+            },
+        ),
+    )
+
+    def __init__(self, linter: PyLinter) -> None:
         super().__init__(linter)
         self._returns: list[int]
         self._branches: defaultdict[nodes.LocalsDictNodeNG, int]
         self._stmts: list[int]

-    def open(self) ->None:
+    def open(self) -> None:
         """Initialize visit variables."""
-        pass
+        self.linter.stats.reset_node_count()
+        self._returns = []
+        self._branches = defaultdict(int)
+        self._stmts = []
+        self._exclude_too_few_public_methods = (
+            self.linter.config.exclude_too_few_public_methods
+        )

-    @only_required_for_messages('too-many-ancestors',
-        'too-many-instance-attributes', 'too-few-public-methods',
-        'too-many-public-methods')
-    def visit_classdef(self, node: nodes.ClassDef) ->None:
+    def _inc_all_stmts(self, amount: int) -> None:
+        for i, _ in enumerate(self._stmts):
+            self._stmts[i] += amount
+
+    @only_required_for_messages(
+        "too-many-ancestors",
+        "too-many-instance-attributes",
+        "too-few-public-methods",
+        "too-many-public-methods",
+    )
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
         """Check size of inheritance hierarchy and number of instance attributes."""
-        pass
+        parents = _get_parents(
+            node,
+            STDLIB_CLASSES_IGNORE_ANCESTOR.union(self.linter.config.ignored_parents),
+        )
+        nb_parents = len(parents)
+        if nb_parents > self.linter.config.max_parents:
+            self.add_message(
+                "too-many-ancestors",
+                node=node,
+                args=(nb_parents, self.linter.config.max_parents),
+            )
+
+        # Something at inference time is modifying instance_attrs to add
+        # properties from parent classes. Given how much we cache inference
+        # results, mutating instance_attrs can become a real mess. Filter
+        # them out here until the root cause is solved.
+        # https://github.com/pylint-dev/astroid/issues/2273
+        root = node.root()
+        filtered_attrs = [
+            k for (k, v) in node.instance_attrs.items() if v[0].root() is root
+        ]
+        if len(filtered_attrs) > self.linter.config.max_attributes:
+            self.add_message(
+                "too-many-instance-attributes",
+                node=node,
+                args=(len(filtered_attrs), self.linter.config.max_attributes),
+            )

-    @only_required_for_messages('too-few-public-methods',
-        'too-many-public-methods')
-    def leave_classdef(self, node: nodes.ClassDef) ->None:
+    @only_required_for_messages("too-few-public-methods", "too-many-public-methods")
+    def leave_classdef(self, node: nodes.ClassDef) -> None:
         """Check number of public methods."""
-        pass
+        my_methods = sum(
+            1 for method in node.mymethods() if not method.name.startswith("_")
+        )
+
+        # Does the class contain less than n public methods ?
+        # This checks only the methods defined in the current class,
+        # since the user might not have control over the classes
+        # from the ancestors. It avoids some false positives
+        # for classes such as unittest.TestCase, which provides
+        # a lot of assert methods. It doesn't make sense to warn
+        # when the user subclasses TestCase to add his own tests.
+        if my_methods > self.linter.config.max_public_methods:
+            self.add_message(
+                "too-many-public-methods",
+                node=node,
+                args=(my_methods, self.linter.config.max_public_methods),
+            )
+
+        # Stop here if the class is excluded via configuration.
+        if node.type == "class" and self._exclude_too_few_public_methods:
+            for ancestor in node.ancestors():
+                if any(
+                    pattern.match(ancestor.qname())
+                    for pattern in self._exclude_too_few_public_methods
+                ):
+                    return
+
+        # Stop here for exception, metaclass, interface classes and other
+        # classes for which we don't need to count the methods.
+        if node.type != "class" or _is_exempt_from_public_methods(node):
+            return
+
+        # Does the class contain more than n public methods ?
+        # This checks all the methods defined by ancestors and
+        # by the current class.
+        all_methods = _count_methods_in_class(node)
+        if all_methods < self.linter.config.min_public_methods:
+            self.add_message(
+                "too-few-public-methods",
+                node=node,
+                args=(all_methods, self.linter.config.min_public_methods),
+            )

-    @only_required_for_messages('too-many-return-statements',
-        'too-many-branches', 'too-many-arguments', 'too-many-locals',
-        'too-many-statements', 'keyword-arg-before-vararg')
-    def visit_functiondef(self, node: nodes.FunctionDef) ->None:
+    @only_required_for_messages(
+        "too-many-return-statements",
+        "too-many-branches",
+        "too-many-arguments",
+        "too-many-locals",
+        "too-many-statements",
+        "keyword-arg-before-vararg",
+    )
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
         """Check function name, docstring, arguments, redefinition,
         variable names, max locals.
         """
-        pass
+        # init branch and returns counters
+        self._returns.append(0)
+        # check number of arguments
+        args = node.args.args + node.args.posonlyargs + node.args.kwonlyargs
+        ignored_argument_names = self.linter.config.ignored_argument_names
+        if args is not None:
+            ignored_args_num = 0
+            if ignored_argument_names:
+                ignored_args_num = sum(
+                    1 for arg in args if ignored_argument_names.match(arg.name)
+                )
+
+            argnum = len(args) - ignored_args_num
+            if argnum > self.linter.config.max_args:
+                self.add_message(
+                    "too-many-arguments",
+                    node=node,
+                    args=(len(args), self.linter.config.max_args),
+                )
+        else:
+            ignored_args_num = 0
+        # check number of local variables
+        locnum = len(node.locals) - ignored_args_num
+
+        # decrement number of local variables if '_' is one of them
+        if "_" in node.locals:
+            locnum -= 1
+
+        if locnum > self.linter.config.max_locals:
+            self.add_message(
+                "too-many-locals",
+                node=node,
+                args=(locnum, self.linter.config.max_locals),
+            )
+        # init new statements counter
+        self._stmts.append(1)
+
     visit_asyncfunctiondef = visit_functiondef

-    @only_required_for_messages('too-many-return-statements',
-        'too-many-branches', 'too-many-arguments', 'too-many-locals',
-        'too-many-statements')
-    def leave_functiondef(self, node: nodes.FunctionDef) ->None:
+    @only_required_for_messages(
+        "too-many-return-statements",
+        "too-many-branches",
+        "too-many-arguments",
+        "too-many-locals",
+        "too-many-statements",
+    )
+    def leave_functiondef(self, node: nodes.FunctionDef) -> None:
         """Most of the work is done here on close:
         checks for max returns, branch, return in __init__.
         """
-        pass
+        returns = self._returns.pop()
+        if returns > self.linter.config.max_returns:
+            self.add_message(
+                "too-many-return-statements",
+                node=node,
+                args=(returns, self.linter.config.max_returns),
+            )
+        branches = self._branches[node]
+        if branches > self.linter.config.max_branches:
+            self.add_message(
+                "too-many-branches",
+                node=node,
+                args=(branches, self.linter.config.max_branches),
+            )
+        # check number of statements
+        stmts = self._stmts.pop()
+        if stmts > self.linter.config.max_statements:
+            self.add_message(
+                "too-many-statements",
+                node=node,
+                args=(stmts, self.linter.config.max_statements),
+            )
+
     leave_asyncfunctiondef = leave_functiondef

-    def visit_return(self, _: nodes.Return) ->None:
+    def visit_return(self, _: nodes.Return) -> None:
         """Count number of returns."""
-        pass
+        if not self._returns:
+            return  # return outside function, reported by the base checker
+        self._returns[-1] += 1

-    def visit_default(self, node: nodes.NodeNG) ->None:
+    def visit_default(self, node: nodes.NodeNG) -> None:
         """Default visit method -> increments the statements counter if
         necessary.
         """
-        pass
+        if node.is_statement:
+            self._inc_all_stmts(1)

-    def visit_try(self, node: nodes.Try) ->None:
+    def visit_try(self, node: nodes.Try) -> None:
         """Increments the branches counter."""
-        pass
+        branches = len(node.handlers)
+        if node.orelse:
+            branches += 1
+        if node.finalbody:
+            branches += 1
+        self._inc_branch(node, branches)
+        self._inc_all_stmts(branches)

-    @only_required_for_messages('too-many-boolean-expressions',
-        'too-many-branches')
-    def visit_if(self, node: nodes.If) ->None:
+    @only_required_for_messages("too-many-boolean-expressions", "too-many-branches")
+    def visit_if(self, node: nodes.If) -> None:
         """Increments the branches counter and checks boolean expressions."""
-        pass
+        self._check_boolean_expressions(node)
+        branches = 1
+        # don't double count If nodes coming from some 'elif'
+        if node.orelse and (
+            len(node.orelse) > 1 or not isinstance(node.orelse[0], astroid.If)
+        ):
+            branches += 1
+        self._inc_branch(node, branches)
+        self._inc_all_stmts(branches)

-    def _check_boolean_expressions(self, node: nodes.If) ->None:
+    def _check_boolean_expressions(self, node: nodes.If) -> None:
         """Go through "if" node `node` and count its boolean expressions
         if the 'if' node test is a BoolOp node.
         """
-        pass
+        condition = node.test
+        if not isinstance(condition, astroid.BoolOp):
+            return
+        nb_bool_expr = _count_boolean_expressions(condition)
+        if nb_bool_expr > self.linter.config.max_bool_expr:
+            self.add_message(
+                "too-many-boolean-expressions",
+                node=condition,
+                args=(nb_bool_expr, self.linter.config.max_bool_expr),
+            )

-    def visit_while(self, node: nodes.While) ->None:
+    def visit_while(self, node: nodes.While) -> None:
         """Increments the branches counter."""
-        pass
+        branches = 1
+        if node.orelse:
+            branches += 1
+        self._inc_branch(node, branches)
+
     visit_for = visit_while

-    def _inc_branch(self, node: nodes.NodeNG, branchesnum: int=1) ->None:
+    def _inc_branch(self, node: nodes.NodeNG, branchesnum: int = 1) -> None:
         """Increments the branches counter."""
-        pass
+        self._branches[node.scope()] += branchesnum
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(MisdesignChecker(linter))
diff --git a/pylint/checkers/dunder_methods.py b/pylint/checkers/dunder_methods.py
index 461ad9d7e..4bd89c2a1 100644
--- a/pylint/checkers/dunder_methods.py
+++ b/pylint/checkers/dunder_methods.py
@@ -1,11 +1,19 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import Instance, nodes
 from astroid.util import UninferableBase
+
 from pylint.checkers import BaseChecker
 from pylint.checkers.utils import safe_infer
 from pylint.constants import DUNDER_METHODS, UNNECESSARY_DUNDER_CALL_LAMBDA_EXCEPTIONS
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter

@@ -24,18 +32,72 @@ class DunderCallChecker(BaseChecker):
     We also exclude dunder method calls on super() since
     these can't be written in an alternative manner.
     """
-    name = 'unnecessary-dunder-call'
-    msgs = {'C2801': ('Unnecessarily calls dunder method %s. %s.',
-        'unnecessary-dunder-call',
-        'Used when a dunder method is manually called instead of using the corresponding function/method/operator.'
-        )}
+
+    name = "unnecessary-dunder-call"
+    msgs = {
+        "C2801": (
+            "Unnecessarily calls dunder method %s. %s.",
+            "unnecessary-dunder-call",
+            "Used when a dunder method is manually called instead "
+            "of using the corresponding function/method/operator.",
+        ),
+    }
     options = ()

+    def open(self) -> None:
+        self._dunder_methods: dict[str, str] = {}
+        for since_vers, dunder_methods in DUNDER_METHODS.items():
+            if since_vers <= self.linter.config.py_version:
+                self._dunder_methods.update(dunder_methods)
+
     @staticmethod
-    def within_dunder_or_lambda_def(node: nodes.NodeNG) ->bool:
+    def within_dunder_or_lambda_def(node: nodes.NodeNG) -> bool:
         """Check if dunder method call is within a dunder method definition."""
-        pass
+        parent = node.parent
+        while parent is not None:
+            if (
+                isinstance(parent, nodes.FunctionDef)
+                and parent.name.startswith("__")
+                and parent.name.endswith("__")
+                or DunderCallChecker.is_lambda_rule_exception(parent, node)
+            ):
+                return True
+            parent = parent.parent
+        return False
+
+    @staticmethod
+    def is_lambda_rule_exception(ancestor: nodes.NodeNG, node: nodes.NodeNG) -> bool:
+        return (
+            isinstance(ancestor, nodes.Lambda)
+            and node.func.attrname in UNNECESSARY_DUNDER_CALL_LAMBDA_EXCEPTIONS
+        )

-    def visit_call(self, node: nodes.Call) ->None:
+    def visit_call(self, node: nodes.Call) -> None:
         """Check if method being called is an unnecessary dunder method."""
-        pass
+        if (
+            isinstance(node.func, nodes.Attribute)
+            and node.func.attrname in self._dunder_methods
+            and not self.within_dunder_or_lambda_def(node)
+            and not (
+                isinstance(node.func.expr, nodes.Call)
+                and isinstance(node.func.expr.func, nodes.Name)
+                and node.func.expr.func.name == "super"
+            )
+        ):
+            inf_expr = safe_infer(node.func.expr)
+            if not (
+                inf_expr is None or isinstance(inf_expr, (Instance, UninferableBase))
+            ):
+                # Skip dunder calls to non instantiated classes.
+                return
+
+            self.add_message(
+                "unnecessary-dunder-call",
+                node=node,
+                args=(node.func.attrname, self._dunder_methods[node.func.attrname]),
+                confidence=HIGH,
+            )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(DunderCallChecker(linter))
diff --git a/pylint/checkers/ellipsis_checker.py b/pylint/checkers/ellipsis_checker.py
index 95b4666d6..4e7e3bd35 100644
--- a/pylint/checkers/ellipsis_checker.py
+++ b/pylint/checkers/ellipsis_checker.py
@@ -1,22 +1,37 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Ellipsis checker for Python code."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.checkers.utils import only_required_for_messages
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 class EllipsisChecker(BaseChecker):
-    name = 'unnecessary_ellipsis'
-    msgs = {'W2301': ('Unnecessary ellipsis constant',
-        'unnecessary-ellipsis',
-        'Used when the ellipsis constant is encountered and can be avoided. A line of code consisting of an ellipsis is unnecessary if there is a docstring on the preceding line or if there is a statement in the same scope.'
-        )}
-
-    @only_required_for_messages('unnecessary-ellipsis')
-    def visit_const(self, node: nodes.Const) ->None:
+    name = "unnecessary_ellipsis"
+    msgs = {
+        "W2301": (
+            "Unnecessary ellipsis constant",
+            "unnecessary-ellipsis",
+            "Used when the ellipsis constant is encountered and can be avoided. "
+            "A line of code consisting of an ellipsis is unnecessary if "
+            "there is a docstring on the preceding line or if there is a "
+            "statement in the same scope.",
+        )
+    }
+
+    @only_required_for_messages("unnecessary-ellipsis")
+    def visit_const(self, node: nodes.Const) -> None:
         """Check if the ellipsis constant is used unnecessarily.

         Emits a warning when:
@@ -25,4 +40,19 @@ class EllipsisChecker(BaseChecker):
            For example: A function consisting of an ellipsis followed by a
            return statement on the next line.
         """
-        pass
+        if (
+            node.pytype() == "builtins.Ellipsis"
+            and isinstance(node.parent, nodes.Expr)
+            and (
+                (
+                    isinstance(node.parent.parent, (nodes.ClassDef, nodes.FunctionDef))
+                    and node.parent.parent.doc_node
+                )
+                or len(node.parent.parent.body) > 1
+            )
+        ):
+            self.add_message("unnecessary-ellipsis", node=node)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(EllipsisChecker(linter))
diff --git a/pylint/checkers/exceptions.py b/pylint/checkers/exceptions.py
index fe18d2c24..688dc829a 100644
--- a/pylint/checkers/exceptions.py
+++ b/pylint/checkers/exceptions.py
@@ -1,137 +1,656 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Checks for various exception related errors."""
+
 from __future__ import annotations
+
 import builtins
 import inspect
 from collections.abc import Generator
 from typing import TYPE_CHECKING, Any
+
 import astroid
 from astroid import nodes, objects, util
 from astroid.context import InferenceContext
 from astroid.typing import InferenceResult, SuccessfulInferenceResult
+
 from pylint import checkers
 from pylint.checkers import utils
 from pylint.interfaces import HIGH, INFERENCE
 from pylint.typing import MessageDefinitionTuple
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


-def _annotated_unpack_infer(stmt: nodes.NodeNG, context: (InferenceContext |
-    None)=None) ->Generator[tuple[nodes.NodeNG, SuccessfulInferenceResult],
-    None, None]:
+def _builtin_exceptions() -> set[str]:
+    def predicate(obj: Any) -> bool:
+        return isinstance(obj, type) and issubclass(obj, BaseException)
+
+    members = inspect.getmembers(builtins, predicate)
+    return {exc.__name__ for (_, exc) in members}
+
+
def _annotated_unpack_infer(
    stmt: nodes.NodeNG, context: InferenceContext | None = None
) -> Generator[tuple[nodes.NodeNG, SuccessfulInferenceResult], None, None]:
    """Recursively generate nodes inferred by the given statement.

    If the inferred value is a list or a tuple, recurse on the elements.
    Returns an iterator which yields tuples in the format
    ('original node', 'inferred node').
    """
    if isinstance(stmt, (nodes.List, nodes.Tuple)):
        # Yield each element of the container paired with its inference,
        # skipping anything that could not be inferred.
        for element in stmt.elts:
            value = utils.safe_infer(element)
            if value and not isinstance(value, util.UninferableBase):
                yield element, value
    else:
        # Single node: pair the statement itself with every inference result.
        for value in stmt.infer(context):
            if not isinstance(value, util.UninferableBase):
                yield stmt, value


def _is_raising(body: list[nodes.NodeNG]) -> bool:
    """Return whether the given statement node raises an exception."""
    for statement in body:
        if isinstance(statement, nodes.Raise):
            return True
    return False
+
+
# Message definitions shared by the exceptions checker: "E07xx" entries are
# errors, "W07xx" entries are warnings.
MSGS: dict[str, MessageDefinitionTuple] = {
    "E0701": (
        "Bad except clauses order (%s)",
        "bad-except-order",
        "Used when except clauses are not in the correct order (from the "
        "more specific to the more generic). If you don't fix the order, "
        "some exceptions may not be caught by the most specific handler.",
    ),
    "E0702": (
        "Raising %s while only classes or instances are allowed",
        "raising-bad-type",
        "Used when something which is neither a class nor an instance "
        "is raised (i.e. a `TypeError` will be raised).",
    ),
    "E0704": (
        "The raise statement is not inside an except clause",
        "misplaced-bare-raise",
        "Used when a bare raise is not used inside an except clause. "
        "This generates an error, since there are no active exceptions "
        "to be reraised. An exception to this rule is represented by "
        "a bare raise inside a finally clause, which might work, as long "
        "as an exception is raised inside the try block, but it is "
        "nevertheless a code smell that must not be relied upon.",
    ),
    "E0705": (
        "Exception cause set to something which is not an exception, nor None",
        "bad-exception-cause",
        'Used when using the syntax "raise ... from ...", '
        "where the exception cause is not an exception, "
        "nor None.",
        {"old_names": [("E0703", "bad-exception-context")]},
    ),
    "E0710": (
        "Raising a new style class which doesn't inherit from BaseException",
        "raising-non-exception",
        "Used when a new style class which doesn't inherit from "
        "BaseException is raised.",
    ),
    "E0711": (
        "NotImplemented raised - should raise NotImplementedError",
        "notimplemented-raised",
        "Used when NotImplemented is raised instead of NotImplementedError",
    ),
    "E0712": (
        "Catching an exception which doesn't inherit from Exception: %s",
        "catching-non-exception",
        "Used when a class which doesn't inherit from "
        "Exception is used as an exception in an except clause.",
    ),
    "W0702": (
        "No exception type(s) specified",
        "bare-except",
        "A bare ``except:`` clause will catch ``SystemExit`` and "
        "``KeyboardInterrupt`` exceptions, making it harder to interrupt a program "
        "with ``Control-C``, and can disguise other problems. If you want to catch "
        "all exceptions that signal program errors, use ``except Exception:`` (bare "
        "except is equivalent to ``except BaseException:``).",
    ),
    "W0718": (
        "Catching too general exception %s",
        "broad-exception-caught",
        "If you use a naked ``except Exception:`` clause, you might end up catching "
        "exceptions other than the ones you expect to catch. This can hide bugs or "
        "make it harder to debug programs when unrelated errors are hidden.",
        {"old_names": [("W0703", "broad-except")]},
    ),
    "W0705": (
        "Catching previously caught exception type %s",
        "duplicate-except",
        "Used when an except catches a type that was already caught by "
        "a previous handler.",
    ),
    "W0706": (
        "The except handler raises immediately",
        "try-except-raise",
        "Used when an except handler uses raise as its first or only "
        "operator. This is useless because it raises back the exception "
        "immediately. Remove the raise operator or the entire "
        "try-except-raise block!",
    ),
    "W0707": (
        "Consider explicitly re-raising using %s'%s from %s'",
        "raise-missing-from",
        "Python's exception chaining shows the traceback of the current exception, "
        "but also of the original exception. When you raise a new exception after "
        "another exception was caught it's likely that the second exception is a "
        "friendly re-wrapping of the first exception. In such cases `raise from` "
        "provides a better link between the two tracebacks in the final error.",
    ),
    "W0711": (
        'Exception to catch is the result of a binary "%s" operation',
        "binary-op-exception",
        "Used when the exception to catch is of the form "
        '"except A or B:".  If intending to catch multiple, '
        'rewrite as "except (A, B):"',
    ),
    "W0715": (
        "Exception arguments suggest string formatting might be intended",
        "raising-format-tuple",
        "Used when passing multiple arguments to an exception "
        "constructor, the first of them a string literal containing what "
        "appears to be placeholders intended for formatting",
    ),
    "W0716": (
        "Invalid exception operation. %s",
        "wrong-exception-operation",
        "Used when an operation is done against an exception, but the operation "
        "is not valid for the exception in question. Usually emitted when having "
        "binary operations between exceptions in except handlers.",
    ),
    "W0719": (
        "Raising too general exception: %s",
        "broad-exception-raised",
        "Raising exceptions that are too generic force you to catch exceptions "
        "generically too. It will force you to use a naked ``except Exception:`` "
        "clause. You might then end up catching exceptions other than the ones "
        "you expect to catch. This can hide bugs or make it harder to debug programs "
        "when unrelated errors are hidden.",
    ),
}


 class BaseVisitor:
     """Base class for visitors defined in this module."""

-    def __init__(self, checker: ExceptionsChecker, node: nodes.Raise) ->None:
+    def __init__(self, checker: ExceptionsChecker, node: nodes.Raise) -> None:
         self._checker = checker
         self._node = node

-    def visit_default(self, _: nodes.NodeNG) ->None:
+    def visit(self, node: SuccessfulInferenceResult) -> None:
+        name = node.__class__.__name__.lower()
+        dispatch_meth = getattr(self, "visit_" + name, None)
+        if dispatch_meth:
+            dispatch_meth(node)
+        else:
+            self.visit_default(node)
+
+    def visit_default(self, _: nodes.NodeNG) -> None:
         """Default implementation for all the nodes."""
-        pass


 class ExceptionRaiseRefVisitor(BaseVisitor):
     """Visit references (anything that is not an AST leaf)."""

+    def visit_name(self, node: nodes.Name) -> None:
+        if node.name == "NotImplemented":
+            self._checker.add_message(
+                "notimplemented-raised", node=self._node, confidence=HIGH
+            )
+            return
+        try:
+            exceptions = [
+                c
+                for _, c in _annotated_unpack_infer(node)
+                if isinstance(c, nodes.ClassDef)
+            ]
+        except astroid.InferenceError:
+            return
+
+        for exception in exceptions:
+            if self._checker._is_overgeneral_exception(exception):
+                self._checker.add_message(
+                    "broad-exception-raised",
+                    args=exception.name,
+                    node=self._node,
+                    confidence=INFERENCE,
+                )
+
+    def visit_call(self, node: nodes.Call) -> None:
+        if isinstance(node.func, nodes.Name):
+            self.visit_name(node.func)
+        if (
+            len(node.args) > 1
+            and isinstance(node.args[0], nodes.Const)
+            and isinstance(node.args[0].value, str)
+        ):
+            msg = node.args[0].value
+            if "%" in msg or ("{" in msg and "}" in msg):
+                self._checker.add_message(
+                    "raising-format-tuple", node=self._node, confidence=HIGH
+                )
+

 class ExceptionRaiseLeafVisitor(BaseVisitor):
     """Visitor for handling leaf kinds of a raise value."""
+
+    def visit_const(self, node: nodes.Const) -> None:
+        self._checker.add_message(
+            "raising-bad-type",
+            node=self._node,
+            args=node.value.__class__.__name__,
+            confidence=INFERENCE,
+        )
+
+    def visit_instance(self, instance: objects.ExceptionInstance) -> None:
+        cls = instance._proxied
+        self.visit_classdef(cls)
+
+    # Exception instances have a particular class type
     visit_exceptioninstance = visit_instance

+    def visit_classdef(self, node: nodes.ClassDef) -> None:
+        if not utils.inherit_from_std_ex(node) and utils.has_known_bases(node):
+            if node.newstyle:
+                self._checker.add_message(
+                    "raising-non-exception",
+                    node=self._node,
+                    confidence=INFERENCE,
+                )
+
+    def visit_tuple(self, _: nodes.Tuple) -> None:
+        self._checker.add_message(
+            "raising-bad-type",
+            node=self._node,
+            args="tuple",
+            confidence=INFERENCE,
+        )
+
+    def visit_default(self, node: nodes.NodeNG) -> None:
+        name = getattr(node, "name", node.__class__.__name__)
+        self._checker.add_message(
+            "raising-bad-type",
+            node=self._node,
+            args=name,
+            confidence=INFERENCE,
+        )
+

class ExceptionsChecker(checkers.BaseChecker):
    """Exception related checks."""

    name = "exceptions"
    msgs = MSGS
    options = (
        (
            "overgeneral-exceptions",
            {
                "default": ("builtins.BaseException", "builtins.Exception"),
                "type": "csv",
                "metavar": "<comma-separated class names>",
                "help": "Exceptions that will emit a warning when caught.",
            },
        ),
    )

    def open(self) -> None:
        """Cache the builtin exception names once per run, before linting."""
        self._builtin_exceptions = _builtin_exceptions()
        super().open()

    @utils.only_required_for_messages(
        "misplaced-bare-raise",
        "raising-bad-type",
        "raising-non-exception",
        "notimplemented-raised",
        "bad-exception-cause",
        "raising-format-tuple",
        "raise-missing-from",
        "broad-exception-raised",
    )
    def visit_raise(self, node: nodes.Raise) -> None:
        """Run all raise-statement checks and dispatch the visitors."""
        if node.exc is None:
            # Bare `raise`: only the placement check applies.
            self._check_misplaced_bare_raise(node)
            return

        if node.cause is None:
            self._check_raise_missing_from(node)
        else:
            self._check_bad_exception_cause(node)

        expr = node.exc
        ExceptionRaiseRefVisitor(self, node).visit(expr)

        inferred = utils.safe_infer(expr)
        if inferred is None or isinstance(inferred, util.UninferableBase):
            return
        ExceptionRaiseLeafVisitor(self, node).visit(inferred)

    def _check_misplaced_bare_raise(self, node: nodes.Raise) -> None:
        """Emit misplaced-bare-raise unless the bare raise sits in an except clause."""
        # Filter out if it's present in __exit__.
        scope = node.scope()
        if (
            isinstance(scope, nodes.FunctionDef)
            and scope.is_method()
            and scope.name == "__exit__"
        ):
            return

        current = node
        # Stop when a new scope is generated or when the raise
        # statement is found inside a Try.
        ignores = (nodes.ExceptHandler, nodes.FunctionDef)
        while current and not isinstance(current.parent, ignores):
            current = current.parent

        expected = (nodes.ExceptHandler,)
        if not current or not isinstance(current.parent, expected):
            self.add_message("misplaced-bare-raise", node=node, confidence=HIGH)

    def _check_bad_exception_cause(self, node: nodes.Raise) -> None:
        """Verify that the exception cause is properly set.

        An exception cause can be only `None` or an exception.
        """
        cause = utils.safe_infer(node.cause)
        if cause is None or isinstance(cause, util.UninferableBase):
            return

        if isinstance(cause, nodes.Const):
            # `raise ... from None` is the only acceptable constant cause.
            if cause.value is not None:
                self.add_message("bad-exception-cause", node=node, confidence=INFERENCE)
        elif not isinstance(cause, nodes.ClassDef) and not utils.inherit_from_std_ex(
            cause
        ):
            self.add_message("bad-exception-cause", node=node, confidence=INFERENCE)

    def _check_raise_missing_from(self, node: nodes.Raise) -> None:
        """Emit raise-missing-from for a raise inside `except` lacking a cause."""
        if node.exc is None:
            # This is a plain `raise`, raising the previously-caught exception. No need for a
            # cause.
            return
        # We'd like to check whether we're inside an `except` clause:
        containing_except_node = utils.find_except_wrapper_node_in_scope(node)
        if not containing_except_node:
            return
        # We found a surrounding `except`! We're almost done proving there's a
        # `raise-missing-from` here. The only thing we need to protect against is that maybe
        # the `raise` is raising the exception that was caught, possibly with some shenanigans
        # like `exc.with_traceback(whatever)`. We won't analyze these, we'll just assume
        # there's a violation on two simple cases: `raise SomeException(whatever)` and `raise
        # SomeException`.
        if containing_except_node.name is None:
            # The `except` doesn't have an `as exception:` part, meaning there's no way that
            # the `raise` is raising the same exception.
            class_of_old_error = "Exception"
            if isinstance(containing_except_node.type, (nodes.Name, nodes.Tuple)):
                # 'except ZeroDivisionError' or 'except (ZeroDivisionError, ValueError)'
                class_of_old_error = containing_except_node.type.as_string()
            self.add_message(
                "raise-missing-from",
                node=node,
                args=(
                    f"'except {class_of_old_error} as exc' and ",
                    node.as_string(),
                    "exc",
                ),
                confidence=HIGH,
            )
        elif (
            isinstance(node.exc, nodes.Call)
            and isinstance(node.exc.func, nodes.Name)
            or isinstance(node.exc, nodes.Name)
            and node.exc.name != containing_except_node.name.name
        ):
            # We have a `raise SomeException(whatever)` or a `raise SomeException`
            self.add_message(
                "raise-missing-from",
                node=node,
                args=("", node.as_string(), containing_except_node.name.name),
                confidence=HIGH,
            )

    def _check_catching_non_exception(
        self,
        handler: nodes.ExceptHandler,
        exc: SuccessfulInferenceResult,
        part: nodes.NodeNG,
    ) -> None:
        """Emit catching-non-exception when a handler catches a non-exception class."""
        if isinstance(exc, nodes.Tuple):
            # Check if it is a tuple of exceptions.
            inferred = [utils.safe_infer(elt) for elt in exc.elts]
            if any(isinstance(node, util.UninferableBase) for node in inferred):
                # Don't emit if we don't know every component.
                return
            if all(
                node
                and (utils.inherit_from_std_ex(node) or not utils.has_known_bases(node))
                for node in inferred
            ):
                return

        if not isinstance(exc, nodes.ClassDef):
            # Don't emit the warning if the inferred stmt
            # is None, but the exception handler is something else,
            # maybe it was redefined.
            if isinstance(exc, nodes.Const) and exc.value is None:
                if (
                    isinstance(handler.type, nodes.Const) and handler.type.value is None
                ) or handler.type.parent_of(exc):
                    # If the exception handler catches None or
                    # the exception component, which is None, is
                    # defined by the entire exception handler, then
                    # emit a warning.
                    self.add_message(
                        "catching-non-exception",
                        node=handler.type,
                        args=(part.as_string(),),
                    )
            else:
                self.add_message(
                    "catching-non-exception",
                    node=handler.type,
                    args=(part.as_string(),),
                )
            return

        if (
            not utils.inherit_from_std_ex(exc)
            and exc.name not in self._builtin_exceptions
        ):
            if utils.has_known_bases(exc):
                self.add_message(
                    "catching-non-exception", node=handler.type, args=(exc.name,)
                )

    def _check_try_except_raise(self, node: nodes.Try) -> None:
        """Emit try-except-raise for handlers that immediately re-raise."""

        def gather_exceptions_from_handler(
            handler: nodes.ExceptHandler,
        ) -> list[InferenceResult] | None:
            # Collect the exception types caught by a handler; None means the
            # inference failed and nothing reliable can be said.
            exceptions: list[InferenceResult] = []
            if handler.type:
                exceptions_in_handler = utils.safe_infer(handler.type)
                if isinstance(exceptions_in_handler, nodes.Tuple):
                    exceptions = list(
                        {
                            exception
                            for exception in exceptions_in_handler.elts
                            if isinstance(exception, (nodes.Name, nodes.Attribute))
                        }
                    )
                elif exceptions_in_handler:
                    exceptions = [exceptions_in_handler]
                else:
                    # Break when we cannot infer anything reliably.
                    return None
            return exceptions

        bare_raise = False
        handler_having_bare_raise = None
        exceptions_in_bare_handler: list[InferenceResult] | None = []
        for handler in node.handlers:
            if bare_raise:
                # check that subsequent handler is not parent of handler which had bare raise.
                # since utils.safe_infer can fail for bare except, check it before.
                # also break early if bare except is followed by bare except.

                excs_in_current_handler = gather_exceptions_from_handler(handler)
                if not excs_in_current_handler:
                    break
                if exceptions_in_bare_handler is None:
                    # It can be `None` when the inference failed
                    break
                for exc_in_current_handler in excs_in_current_handler:
                    inferred_current = utils.safe_infer(exc_in_current_handler)
                    if any(
                        utils.is_subclass_of(utils.safe_infer(e), inferred_current)
                        for e in exceptions_in_bare_handler
                    ):
                        bare_raise = False
                        break

            # `raise` as the first operator inside the except handler
            if _is_raising([handler.body[0]]):
                # flags when there is a bare raise
                if handler.body[0].exc is None:
                    bare_raise = True
                    handler_having_bare_raise = handler
                    exceptions_in_bare_handler = gather_exceptions_from_handler(handler)
        else:
            # for-else: only report if the bare raise survived every later handler.
            if bare_raise:
                self.add_message("try-except-raise", node=handler_having_bare_raise)

    @utils.only_required_for_messages("wrong-exception-operation")
    def visit_binop(self, node: nodes.BinOp) -> None:
        """Emit wrong-exception-operation for binary ops used as except targets."""
        if isinstance(node.parent, nodes.ExceptHandler):
            both_sides_tuple_or_uninferable = isinstance(
                utils.safe_infer(node.left), (nodes.Tuple, util.UninferableBase)
            ) and isinstance(
                utils.safe_infer(node.right), (nodes.Tuple, util.UninferableBase)
            )
            # Tuple concatenation allowed
            if both_sides_tuple_or_uninferable:
                if node.op == "+":
                    return
                suggestion = f"Did you mean '({node.left.as_string()} + {node.right.as_string()})' instead?"
            # except (V | A)
            else:
                suggestion = f"Did you mean '({node.left.as_string()}, {node.right.as_string()})' instead?"
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))

    @utils.only_required_for_messages("wrong-exception-operation")
    def visit_compare(self, node: nodes.Compare) -> None:
        """Emit wrong-exception-operation for comparisons used as except targets."""
        if isinstance(node.parent, nodes.ExceptHandler):
            # except (V < A)
            suggestion = (
                f"Did you mean '({node.left.as_string()}, "
                f"{', '.join(o.as_string() for _, o in node.ops)})' instead?"
            )
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))

    @utils.only_required_for_messages(
        "bare-except",
        "broad-exception-caught",
        "try-except-raise",
        "binary-op-exception",
        "bad-except-order",
        "catching-non-exception",
        "duplicate-except",
    )
    def visit_trystar(self, node: nodes.TryStar) -> None:
        """Check for empty except*."""
        self.visit_try(node)

    def visit_try(self, node: nodes.Try) -> None:
        """Check for empty except."""
        self._check_try_except_raise(node)
        exceptions_classes: list[Any] = []
        nb_handlers = len(node.handlers)
        for index, handler in enumerate(node.handlers):
            if handler.type is None:
                if not _is_raising(handler.body):
                    self.add_message("bare-except", node=handler, confidence=HIGH)

                # check if an "except:" is followed by some other
                # except
                if index < (nb_handlers - 1):
                    msg = "empty except clause should always appear last"
                    self.add_message(
                        "bad-except-order", node=node, args=msg, confidence=HIGH
                    )

            elif isinstance(handler.type, nodes.BoolOp):
                self.add_message(
                    "binary-op-exception",
                    node=handler,
                    args=handler.type.op,
                    confidence=HIGH,
                )
            else:
                try:
                    exceptions = list(_annotated_unpack_infer(handler.type))
                except astroid.InferenceError:
                    continue

                for part, exception in exceptions:
                    if isinstance(
                        exception, astroid.Instance
                    ) and utils.inherit_from_std_ex(exception):
                        exception = exception._proxied

                    self._check_catching_non_exception(handler, exception, part)

                    if not isinstance(exception, nodes.ClassDef):
                        continue

                    exc_ancestors = [
                        anc
                        for anc in exception.ancestors()
                        if isinstance(anc, nodes.ClassDef)
                    ]

                    for previous_exc in exceptions_classes:
                        if previous_exc in exc_ancestors:
                            msg = f"{previous_exc.name} is an ancestor class of {exception.name}"
                            self.add_message(
                                "bad-except-order",
                                node=handler.type,
                                args=msg,
                                confidence=INFERENCE,
                            )
                    if self._is_overgeneral_exception(exception) and not _is_raising(
                        handler.body
                    ):
                        self.add_message(
                            "broad-exception-caught",
                            args=exception.name,
                            node=handler.type,
                            confidence=INFERENCE,
                        )

                    if exception in exceptions_classes:
                        self.add_message(
                            "duplicate-except",
                            args=exception.name,
                            node=handler.type,
                            confidence=INFERENCE,
                        )

                exceptions_classes += [exc for _, exc in exceptions]

    def _is_overgeneral_exception(self, exception: nodes.ClassDef) -> bool:
        """Return True if *exception* is in the configured overgeneral list."""
        return exception.qname() in self.linter.config.overgeneral_exceptions
+
+
def register(linter: PyLinter) -> None:
    """Register the exceptions checker on the given linter instance."""
    checker = ExceptionsChecker(linter)
    linter.register_checker(checker)
diff --git a/pylint/checkers/format.py b/pylint/checkers/format.py
index 1b31de075..f8aecbda6 100644
--- a/pylint/checkers/format.py
+++ b/pylint/checkers/format.py
@@ -1,3 +1,7 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Python code format's checker.

 By default, try to follow Guido's style guide :
@@ -6,58 +10,141 @@ https://www.python.org/doc/essays/styleguide/

 Some parts of the process_token method is based from The Tab Nanny std module.
 """
+
 from __future__ import annotations
+
 import tokenize
 from functools import reduce
 from re import Match
 from typing import TYPE_CHECKING, Literal
+
 from astroid import nodes
+
 from pylint.checkers import BaseRawFileChecker, BaseTokenChecker
 from pylint.checkers.utils import only_required_for_messages
 from pylint.constants import WarningScope
 from pylint.interfaces import HIGH
 from pylint.typing import MessageDefinitionTuple
 from pylint.utils.pragma_parser import OPTION_PO, PragmaParserError, parse_pragma
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-_KEYWORD_TOKENS = {'assert', 'del', 'elif', 'except', 'for', 'if', 'in',
-    'not', 'raise', 'return', 'while', 'yield', 'with', '=', ':='}
+
+
+_KEYWORD_TOKENS = {
+    "assert",
+    "del",
+    "elif",
+    "except",
+    "for",
+    "if",
+    "in",
+    "not",
+    "raise",
+    "return",
+    "while",
+    "yield",
+    "with",
+    "=",
+    ":=",
+}
 _JUNK_TOKENS = {tokenize.COMMENT, tokenize.NL}
-MSGS: dict[str, MessageDefinitionTuple] = {'C0301': (
-    'Line too long (%s/%s)', 'line-too-long',
-    'Used when a line is longer than a given number of characters.'),
-    'C0302': ('Too many lines in module (%s/%s)', 'too-many-lines',
-    'Used when a module has too many lines, reducing its readability.'),
-    'C0303': ('Trailing whitespace', 'trailing-whitespace',
-    'Used when there is whitespace between the end of a line and the newline.'
-    ), 'C0304': ('Final newline missing', 'missing-final-newline',
-    'Used when the last line in a file is missing a newline.'), 'C0305': (
-    'Trailing newlines', 'trailing-newlines',
-    'Used when there are trailing blank lines in a file.'), 'W0311': (
-    'Bad indentation. Found %s %s, expected %s', 'bad-indentation',
-    "Used when an unexpected number of indentation's tabulations or spaces has been found."
-    ), 'W0301': ('Unnecessary semicolon', 'unnecessary-semicolon',
-    'Used when a statement is ended by a semi-colon (";"), which isn\'t necessary (that\'s python, not C ;).'
-    ), 'C0321': ('More than one statement on a single line',
-    'multiple-statements',
-    'Used when more than on statement are found on the same line.', {
-    'scope': WarningScope.NODE}), 'C0325': (
-    'Unnecessary parens after %r keyword', 'superfluous-parens',
-    'Used when a single item in parentheses follows an if, for, or other keyword.'
-    ), 'C0327': ('Mixed line endings LF and CRLF', 'mixed-line-endings',
-    'Used when there are mixed (LF and CRLF) newline signs in a file.'),
-    'C0328': (
-    "Unexpected line ending format. There is '%s' while it should be '%s'.",
-    'unexpected-line-ending-format',
-    'Used when there is different newline than expected.')}
+
+
+MSGS: dict[str, MessageDefinitionTuple] = {
+    "C0301": (
+        "Line too long (%s/%s)",
+        "line-too-long",
+        "Used when a line is longer than a given number of characters.",
+    ),
+    "C0302": (
+        "Too many lines in module (%s/%s)",  # was W0302
+        "too-many-lines",
+        "Used when a module has too many lines, reducing its readability.",
+    ),
+    "C0303": (
+        "Trailing whitespace",
+        "trailing-whitespace",
+        "Used when there is whitespace between the end of a line and the newline.",
+    ),
+    "C0304": (
+        "Final newline missing",
+        "missing-final-newline",
+        "Used when the last line in a file is missing a newline.",
+    ),
+    "C0305": (
+        "Trailing newlines",
+        "trailing-newlines",
+        "Used when there are trailing blank lines in a file.",
+    ),
+    "W0311": (
+        "Bad indentation. Found %s %s, expected %s",
+        "bad-indentation",
+        "Used when an unexpected number of indentation's tabulations or "
+        "spaces has been found.",
+    ),
+    "W0301": (
+        "Unnecessary semicolon",  # was W0106
+        "unnecessary-semicolon",
+        'Used when a statement is ended by a semi-colon (";"), which '
+        "isn't necessary (that's python, not C ;).",
+    ),
+    "C0321": (
+        "More than one statement on a single line",
+        "multiple-statements",
+        "Used when more than on statement are found on the same line.",
+        {"scope": WarningScope.NODE},
+    ),
+    "C0325": (
+        "Unnecessary parens after %r keyword",
+        "superfluous-parens",
+        "Used when a single item in parentheses follows an if, for, or "
+        "other keyword.",
+    ),
+    "C0327": (
+        "Mixed line endings LF and CRLF",
+        "mixed-line-endings",
+        "Used when there are mixed (LF and CRLF) newline signs in a file.",
+    ),
+    "C0328": (
+        "Unexpected line ending format. There is '%s' while it should be '%s'.",
+        "unexpected-line-ending-format",
+        "Used when there is different newline than expected.",
+    ),
+}
+
+
+def _last_token_on_line_is(tokens: TokenWrapper, line_end: int, token: str) -> bool:
+    return (
+        line_end > 0
+        and tokens.token(line_end - 1) == token
+        or line_end > 1
+        and tokens.token(line_end - 2) == token
+        and tokens.type(line_end - 1) == tokenize.COMMENT
+    )


 class TokenWrapper:
     """A wrapper for readable access to token information."""

-    def __init__(self, tokens: list[tokenize.TokenInfo]) ->None:
+    def __init__(self, tokens: list[tokenize.TokenInfo]) -> None:
         self._tokens = tokens

+    def token(self, idx: int) -> str:
+        return self._tokens[idx][1]
+
+    def type(self, idx: int) -> int:
+        return self._tokens[idx][0]
+
+    def start_line(self, idx: int) -> int:
+        return self._tokens[idx][2][0]
+
+    def start_col(self, idx: int) -> int:
+        return self._tokens[idx][2][1]
+
+    def line(self, idx: int) -> str:
+        return self._tokens[idx][4]
+

 class FormatChecker(BaseTokenChecker, BaseRawFileChecker):
     """Formatting checker.
@@ -67,45 +154,126 @@ class FormatChecker(BaseTokenChecker, BaseRawFileChecker):
     * strict indentation
     * line length
     """
-    name = 'format'
+
+    # configuration section name
+    name = "format"
+    # messages
     msgs = MSGS
-    options = ('max-line-length', {'default': 100, 'type': 'int', 'metavar':
-        '<int>', 'help': 'Maximum number of characters on a single line.'}), (
-        'ignore-long-lines', {'type': 'regexp', 'metavar': '<regexp>',
-        'default': '^\\s*(# )?<?https?://\\S+>?$', 'help':
-        'Regexp for a line that is allowed to be longer than the limit.'}), (
-        'single-line-if-stmt', {'default': False, 'type': 'yn', 'metavar':
-        '<y or n>', 'help':
-        'Allow the body of an if to be on the same line as the test if there is no else.'
-        }), ('single-line-class-stmt', {'default': False, 'type': 'yn',
-        'metavar': '<y or n>', 'help':
-        'Allow the body of a class to be on the same line as the declaration if body contains single statement.'
-        }), ('max-module-lines', {'default': 1000, 'type': 'int', 'metavar':
-        '<int>', 'help': 'Maximum number of lines in a module.'}), (
-        'indent-string', {'default': '    ', 'type': 'non_empty_string',
-        'metavar': '<string>', 'help':
-        'String used as indentation unit. This is usually "    " (4 spaces) or "\\t" (1 tab).'
-        }), ('indent-after-paren', {'type': 'int', 'metavar': '<int>',
-        'default': 4, 'help':
-        'Number of spaces of indent required inside a hanging or continued line.'
-        }), ('expected-line-ending-format', {'type': 'choice', 'metavar':
-        '<empty or LF or CRLF>', 'default': '', 'choices': ['', 'LF',
-        'CRLF'], 'help':
-        'Expected format of line ending, e.g. empty (any line ending), LF or CRLF.'
-        })
-
-    def __init__(self, linter: PyLinter) ->None:
+    # configuration options
+    # for available dict keys/values see the optik parser 'add_option' method
+    options = (
+        (
+            "max-line-length",
+            {
+                "default": 100,
+                "type": "int",
+                "metavar": "<int>",
+                "help": "Maximum number of characters on a single line.",
+            },
+        ),
+        (
+            "ignore-long-lines",
+            {
+                "type": "regexp",
+                "metavar": "<regexp>",
+                "default": r"^\s*(# )?<?https?://\S+>?$",
+                "help": (
+                    "Regexp for a line that is allowed to be longer than the limit."
+                ),
+            },
+        ),
+        (
+            "single-line-if-stmt",
+            {
+                "default": False,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": (
+                    "Allow the body of an if to be on the same "
+                    "line as the test if there is no else."
+                ),
+            },
+        ),
+        (
+            "single-line-class-stmt",
+            {
+                "default": False,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": (
+                    "Allow the body of a class to be on the same "
+                    "line as the declaration if body contains "
+                    "single statement."
+                ),
+            },
+        ),
+        (
+            "max-module-lines",
+            {
+                "default": 1000,
+                "type": "int",
+                "metavar": "<int>",
+                "help": "Maximum number of lines in a module.",
+            },
+        ),
+        (
+            "indent-string",
+            {
+                "default": "    ",
+                "type": "non_empty_string",
+                "metavar": "<string>",
+                "help": "String used as indentation unit. This is usually "
+                '"    " (4 spaces) or "\\t" (1 tab).',
+            },
+        ),
+        (
+            "indent-after-paren",
+            {
+                "type": "int",
+                "metavar": "<int>",
+                "default": 4,
+                "help": "Number of spaces of indent required inside a hanging "
+                "or continued line.",
+            },
+        ),
+        (
+            "expected-line-ending-format",
+            {
+                "type": "choice",
+                "metavar": "<empty or LF or CRLF>",
+                "default": "",
+                "choices": ["", "LF", "CRLF"],
+                "help": (
+                    "Expected format of line ending, "
+                    "e.g. empty (any line ending), LF or CRLF."
+                ),
+            },
+        ),
+    )
+
+    def __init__(self, linter: PyLinter) -> None:
         super().__init__(linter)
         self._lines: dict[int, str] = {}
         self._visited_lines: dict[int, Literal[1, 2]] = {}

-    def new_line(self, tokens: TokenWrapper, line_end: int, line_start: int
-        ) ->None:
+    def new_line(self, tokens: TokenWrapper, line_end: int, line_start: int) -> None:
         """A new line has been encountered, process it if necessary."""
+        if _last_token_on_line_is(tokens, line_end, ";"):
+            self.add_message("unnecessary-semicolon", line=tokens.start_line(line_end))
+
+        line_num = tokens.start_line(line_start)
+        line = tokens.line(line_start)
+        if tokens.type(line_start) not in _JUNK_TOKENS:
+            self._lines[line_num] = line.split("\n")[0]
+        self.check_lines(tokens, line_start, line, line_num)
+
+    def process_module(self, node: nodes.Module) -> None:
         pass

-    def _check_keyword_parentheses(self, tokens: list[tokenize.TokenInfo],
-        start: int) ->None:
+    # pylint: disable-next = too-many-return-statements, too-many-branches
+    def _check_keyword_parentheses(
+        self, tokens: list[tokenize.TokenInfo], start: int
+    ) -> None:
         """Check that there are not unnecessary parentheses after a keyword.

         Parens are unnecessary if there is exactly one balanced outer pair on a
@@ -115,55 +283,346 @@ class FormatChecker(BaseTokenChecker, BaseRawFileChecker):
         tokens: The entire list of Tokens.
         start: The position of the keyword in the token list.
         """
-        pass
+        # If the next token is not a paren, we're fine.
+        if tokens[start + 1].string != "(":
+            return
+        if (
+            tokens[start].string == "not"
+            and start > 0
+            and tokens[start - 1].string == "is"
+        ):
+            # If this is part of an `is not` expression, we have a binary operator
+            # so the parentheses are not necessarily redundant.
+            return
+        found_and_or = False
+        contains_walrus_operator = False
+        walrus_operator_depth = 0
+        contains_double_parens = 0
+        depth = 0
+        keyword_token = str(tokens[start].string)
+        line_num = tokens[start].start[0]
+        for i in range(start, len(tokens) - 1):
+            token = tokens[i]
+
+            # If we hit a newline, then assume any parens were for continuation.
+            if token.type == tokenize.NL:
+                return
+            # Since the walrus operator doesn't exist below python3.8, the tokenizer
+            # generates independent tokens
+            if (
+                token.string == ":="  # <-- python3.8+ path
+                or token.string + tokens[i + 1].string == ":="
+            ):
+                contains_walrus_operator = True
+                walrus_operator_depth = depth
+            if token.string == "(":
+                depth += 1
+                if tokens[i + 1].string == "(":
+                    contains_double_parens = 1
+            elif token.string == ")":
+                depth -= 1
+                if depth:
+                    if contains_double_parens and tokens[i + 1].string == ")":
+                        # For walrus operators in `if (not)` conditions and comprehensions
+                        if keyword_token in {"in", "if", "not"}:
+                            continue
+                        return
+                    contains_double_parens -= 1
+                    continue
+                # ')' can't happen after if (foo), since it would be a syntax error.
+                if tokens[i + 1].string in {":", ")", "]", "}", "in"} or tokens[
+                    i + 1
+                ].type in {tokenize.NEWLINE, tokenize.ENDMARKER, tokenize.COMMENT}:
+                    if contains_walrus_operator and walrus_operator_depth - 1 == depth:
+                        return
+                    # The empty tuple () is always accepted.
+                    if i == start + 2:
+                        return
+                    if found_and_or:
+                        return
+                    if keyword_token == "in":
+                        # This special case was added in https://github.com/pylint-dev/pylint/pull/4948
+                        # but it could be removed in the future. Avoid churn for now.
+                        return
+                    self.add_message(
+                        "superfluous-parens", line=line_num, args=keyword_token
+                    )
+                return
+            elif depth == 1:
+                # This is a tuple, which is always acceptable.
+                if token[1] == ",":
+                    return
+                # 'and' and 'or' are the only boolean operators with lower precedence
+                # than 'not', so parens are only required when they are found.
+                if token[1] in {"and", "or"}:
+                    found_and_or = True
+                # A yield inside an expression must always be in parentheses,
+                # quit early without error.
+                elif token[1] == "yield":
+                    return
+                # A generator expression always has a 'for' token in it, and
+                # the 'for' token is only legal inside parens when it is in a
+                # generator expression.  The parens are necessary here, so bail
+                # without an error.
+                elif token[1] == "for":
+                    return
+                # A generator expression can have an 'else' token in it.
+                # We check the rest of the tokens to see if any problems occur after
+                # the 'else'.
+                elif token[1] == "else":
+                    if "(" in (i.string for i in tokens[i:]):
+                        self._check_keyword_parentheses(tokens[i:], 0)
+                    return

-    def process_tokens(self, tokens: list[tokenize.TokenInfo]) ->None:
+    def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
         """Process tokens and search for:

         - too long lines (i.e. longer than <max_chars>)
         - optionally bad construct (if given, bad_construct must be a compiled
           regular expression).
         """
-        pass
+        indents = [0]
+        check_equal = False
+        line_num = 0
+        self._lines = {}
+        self._visited_lines = {}
+        self._last_line_ending: str | None = None
+        last_blank_line_num = 0
+        for idx, (tok_type, string, start, _, line) in enumerate(tokens):
+            if start[0] != line_num:
+                line_num = start[0]
+                # A tokenizer oddity: if an indented line contains a multi-line
+                # docstring, the line member of the INDENT token does not contain
+                # the full line; therefore we check the next token on the line.
+                if tok_type == tokenize.INDENT:
+                    self.new_line(TokenWrapper(tokens), idx - 1, idx + 1)
+                else:
+                    self.new_line(TokenWrapper(tokens), idx - 1, idx)
+
+            if tok_type == tokenize.NEWLINE:
+                # a program statement, or ENDMARKER, will eventually follow,
+                # after some (possibly empty) run of tokens of the form
+                #     (NL | COMMENT)* (INDENT | DEDENT+)?
+                # If an INDENT appears, setting check_equal is wrong, and will
+                # be undone when we see the INDENT.
+                check_equal = True
+                self._check_line_ending(string, line_num)
+            elif tok_type == tokenize.INDENT:
+                check_equal = False
+                self.check_indent_level(string, indents[-1] + 1, line_num)
+                indents.append(indents[-1] + 1)
+            elif tok_type == tokenize.DEDENT:
+                # there's nothing we need to check here!  what's important is
+                # that when the run of DEDENTs ends, the indentation of the
+                # program statement (or ENDMARKER) that triggered the run is
+                # equal to what's left at the top of the indents stack
+                check_equal = True
+                if len(indents) > 1:
+                    del indents[-1]
+            elif tok_type == tokenize.NL:
+                if not line.strip("\r\n"):
+                    last_blank_line_num = line_num
+            elif tok_type not in (tokenize.COMMENT, tokenize.ENCODING):
+                # This is the first concrete token following a NEWLINE, so it
+                # must be the first token of the next program statement, or an
+                # ENDMARKER; the "line" argument exposes the leading white-space
+                # for this statement; in the case of ENDMARKER, line is an empty
+                # string, so will properly match the empty string with which the
+                # "indents" stack was seeded
+                if check_equal:
+                    check_equal = False
+                    self.check_indent_level(line, indents[-1], line_num)
+
+            if tok_type == tokenize.NUMBER and string.endswith("l"):
+                self.add_message("lowercase-l-suffix", line=line_num)
+
+            if string in _KEYWORD_TOKENS:
+                self._check_keyword_parentheses(tokens, idx)

-    @only_required_for_messages('multiple-statements')
-    def visit_default(self, node: nodes.NodeNG) ->None:
+        line_num -= 1  # to be ok with "wc -l"
+        if line_num > self.linter.config.max_module_lines:
+            # Get the line where the too-many-lines (or its message id)
+            # was disabled or default to 1.
+            message_definition = self.linter.msgs_store.get_message_definitions(
+                "too-many-lines"
+            )[0]
+            names = (message_definition.msgid, "too-many-lines")
+            lineno = next(
+                filter(None, (self.linter._pragma_lineno.get(name) for name in names)),
+                1,
+            )
+            self.add_message(
+                "too-many-lines",
+                args=(line_num, self.linter.config.max_module_lines),
+                line=lineno,
+            )
+
+        # See if there are any trailing lines.  Do not complain about empty
+        # files like __init__.py markers.
+        if line_num == last_blank_line_num and line_num > 0:
+            self.add_message("trailing-newlines", line=line_num)
+
+    def _check_line_ending(self, line_ending: str, line_num: int) -> None:
+        # check if line endings are mixed
+        if self._last_line_ending is not None:
+            # line_ending == "" indicates a synthetic newline added at
+            # the end of a file that does not, in fact, end with a
+            # newline.
+            if line_ending and line_ending != self._last_line_ending:
+                self.add_message("mixed-line-endings", line=line_num)
+
+        self._last_line_ending = line_ending
+
+        # check if line ending is as expected
+        expected = self.linter.config.expected_line_ending_format
+        if expected:
+            # reduce multiple \n\n\n\n to one \n
+            line_ending = reduce(lambda x, y: x + y if x != y else x, line_ending, "")
+            line_ending = "LF" if line_ending == "\n" else "CRLF"
+            if line_ending != expected:
+                self.add_message(
+                    "unexpected-line-ending-format",
+                    args=(line_ending, expected),
+                    line=line_num,
+                )
+
+    @only_required_for_messages("multiple-statements")
+    def visit_default(self, node: nodes.NodeNG) -> None:
         """Check the node line number and check it if not yet done."""
-        pass
+        if not node.is_statement:
+            return
+        if not node.root().pure_python:
+            return
+        prev_sibl = node.previous_sibling()
+        if prev_sibl is not None:
+            prev_line = prev_sibl.fromlineno
+        elif isinstance(node.parent, nodes.Module):
+            prev_line = 0
+        else:
+            prev_line = node.parent.statement().fromlineno
+        line = node.fromlineno
+        assert line, node
+        if prev_line == line and self._visited_lines.get(line) != 2:
+            self._check_multi_statement_line(node, line)
+            return
+        if line in self._visited_lines:
+            return
+        try:
+            tolineno = node.blockstart_tolineno
+        except AttributeError:
+            tolineno = node.tolineno
+        assert tolineno, node
+        lines: list[str] = []
+        for line in range(line, tolineno + 1):  # noqa: B020
+            self._visited_lines[line] = 1
+            try:
+                lines.append(self._lines[line].rstrip())
+            except KeyError:
+                lines.append("")

-    def _check_multi_statement_line(self, node: nodes.NodeNG, line: int
-        ) ->None:
+    def _check_multi_statement_line(self, node: nodes.NodeNG, line: int) -> None:
         """Check for lines containing multiple statements."""
-        pass
+        if isinstance(node, nodes.With):
+            # Do not warn about multiple nested context managers in with statements.
+            return
+        if (
+            isinstance(node.parent, nodes.If)
+            and not node.parent.orelse
+            and self.linter.config.single_line_if_stmt
+        ):
+            return
+        if (
+            isinstance(node.parent, nodes.ClassDef)
+            and len(node.parent.body) == 1
+            and self.linter.config.single_line_class_stmt
+        ):
+            return

-    def check_trailing_whitespace_ending(self, line: str, i: int) ->None:
+        # Functions stubs and class with ``Ellipsis`` as body are exempted.
+        if (
+            isinstance(node, nodes.Expr)
+            and isinstance(node.parent, (nodes.FunctionDef, nodes.ClassDef))
+            and isinstance(node.value, nodes.Const)
+            and node.value.value is Ellipsis
+        ):
+            return
+
+        self.add_message("multiple-statements", node=node, confidence=HIGH)
+        self._visited_lines[line] = 2
+
+    def check_trailing_whitespace_ending(self, line: str, i: int) -> None:
         """Check that there is no trailing white-space."""
-        pass
+        # exclude \f (formfeed) from the rstrip
+        stripped_line = line.rstrip("\t\n\r\v ")
+        if line[len(stripped_line) :] not in ("\n", "\r\n"):
+            self.add_message(
+                "trailing-whitespace",
+                line=i,
+                col_offset=len(stripped_line),
+                confidence=HIGH,
+            )

-    def check_line_length(self, line: str, i: int, checker_off: bool) ->None:
+    def check_line_length(self, line: str, i: int, checker_off: bool) -> None:
         """Check that the line length is less than the authorized value."""
-        pass
+        max_chars = self.linter.config.max_line_length
+        ignore_long_line = self.linter.config.ignore_long_lines
+        line = line.rstrip()
+        if len(line) > max_chars and not ignore_long_line.search(line):
+            if checker_off:
+                self.linter.add_ignored_message("line-too-long", i)
+            else:
+                self.add_message("line-too-long", line=i, args=(len(line), max_chars))

     @staticmethod
-    def remove_pylint_option_from_lines(options_pattern_obj: Match[str]) ->str:
+    def remove_pylint_option_from_lines(options_pattern_obj: Match[str]) -> str:
         """Remove the `# pylint ...` pattern from lines."""
-        pass
+        lines = options_pattern_obj.string
+        purged_lines = (
+            lines[: options_pattern_obj.start(1)].rstrip()
+            + lines[options_pattern_obj.end(1) :]
+        )
+        return purged_lines

     @staticmethod
-    def is_line_length_check_activated(pylint_pattern_match_object: Match[str]
-        ) ->bool:
+    def is_line_length_check_activated(pylint_pattern_match_object: Match[str]) -> bool:
         """Return True if the line length check is activated."""
-        pass
+        try:
+            for pragma in parse_pragma(pylint_pattern_match_object.group(2)):
+                if pragma.action == "disable" and "line-too-long" in pragma.messages:
+                    return False
+        except PragmaParserError:
+            # Useful information about this error is printed by the lint package
+            pass
+        return True

     @staticmethod
-    def specific_splitlines(lines: str) ->list[str]:
+    def specific_splitlines(lines: str) -> list[str]:
         """Split lines according to universal newlines except those in a specific
         sets.
         """
-        pass
+        unsplit_ends = {
+            "\x0b",  # synonym of \v
+            "\x0c",  # synonym of \f
+            "\x1c",
+            "\x1d",
+            "\x1e",
+            "\x85",
+            "\u2028",
+            "\u2029",
+        }
+        res: list[str] = []
+        buffer = ""
+        for atomic_line in lines.splitlines(True):
+            if atomic_line[-1] not in unsplit_ends:
+                res.append(buffer + atomic_line)
+                buffer = ""
+            else:
+                buffer += atomic_line
+        return res

-    def check_lines(self, tokens: TokenWrapper, line_start: int, lines: str,
-        lineno: int) ->None:
+    def check_lines(
+        self, tokens: TokenWrapper, line_start: int, lines: str, lineno: int
+    ) -> None:
         """Check given lines for potential messages.

         Check if lines have:
@@ -171,9 +630,76 @@ class FormatChecker(BaseTokenChecker, BaseRawFileChecker):
         - no trailing white-space
         - less than a maximum number of characters
         """
-        pass
+        # we're first going to do a rough check whether any lines in this set
+        # go over the line limit. If none of them do, then we don't need to
+        # parse out the pylint options later on and can just assume that these
+        # lines are clean
+
+        # we'll also handle the line ending check here to avoid double-iteration
+        # unless the line lengths are suspect
+
+        max_chars = self.linter.config.max_line_length
+
+        split_lines = self.specific_splitlines(lines)
+
+        for offset, line in enumerate(split_lines):
+            if not line.endswith("\n"):
+                self.add_message("missing-final-newline", line=lineno + offset)
+                continue
+            # We don't test for trailing whitespaces in strings
+            # See https://github.com/pylint-dev/pylint/issues/6936
+            # and https://github.com/pylint-dev/pylint/issues/3822
+            if tokens.type(line_start) != tokenize.STRING:
+                self.check_trailing_whitespace_ending(line, lineno + offset)
+
+        # This check is intentionally simple (no rstrip): it runs on every line
+        # being checked, so it is advantageous to avoid extra work here
+        potential_line_length_warning = any(
+            len(line) > max_chars for line in split_lines
+        )

-    def check_indent_level(self, string: str, expected: int, line_num: int
-        ) ->None:
+        # if there were no lines passing the max_chars config, we don't bother
+        # running the full line check (as we've met an even more strict condition)
+        if not potential_line_length_warning:
+            return
+
+        # Line length check may be deactivated through `pylint: disable` comment
+        mobj = OPTION_PO.search(lines)
+        checker_off = False
+        if mobj:
+            if not self.is_line_length_check_activated(mobj):
+                checker_off = True
+            # The 'pylint: disable whatever' should not be taken into account for line length count
+            lines = self.remove_pylint_option_from_lines(mobj)
+
+        # here we re-run specific_splitlines since we have filtered out pylint options above
+        for offset, line in enumerate(self.specific_splitlines(lines)):
+            self.check_line_length(line, lineno + offset, checker_off)
+
+    def check_indent_level(self, string: str, expected: int, line_num: int) -> None:
         """Return the indent level of the string."""
-        pass
+        indent = self.linter.config.indent_string
+        if indent == "\\t":  # \t is not interpreted in the configuration file
+            indent = "\t"
+        level = 0
+        unit_size = len(indent)
+        while string[:unit_size] == indent:
+            string = string[unit_size:]
+            level += 1
+        suppl = ""
+        while string and string[0] in " \t":
+            suppl += string[0]
+            string = string[1:]
+        if level != expected or suppl:
+            i_type = "spaces"
+            if indent[0] == "\t":
+                i_type = "tabs"
+            self.add_message(
+                "bad-indentation",
+                line=line_num,
+                args=(level * unit_size + len(suppl), i_type, expected * unit_size),
+            )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(FormatChecker(linter))
diff --git a/pylint/checkers/imports.py b/pylint/checkers/imports.py
index 14add5d0f..afef0277e 100644
--- a/pylint/checkers/imports.py
+++ b/pylint/checkers/imports.py
@@ -1,5 +1,11 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Imports checkers for Python code."""
+
 from __future__ import annotations
+
 import collections
 import copy
 import os
@@ -8,11 +14,20 @@ from collections import defaultdict
 from collections.abc import ItemsView, Sequence
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Dict, List, Union
+
 import astroid
 from astroid import nodes
 from astroid.nodes._base_nodes import ImportNode
+
 from pylint.checkers import BaseChecker, DeprecatedMixin
-from pylint.checkers.utils import get_import_name, in_type_checking_block, is_from_fallback_block, is_module_ignored, is_sys_guard, node_ignores_exception
+from pylint.checkers.utils import (
+    get_import_name,
+    in_type_checking_block,
+    is_from_fallback_block,
+    is_module_ignored,
+    is_sys_guard,
+    node_ignores_exception,
+)
 from pylint.constants import MAX_NUMBER_OF_IMPORT_SHOWN
 from pylint.exceptions import EmptyReportError
 from pylint.graph import DotBackend, get_cycles
@@ -21,91 +36,289 @@ from pylint.reporters.ureports.nodes import Paragraph, Section, VerbatimText
 from pylint.typing import MessageDefinitionTuple
 from pylint.utils import IsortDriver
 from pylint.utils.linterstats import LinterStats
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
+
+
# The dictionary with Any should actually be a _ImportTree again
# but mypy doesn't support recursive types yet
_ImportTree = Dict[str, Union[List[Dict[str, Any]], List[str]]]

# Stdlib modules that are deprecated (or slated for removal), keyed by the
# (major, minor, patch) interpreter version that deprecated them; the
# (0, 0, 0) key holds modules deprecated on every supported version.
DEPRECATED_MODULES = {
    (0, 0, 0): {"tkinter.tix", "fpectl"},
    (3, 2, 0): {"optparse"},
    (3, 3, 0): {"xml.etree.cElementTree"},
    (3, 4, 0): {"imp"},
    (3, 5, 0): {"formatter"},
    (3, 6, 0): {"asynchat", "asyncore", "smtpd"},
    (3, 7, 0): {"macpath"},
    (3, 9, 0): {"lib2to3", "parser", "symbol", "binhex"},
    (3, 10, 0): {"distutils", "typing.io", "typing.re"},
    (3, 11, 0): {
        "aifc",
        "audioop",
        "cgi",
        "cgitb",
        "chunk",
        "crypt",
        "imghdr",
        "msilib",
        "mailcap",
        "nis",
        "nntplib",
        "ossaudiodev",
        "pipes",
        "sndhdr",
        "spwd",
        "sunau",
        "sre_compile",
        "sre_constants",
        "sre_parse",
        "telnetlib",
        "uu",
        "xdrlib",
    },
}
+
+
def _get_first_import(
    node: ImportNode,
    context: nodes.LocalsDictNodeNG,
    name: str,
    base: str | None,
    level: int | None,
    alias: str | None,
) -> tuple[nodes.Import | nodes.ImportFrom | None, str | None]:
    """Return the node where [base.]<name> is imported or None if not found.

    The second element of the returned tuple is the message symbol to emit:
    ``"reimported"`` for a plain duplicate, ``"shadowed-import"`` when an
    earlier unaliased import is shadowed by *alias*.
    """
    fullname = f"{base}.{name}" if base else name

    first = None
    found = False
    msg = "reimported"

    for first in context.body:
        # Skip the statement under scrutiny itself.
        if first is node:
            continue
        # Only statements located *before* `node` in the same scope count.
        if first.scope() is node.scope() and first.fromlineno > node.fromlineno:
            continue
        if isinstance(first, nodes.Import):
            if any(fullname == iname[0] for iname in first.names):
                found = True
                break
            # An earlier `import x` with no alias is shadowed if `node`
            # rebinds the same name through `... as <alias>`.
            for imported_name, imported_alias in first.names:
                if not imported_alias and imported_name == alias:
                    found = True
                    msg = "shadowed-import"
                    break
            if found:
                break
        elif isinstance(first, nodes.ImportFrom):
            # Relative imports only match when their level is identical.
            if level == first.level:
                for imported_name, imported_alias in first.names:
                    if fullname == f"{first.modname}.{imported_name}":
                        found = True
                        break
                    # Same bare name imported twice (neither side aliased).
                    if (
                        name != "*"
                        and name == imported_name
                        and not (alias or imported_alias)
                    ):
                        found = True
                        break
                    if not imported_alias and imported_name == alias:
                        found = True
                        msg = "shadowed-import"
                        break
                if found:
                    break
    # Mutually exclusive branches (e.g. if/else) may legitimately import
    # the same name twice.
    if found and not astroid.are_exclusive(first, node):
        return first, msg
    return None, None
+
+
def _ignore_import_failure(
    node: ImportNode,
    modname: str,
    ignored_modules: Sequence[str],
) -> bool:
    """Tell whether a failure to import *modname* at *node* should be ignored."""
    if is_module_ignored(modname, ignored_modules):
        return True

    # Imports guarded by `typing.TYPE_CHECKING` or a `sys.version_info`
    # check are allowed to be unresolvable at runtime.
    if in_type_checking_block(node):
        return True
    parent = node.parent
    if isinstance(parent, nodes.If) and is_sys_guard(parent):
        return True

    return node_ignores_exception(node, ImportError)
+
+
+# utilities to represents import dependencies as tree and dot graph ###########
+
+
+def _make_tree_defs(mod_files_list: ItemsView[str, set[str]]) -> _ImportTree:
     """Get a list of 2-uple (module, list_of_files_which_import_this_module),
     it will return a dictionary to represent this as a tree.
     """
-    pass
+    tree_defs: _ImportTree = {}
+    for mod, files in mod_files_list:
+        node: list[_ImportTree | list[str]] = [tree_defs, []]
+        for prefix in mod.split("."):
+            assert isinstance(node[0], dict)
+            node = node[0].setdefault(prefix, ({}, []))  # type: ignore[arg-type,assignment]
+        assert isinstance(node[1], list)
+        node[1].extend(files)
+    return tree_defs


-def _repr_tree_defs(data: _ImportTree, indent_str: (str | None)=None) ->str:
+def _repr_tree_defs(data: _ImportTree, indent_str: str | None = None) -> str:
     """Return a string which represents imports as a tree."""
-    pass
+    lines = []
+    nodes_items = data.items()
+    for i, (mod, (sub, files)) in enumerate(sorted(nodes_items, key=lambda x: x[0])):
+        files_list = "" if not files else f"({','.join(sorted(files))})"
+        if indent_str is None:
+            lines.append(f"{mod} {files_list}")
+            sub_indent_str = "  "
+        else:
+            lines.append(rf"{indent_str}\-{mod} {files_list}")
+            if i == len(nodes_items) - 1:
+                sub_indent_str = f"{indent_str}  "
+            else:
+                sub_indent_str = f"{indent_str}| "
+        if sub and isinstance(sub, dict):
+            lines.append(_repr_tree_defs(sub, sub_indent_str))
+    return "\n".join(lines)


def _dependencies_graph(filename: str, dep_info: dict[str, set[str]]) -> str:
    """Write dependencies as a dot (graphviz) file."""
    emitted = {}
    graph_name = os.path.splitext(os.path.basename(filename))[0]
    printer = DotBackend(graph_name, rankdir="LR")
    printer.emit('URL="." node[shape="box"]')
    # First pass: declare every module (importers and dependencies) as a node.
    for modname, dependencies in sorted(dep_info.items()):
        emitted[modname] = 1
        printer.emit_node(modname)
        for depmodname in sorted(dependencies):
            if depmodname not in emitted:
                emitted[depmodname] = 1
                printer.emit_node(depmodname)
    # Second pass: draw one edge per importer -> imported relation.
    for depmodname, dependencies in sorted(dep_info.items()):
        for modname in sorted(dependencies):
            printer.emit_edge(modname, depmodname)
    return printer.generate(filename)


def _make_graph(
    filename: str, dep_info: dict[str, set[str]], sect: Section, gtype: str
) -> None:
    """Generate a dependencies graph and add some information about it in the
    report's section.
    """
    written_to = _dependencies_graph(filename, dep_info)
    note = f"{gtype}imports graph has been written to {written_to}"
    sect.append(Paragraph((note,)))
+
+
# the import checker itself ###################################################

# Message definitions: id -> (template, symbol, description[, extra options]).
MSGS: dict[str, MessageDefinitionTuple] = {
    "E0401": (
        "Unable to import %s",
        "import-error",
        "Used when pylint has been unable to import a module.",
        {"old_names": [("F0401", "old-import-error")]},
    ),
    "E0402": (
        "Attempted relative import beyond top-level package",
        "relative-beyond-top-level",
        "Used when a relative import tries to access too many levels "
        "in the current package.",
    ),
    "R0401": (
        "Cyclic import (%s)",
        "cyclic-import",
        "Used when a cyclic import between two or more modules is detected.",
    ),
    "R0402": (
        "Use 'from %s import %s' instead",
        "consider-using-from-import",
        "Emitted when a submodule of a package is imported and "
        "aliased with the same name, "
        "e.g., instead of ``import concurrent.futures as futures`` use "
        "``from concurrent import futures``.",
    ),
    "W0401": (
        "Wildcard import %s",
        "wildcard-import",
        "Used when `from module import *` is detected.",
    ),
    "W0404": (
        "Reimport %r (imported line %s)",
        "reimported",
        "Used when a module is imported more than once.",
    ),
    "W0406": (
        "Module import itself",
        "import-self",
        "Used when a module is importing itself.",
    ),
    "W0407": (
        "Prefer importing %r instead of %r",
        "preferred-module",
        "Used when a module imported has a preferred replacement module.",
    ),
    "W0410": (
        "__future__ import is not the first non docstring statement",
        "misplaced-future",
        "Python 2.5 and greater require __future__ import to be the "
        "first non docstring statement in the module.",
    ),
    "C0410": (
        "Multiple imports on one line (%s)",
        "multiple-imports",
        "Used when import statement importing multiple modules is detected.",
    ),
    "C0411": (
        "%s should be placed before %s",
        "wrong-import-order",
        "Used when PEP8 import order is not respected (standard imports "
        "first, then third-party libraries, then local imports).",
    ),
    "C0412": (
        "Imports from package %s are not grouped",
        "ungrouped-imports",
        "Used when imports are not grouped by packages.",
    ),
    "C0413": (
        'Import "%s" should be placed at the top of the module',
        "wrong-import-position",
        "Used when code and imports are mixed.",
    ),
    "C0414": (
        "Import alias does not rename original package",
        "useless-import-alias",
        "Used when an import alias is same as original package, "
        "e.g., using import numpy as numpy instead of import numpy as np.",
    ),
    "C0415": (
        "Import outside toplevel (%s)",
        "import-outside-toplevel",
        "Used when an import statement is used anywhere other than the module "
        "toplevel. Move this import to the top of the file.",
    ),
    "W0416": (
        "Shadowed %r (imported line %s)",
        "shadowed-import",
        "Used when a module is aliased with a name that shadows another import.",
    ),
}
+
+
# Defaults for the corresponding checker options below.
DEFAULT_STANDARD_LIBRARY = ()
DEFAULT_KNOWN_THIRD_PARTY = ("enchant",)
DEFAULT_PREFERRED_MODULES = ()


@@ -119,144 +332,931 @@ class ImportsChecker(DeprecatedMixin, BaseChecker):
     * uses of deprecated modules
     * uses of modules instead of preferred modules
     """
    name = "imports"
    msgs = {**DeprecatedMixin.DEPRECATED_MODULE_MESSAGE, **MSGS}
    default_deprecated_modules = ()

    # Checker configuration: ((option-name, optparse-style spec), ...).
    options = (
        (
            "deprecated-modules",
            {
                "default": default_deprecated_modules,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Deprecated modules which should not be used,"
                " separated by a comma.",
            },
        ),
        (
            "preferred-modules",
            {
                "default": DEFAULT_PREFERRED_MODULES,
                "type": "csv",
                "metavar": "<module:preferred-module>",
                "help": "Couples of modules and preferred modules,"
                " separated by a comma.",
            },
        ),
        (
            "import-graph",
            {
                "default": "",
                "type": "path",
                "metavar": "<file.gv>",
                "help": "Output a graph (.gv or any supported image format) of"
                " all (i.e. internal and external) dependencies to the given file"
                " (report RP0402 must not be disabled).",
            },
        ),
        (
            "ext-import-graph",
            {
                "default": "",
                "type": "path",
                "metavar": "<file.gv>",
                "help": "Output a graph (.gv or any supported image format)"
                " of external dependencies to the given file"
                " (report RP0402 must not be disabled).",
            },
        ),
        (
            "int-import-graph",
            {
                "default": "",
                "type": "path",
                "metavar": "<file.gv>",
                "help": "Output a graph (.gv or any supported image format)"
                " of internal dependencies to the given file"
                " (report RP0402 must not be disabled).",
            },
        ),
        (
            "known-standard-library",
            {
                "default": DEFAULT_STANDARD_LIBRARY,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Force import order to recognize a module as part of "
                "the standard compatibility libraries.",
            },
        ),
        (
            "known-third-party",
            {
                "default": DEFAULT_KNOWN_THIRD_PARTY,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Force import order to recognize a module as part of "
                "a third party library.",
            },
        ),
        (
            "allow-any-import-level",
            {
                "default": (),
                "type": "csv",
                "metavar": "<modules>",
                "help": (
                    "List of modules that can be imported at any level, not just "
                    "the top level one."
                ),
            },
        ),
        (
            "allow-wildcard-with-all",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Allow wildcard imports from modules that define __all__.",
            },
        ),
        (
            "allow-reexport-from-package",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Allow explicit reexports by alias from a package __init__.",
            },
        ),
    )
+
    def __init__(self, linter: PyLinter) -> None:
        """Initialize per-run state; most fields are reset again in open()."""
        BaseChecker.__init__(self, linter)
        # module name -> set of modules it imports
        self.import_graph: defaultdict[str, set[str]] = defaultdict(set)
        self._imports_stack: list[tuple[ImportNode, str]] = []
        self._first_non_import_node = None
        self._module_pkg: dict[Any, Any] = (
            {}
        )  # mapping of modules to the pkg they belong in
        self._allow_any_import_level: set[Any] = set()
        self.reports = (
            ("RP0401", "External dependencies", self._report_external_dependencies),
            ("RP0402", "Modules dependencies graph", self._report_dependencies_graph),
        )
        # edges deliberately excluded from cyclic-import detection
        self._excluded_edges: defaultdict[str, set[str]] = defaultdict(set)

-    def open(self) ->None:
+    def open(self) -> None:
         """Called before visiting project (i.e set of modules)."""
-        pass
+        self.linter.stats.dependencies = {}
+        self.linter.stats = self.linter.stats
+        self.import_graph = defaultdict(set)
+        self._module_pkg = {}  # mapping of modules to the pkg they belong in
+        self._current_module_package = False
+        self._ignored_modules: Sequence[str] = self.linter.config.ignored_modules
+        # Build a mapping {'module': 'preferred-module'}
+        self.preferred_modules = dict(
+            module.split(":")
+            for module in self.linter.config.preferred_modules
+            if ":" in module
+        )
+        self._allow_any_import_level = set(self.linter.config.allow_any_import_level)
+        self._allow_reexport_package = self.linter.config.allow_reexport_from_package
+
+    def _import_graph_without_ignored_edges(self) -> defaultdict[str, set[str]]:
+        filtered_graph = copy.deepcopy(self.import_graph)
+        for node in filtered_graph:
+            filtered_graph[node].difference_update(self._excluded_edges[node])
+        return filtered_graph

    def close(self) -> None:
        """Called after the whole project (i.e. set of modules) was visited.

        Emits one ``cyclic-import`` message per cycle found in the import
        graph (ignoring explicitly excluded edges).
        """
        if self.linter.is_message_enabled("cyclic-import"):
            graph = self._import_graph_without_ignored_edges()
            vertices = list(graph)
            for cycle in get_cycles(graph, vertices=vertices):
                self.add_message("cyclic-import", args=" -> ".join(cycle))

-    def deprecated_modules(self) ->set[str]:
+    def get_map_data(
+        self,
+    ) -> tuple[defaultdict[str, set[str]], defaultdict[str, set[str]]]:
+        if self.linter.is_message_enabled("cyclic-import"):
+            return (self.import_graph, self._excluded_edges)
+        return (defaultdict(set), defaultdict(set))
+
+    def reduce_map_data(
+        self,
+        linter: PyLinter,
+        data: list[tuple[defaultdict[str, set[str]], defaultdict[str, set[str]]]],
+    ) -> None:
+        if self.linter.is_message_enabled("cyclic-import"):
+            self.import_graph = defaultdict(set)
+            self._excluded_edges = defaultdict(set)
+            for to_update in data:
+                graph, excluded_edges = to_update
+                self.import_graph.update(graph)
+                self._excluded_edges.update(excluded_edges)
+
+            self.close()
+
+    def deprecated_modules(self) -> set[str]:
         """Callback returning the deprecated modules."""
-        pass
+        # First get the modules the user indicated
+        all_deprecated_modules = set(self.linter.config.deprecated_modules)
+        # Now get the hard-coded ones from the stdlib
+        for since_vers, mod_set in DEPRECATED_MODULES.items():
+            if since_vers <= sys.version_info:
+                all_deprecated_modules = all_deprecated_modules.union(mod_set)
+        return all_deprecated_modules

    def visit_module(self, node: nodes.Module) -> None:
        """Store if current module is a package, i.e. an __init__ file."""
        # ``Module.package`` is True when the module is an ``__init__`` file.
        self._current_module_package = node.package

-    def visit_import(self, node: nodes.Import) ->None:
+    def visit_import(self, node: nodes.Import) -> None:
         """Triggered when an import statement is seen."""
-        pass
+        self._check_reimport(node)
+        self._check_import_as_rename(node)
+        self._check_toplevel(node)
+
+        names = [name for name, _ in node.names]
+        if len(names) >= 2:
+            self.add_message("multiple-imports", args=", ".join(names), node=node)
+
+        for name in names:
+            self.check_deprecated_module(node, name)
+            self._check_preferred_module(node, name)
+            imported_module = self._get_imported_module(node, name)
+            if isinstance(node.parent, nodes.Module):
+                # Allow imports nested
+                self._check_position(node)
+            if isinstance(node.scope(), nodes.Module):
+                self._record_import(node, imported_module)

-    def visit_importfrom(self, node: nodes.ImportFrom) ->None:
+            if imported_module is None:
+                continue
+
+            self._add_imported_module(node, imported_module.name)
+
+    def visit_importfrom(self, node: nodes.ImportFrom) -> None:
         """Triggered when a from statement is seen."""
-        pass
-    (visit_try) = (visit_assignattr) = (visit_assign) = (visit_ifexp) = (
-        visit_comprehension) = (visit_expr) = (visit_if
-        ) = compute_first_non_import_node
+        basename = node.modname
+        imported_module = self._get_imported_module(node, basename)
+        absolute_name = get_import_name(node, basename)
+
+        self._check_import_as_rename(node)
+        self._check_misplaced_future(node)
+        self.check_deprecated_module(node, absolute_name)
+        self._check_preferred_module(node, basename)
+        self._check_wildcard_imports(node, imported_module)
+        self._check_same_line_imports(node)
+        self._check_reimport(node, basename=basename, level=node.level)
+        self._check_toplevel(node)
+
+        if isinstance(node.parent, nodes.Module):
+            # Allow imports nested
+            self._check_position(node)
+        if isinstance(node.scope(), nodes.Module):
+            self._record_import(node, imported_module)
+        if imported_module is None:
+            return
+        for name, _ in node.names:
+            if name != "*":
+                self._add_imported_module(node, f"{imported_module.name}.{name}")
+            else:
+                self._add_imported_module(node, imported_module.name)
+
+    def leave_module(self, node: nodes.Module) -> None:
+        # Check imports are grouped by category (standard, 3rd party, local)
+        std_imports, ext_imports, loc_imports = self._check_imports_order(node)
+
+        # Check that imports are grouped by package within a given category
+        met_import: set[str] = set()  # set for 'import x' style
+        met_from: set[str] = set()  # set for 'from x import y' style
+        current_package = None
+        for import_node, import_name in std_imports + ext_imports + loc_imports:
+            met = met_from if isinstance(import_node, nodes.ImportFrom) else met_import
+            package, _, _ = import_name.partition(".")
+            if (
+                current_package
+                and current_package != package
+                and package in met
+                and not in_type_checking_block(import_node)
+                and not (
+                    isinstance(import_node.parent, nodes.If)
+                    and is_sys_guard(import_node.parent)
+                )
+            ):
+                self.add_message("ungrouped-imports", node=import_node, args=package)
+            current_package = package
+            if not self.linter.is_message_enabled(
+                "ungrouped-imports", import_node.fromlineno
+            ):
+                continue
+            met.add(package)
+
+        self._imports_stack = []
+        self._first_non_import_node = None
+
+    def compute_first_non_import_node(
+        self,
+        node: (
+            nodes.If
+            | nodes.Expr
+            | nodes.Comprehension
+            | nodes.IfExp
+            | nodes.Assign
+            | nodes.AssignAttr
+            | nodes.Try
+        ),
+    ) -> None:
+        # if the node does not contain an import instruction, and if it is the
+        # first node of the module, keep a track of it (all the import positions
+        # of the module will be compared to the position of this first
+        # instruction)
+        if self._first_non_import_node:
+            return
+        if not isinstance(node.parent, nodes.Module):
+            return
+        if isinstance(node, nodes.Try) and any(
+            node.nodes_of_class((nodes.Import, nodes.ImportFrom))
+        ):
+            return
+        if isinstance(node, nodes.Assign):
+            # Add compatibility for module level dunder names
+            # https://www.python.org/dev/peps/pep-0008/#module-level-dunder-names
+            valid_targets = [
+                isinstance(target, nodes.AssignName)
+                and target.name.startswith("__")
+                and target.name.endswith("__")
+                for target in node.targets
+            ]
+            if all(valid_targets):
+                return
+        self._first_non_import_node = node
+
    # All of these node types can be "the first non-import statement".
    visit_try = visit_assignattr = visit_assign = visit_ifexp = visit_comprehension = (
        visit_expr
    ) = visit_if = compute_first_non_import_node
+
+    def visit_functiondef(
+        self, node: nodes.FunctionDef | nodes.While | nodes.For | nodes.ClassDef
+    ) -> None:
+        # If it is the first non import instruction of the module, record it.
+        if self._first_non_import_node:
+            return
+
+        # Check if the node belongs to an `If` or a `Try` block. If they
+        # contain imports, skip recording this node.
+        if not isinstance(node.parent.scope(), nodes.Module):
+            return
+
+        root = node
+        while not isinstance(root.parent, nodes.Module):
+            root = root.parent
+
+        if isinstance(root, (nodes.If, nodes.Try)):
+            if any(root.nodes_of_class((nodes.Import, nodes.ImportFrom))):
+                return
+
+        self._first_non_import_node = node
+
     visit_classdef = visit_for = visit_while = visit_functiondef

-    def _check_position(self, node: ImportNode) ->None:
+    def _check_misplaced_future(self, node: nodes.ImportFrom) -> None:
+        basename = node.modname
+        if basename == "__future__":
+            # check if this is the first non-docstring statement in the module
+            prev = node.previous_sibling()
+            if prev:
+                # consecutive future statements are possible
+                if not (
+                    isinstance(prev, nodes.ImportFrom) and prev.modname == "__future__"
+                ):
+                    self.add_message("misplaced-future", node=node)
+
+    def _check_same_line_imports(self, node: nodes.ImportFrom) -> None:
+        # Detect duplicate imports on the same line.
+        names = (name for name, _ in node.names)
+        counter = collections.Counter(names)
+        for name, count in counter.items():
+            if count > 1:
+                self.add_message("reimported", node=node, args=(name, node.fromlineno))
+
+    def _check_position(self, node: ImportNode) -> None:
         """Check `node` import or importfrom node position is correct.

         Send a message  if `node` comes before another instruction
         """
-        pass
+        # if a first non-import instruction has already been encountered,
+        # it means the import comes after it and therefore is not well placed
+        if self._first_non_import_node:
+            if self.linter.is_message_enabled(
+                "wrong-import-position", self._first_non_import_node.fromlineno
+            ):
+                self.add_message(
+                    "wrong-import-position", node=node, args=node.as_string()
+                )
+            else:
+                self.linter.add_ignored_message(
+                    "wrong-import-position", node.fromlineno, node
+                )

-    def _record_import(self, node: ImportNode, importedmodnode: (nodes.
-        Module | None)) ->None:
+    def _record_import(
+        self,
+        node: ImportNode,
+        importedmodnode: nodes.Module | None,
+    ) -> None:
         """Record the package `node` imports from."""
-        pass
+        if isinstance(node, nodes.ImportFrom):
+            importedname = node.modname
+        else:
+            importedname = importedmodnode.name if importedmodnode else None
+        if not importedname:
+            importedname = node.names[0][0].split(".")[0]
+
+        if isinstance(node, nodes.ImportFrom) and (node.level or 0) >= 1:
+            # We need the importedname with first point to detect local package
+            # Example of node:
+            #  'from .my_package1 import MyClass1'
+            #  the output should be '.my_package1' instead of 'my_package1'
+            # Example of node:
+            #  'from . import my_package2'
+            #  the output should be '.my_package2' instead of '{pyfile}'
+            importedname = "." + importedname
+
+        self._imports_stack.append((node, importedname))

-    def _check_imports_order(self, _module_node: nodes.Module) ->tuple[list
-        [tuple[ImportNode, str]], list[tuple[ImportNode, str]], list[tuple[
-        ImportNode, str]]]:
+    @staticmethod
+    def _is_fallback_import(
+        node: ImportNode, imports: list[tuple[ImportNode, str]]
+    ) -> bool:
+        imports = [import_node for (import_node, _) in imports]
+        return any(astroid.are_exclusive(import_node, node) for import_node in imports)
+
    # pylint: disable = too-many-statements
    def _check_imports_order(self, _module_node: nodes.Module) -> tuple[
        list[tuple[ImportNode, str]],
        list[tuple[ImportNode, str]],
        list[tuple[ImportNode, str]],
    ]:
        """Checks imports of module `node` are grouped by category.

        Imports must follow this order: standard, 3rd party, local

        Returns the (standard, external, local) import groups collected from
        self._imports_stack; wrong-import-order messages are emitted as a
        side effect.
        """
        std_imports: list[tuple[ImportNode, str]] = []
        third_party_imports: list[tuple[ImportNode, str]] = []
        first_party_imports: list[tuple[ImportNode, str]] = []
        # need of a list that holds third or first party ordered import
        external_imports: list[tuple[ImportNode, str]] = []
        local_imports: list[tuple[ImportNode, str]] = []
        # The *_not_ignored lists keep only imports whose line has
        # wrong-import-order enabled: these are the imports a later,
        # higher-ranked category import can be reported against.
        third_party_not_ignored: list[tuple[ImportNode, str]] = []
        first_party_not_ignored: list[tuple[ImportNode, str]] = []
        local_not_ignored: list[tuple[ImportNode, str]] = []
        isort_driver = IsortDriver(self.linter.config)
        for node, modname in self._imports_stack:
            if modname.startswith("."):
                # Relative import: keep the leading dot on the base package.
                package = "." + modname.split(".")[1]
            else:
                package = modname.split(".")[0]
            # Imports nested inside a function/class body are never reported.
            nested = not isinstance(node.parent, nodes.Module)
            ignore_for_import_order = not self.linter.is_message_enabled(
                "wrong-import-order", node.fromlineno
            )
            # isort classifies the base package: FUTURE/STDLIB/THIRDPARTY/
            # FIRSTPARTY/LOCALFOLDER.
            import_category = isort_driver.place_module(package)
            node_and_package_import = (node, package)

            if import_category in {"FUTURE", "STDLIB"}:
                std_imports.append(node_and_package_import)
                wrong_import = (
                    third_party_not_ignored
                    or first_party_not_ignored
                    or local_not_ignored
                )
                # A stdlib import that is exclusive with the offending import
                # (try/except fallback) is deliberate: skip it.
                if self._is_fallback_import(node, wrong_import):
                    continue
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        # TODO: this isn't right for multiple imports on the same line
                        args=(
                            f'standard import "{self._get_full_import_name((node, package))}"',
                            self._get_out_of_order_string(
                                third_party_not_ignored,
                                first_party_not_ignored,
                                local_not_ignored,
                            ),
                        ),
                    )
            elif import_category == "THIRDPARTY":
                third_party_imports.append(node_and_package_import)
                external_imports.append(node_and_package_import)
                if not nested:
                    if not ignore_for_import_order:
                        third_party_not_ignored.append(node_and_package_import)
                    else:
                        # Track the suppression for useless-suppression.
                        self.linter.add_ignored_message(
                            "wrong-import-order", node.fromlineno, node
                        )
                # Third-party imports are misplaced only after first-party or
                # local ones.
                wrong_import = first_party_not_ignored or local_not_ignored
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        args=(
                            f'third party import "{self._get_full_import_name((node, package))}"',
                            self._get_out_of_order_string(
                                None, first_party_not_ignored, local_not_ignored
                            ),
                        ),
                    )
            elif import_category == "FIRSTPARTY":
                first_party_imports.append(node_and_package_import)
                external_imports.append(node_and_package_import)
                if not nested:
                    if not ignore_for_import_order:
                        first_party_not_ignored.append(node_and_package_import)
                    else:
                        self.linter.add_ignored_message(
                            "wrong-import-order", node.fromlineno, node
                        )
                # First-party imports are misplaced only after local ones.
                wrong_import = local_not_ignored
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        args=(
                            f'first party import "{self._get_full_import_name((node, package))}"',
                            self._get_out_of_order_string(
                                None, None, local_not_ignored
                            ),
                        ),
                    )
            elif import_category == "LOCALFOLDER":
                local_imports.append((node, package))
                if not nested:
                    if not ignore_for_import_order:
                        local_not_ignored.append((node, package))
                    else:
                        self.linter.add_ignored_message(
                            "wrong-import-order", node.fromlineno, node
                        )
        return std_imports, external_imports, local_imports
+
+    def _get_out_of_order_string(
+        self,
+        third_party_imports: list[tuple[ImportNode, str]] | None,
+        first_party_imports: list[tuple[ImportNode, str]] | None,
+        local_imports: list[tuple[ImportNode, str]] | None,
+    ) -> str:
+        # construct the string listing out of order imports used in the message
+        # for wrong-import-order
+        if third_party_imports:
+            plural = "s" if len(third_party_imports) > 1 else ""
+            if len(third_party_imports) > MAX_NUMBER_OF_IMPORT_SHOWN:
+                imports_list = (
+                    ", ".join(
+                        [
+                            f'"{self._get_full_import_name(tpi)}"'
+                            for tpi in third_party_imports[
+                                : int(MAX_NUMBER_OF_IMPORT_SHOWN // 2)
+                            ]
+                        ]
+                    )
+                    + " (...) "
+                    + ", ".join(
+                        [
+                            f'"{self._get_full_import_name(tpi)}"'
+                            for tpi in third_party_imports[
+                                int(-MAX_NUMBER_OF_IMPORT_SHOWN // 2) :
+                            ]
+                        ]
+                    )
+                )
+            else:
+                imports_list = ", ".join(
+                    [
+                        f'"{self._get_full_import_name(tpi)}"'
+                        for tpi in third_party_imports
+                    ]
+                )
+            third_party = f"third party import{plural} {imports_list}"
+        else:
+            third_party = ""
+
+        if first_party_imports:
+            plural = "s" if len(first_party_imports) > 1 else ""
+            if len(first_party_imports) > MAX_NUMBER_OF_IMPORT_SHOWN:
+                imports_list = (
+                    ", ".join(
+                        [
+                            f'"{self._get_full_import_name(tpi)}"'
+                            for tpi in first_party_imports[
+                                : int(MAX_NUMBER_OF_IMPORT_SHOWN // 2)
+                            ]
+                        ]
+                    )
+                    + " (...) "
+                    + ", ".join(
+                        [
+                            f'"{self._get_full_import_name(tpi)}"'
+                            for tpi in first_party_imports[
+                                int(-MAX_NUMBER_OF_IMPORT_SHOWN // 2) :
+                            ]
+                        ]
+                    )
+                )
+            else:
+                imports_list = ", ".join(
+                    [
+                        f'"{self._get_full_import_name(fpi)}"'
+                        for fpi in first_party_imports
+                    ]
+                )
+            first_party = f"first party import{plural} {imports_list}"
+        else:
+            first_party = ""
+
+        if local_imports:
+            plural = "s" if len(local_imports) > 1 else ""
+            if len(local_imports) > MAX_NUMBER_OF_IMPORT_SHOWN:
+                imports_list = (
+                    ", ".join(
+                        [
+                            f'"{self._get_full_import_name(tpi)}"'
+                            for tpi in local_imports[
+                                : int(MAX_NUMBER_OF_IMPORT_SHOWN // 2)
+                            ]
+                        ]
+                    )
+                    + " (...) "
+                    + ", ".join(
+                        [
+                            f'"{self._get_full_import_name(tpi)}"'
+                            for tpi in local_imports[
+                                int(-MAX_NUMBER_OF_IMPORT_SHOWN // 2) :
+                            ]
+                        ]
+                    )
+                )
+            else:
+                imports_list = ", ".join(
+                    [f'"{self._get_full_import_name(li)}"' for li in local_imports]
+                )
+            local = f"local import{plural} {imports_list}"
+        else:
+            local = ""
+
+        delimiter_third_party = (
+            (
+                ", "
+                if (first_party and local)
+                else (" and " if (first_party or local) else "")
+            )
+            if third_party
+            else ""
+        )
+        delimiter_first_party1 = (
+            (", " if (third_party and local) else " ") if first_party else ""
+        )
+        delimiter_first_party2 = ("and " if local else "") if first_party else ""
+        delimiter_first_party = f"{delimiter_first_party1}{delimiter_first_party2}"
+        msg = (
+            f"{third_party}{delimiter_third_party}"
+            f"{first_party}{delimiter_first_party}"
+            f'{local if local else ""}'
+        )
+
+        return msg
+
+    def _get_full_import_name(self, importNode: ImportNode) -> str:
+        # construct a more descriptive name of the import
+        # for: import X, this returns X
+        # for: import X.Y this returns X.Y
+        # for: from X import Y, this returns X.Y
+
+        try:
+            # this will only succeed for ImportFrom nodes, which in themselves
+            # contain the information needed to reconstruct the package
+            return f"{importNode[0].modname}.{importNode[0].names[0][0]}"
+        except AttributeError:
+            # in all other cases, the import will either be X or X.Y
+            node: str = importNode[0].names[0][0]
+            package: str = importNode[1]
+
+            if node.split(".")[0] == package:
+                # this is sufficient with one import per line, since package = X
+                # and node = X.Y or X
+                return node
+
+            # when there is a node that contains multiple imports, the "current"
+            # import being analyzed is specified by package (node is the first
+            # import on the line and therefore != package in this case)
+            return package
+
    def _get_imported_module(
        self, importnode: ImportNode, modname: str
    ) -> nodes.Module | None:
        """Resolve `modname` from `importnode` to its astroid Module.

        Returns None when resolution fails; emits relative-beyond-top-level,
        syntax-error or import-error as appropriate for the failure.
        """
        try:
            return importnode.do_import_module(modname)
        except astroid.TooManyLevelsError:
            # Relative import with more leading dots than package levels.
            if _ignore_import_failure(importnode, modname, self._ignored_modules):
                return None
            self.add_message("relative-beyond-top-level", node=importnode)
        except astroid.AstroidSyntaxError as exc:
            # The imported module itself failed to parse.
            message = f"Cannot import {modname!r} due to '{exc.error}'"
            self.add_message(
                "syntax-error", line=importnode.lineno, args=message, confidence=HIGH
            )

        except astroid.AstroidBuildingError:
            # Suppress the report when import-error is disabled, the module is
            # explicitly ignored, or the import lives in a try/except fallback
            # block we were told not to analyse.
            if not self.linter.is_message_enabled("import-error"):
                return None
            if _ignore_import_failure(importnode, modname, self._ignored_modules):
                return None
            if (
                not self.linter.config.analyse_fallback_blocks
                and is_from_fallback_block(importnode)
            ):
                return None

            dotted_modname = get_import_name(importnode, modname)
            self.add_message("import-error", args=repr(dotted_modname), node=importnode)
        except Exception as e:  # pragma: no cover
            # Anything else is an astroid bug: surface it as such.
            raise astroid.AstroidError from e
        return None
+
    def _add_imported_module(self, node: ImportNode, importedmodname: str) -> None:
        """Notify an imported module, used to analyze dependencies.

        Updates self.linter.stats.dependencies and self.import_graph, and
        emits import-self when a module imports itself.
        """
        module_file = node.root().file
        context_name = node.root().name
        base = os.path.splitext(os.path.basename(module_file))[0]

        try:
            # Resolve the module part of the imported name; relative imports
            # need the importing file's path to resolve the leading dots.
            if isinstance(node, nodes.ImportFrom) and node.level:
                importedmodname = astroid.modutils.get_module_part(
                    importedmodname, module_file
                )
            else:
                importedmodname = astroid.modutils.get_module_part(importedmodname)
        except ImportError:
            # Best-effort: keep the unresolved name as-is.
            pass

        if context_name == importedmodname:
            self.add_message("import-self", node=node)

        elif not astroid.modutils.is_stdlib_module(importedmodname):
            # if this is not a package __init__ module
            if base != "__init__" and context_name not in self._module_pkg:
                # record the module's parent, or the module itself if this is
                # a top level module, as the package it belongs to
                self._module_pkg[context_name] = context_name.rsplit(".", 1)[0]

            # handle dependencies
            dependencies_stat: dict[str, set[str]] = self.linter.stats.dependencies
            importedmodnames = dependencies_stat.setdefault(importedmodname, set())
            if context_name not in importedmodnames:
                importedmodnames.add(context_name)

            # update import graph
            self.import_graph[context_name].add(importedmodname)
            # Exclude the edge from cycle detection when cyclic-import is
            # disabled on this line or the import is under TYPE_CHECKING.
            if not self.linter.is_message_enabled(
                "cyclic-import", line=node.lineno
            ) or in_type_checking_block(node):
                self._excluded_edges[context_name].add(importedmodname)

-    def _check_preferred_module(self, node: ImportNode, mod_path: str) ->None:
+    def _check_preferred_module(self, node: ImportNode, mod_path: str) -> None:
         """Check if the module has a preferred replacement."""
-        pass
+        mod_compare = [mod_path]
+        # build a comparison list of possible names using importfrom
+        if isinstance(node, astroid.nodes.node_classes.ImportFrom):
+            mod_compare = [f"{node.modname}.{name[0]}" for name in node.names]
+
+        # find whether there are matches with the import vs preferred_modules keys
+        matches = [
+            k
+            for k in self.preferred_modules
+            for mod in mod_compare
+            # exact match
+            if k == mod
+            # checks for base module matches
+            or k in mod.split(".")[0]
+        ]
+
+        # if we have matches, add message
+        if matches:
+            self.add_message(
+                "preferred-module",
+                node=node,
+                args=(self.preferred_modules[matches[0]], matches[0]),
+            )
+
+    def _check_import_as_rename(self, node: ImportNode) -> None:
+        names = node.names
+        for name in names:
+            if not all(name):
+                return
+
+            splitted_packages = name[0].rsplit(".", maxsplit=1)
+            import_name = splitted_packages[-1]
+            aliased_name = name[1]
+            if import_name != aliased_name:
+                continue
+
+            if len(splitted_packages) == 1 and (
+                self._allow_reexport_package is False
+                or self._current_module_package is False
+            ):
+                self.add_message("useless-import-alias", node=node, confidence=HIGH)
+            elif len(splitted_packages) == 2:
+                self.add_message(
+                    "consider-using-from-import",
+                    node=node,
+                    args=(splitted_packages[0], import_name),
+                )

-    def _check_reimport(self, node: ImportNode, basename: (str | None)=None,
-        level: (int | None)=None) ->None:
+    def _check_reimport(
+        self,
+        node: ImportNode,
+        basename: str | None = None,
+        level: int | None = None,
+    ) -> None:
         """Check if a module with the same name is already imported or aliased."""
-        pass
+        if not self.linter.is_message_enabled(
+            "reimported"
+        ) and not self.linter.is_message_enabled("shadowed-import"):
+            return

-    def _report_external_dependencies(self, sect: Section, _: LinterStats,
-        _dummy: (LinterStats | None)) ->None:
+        frame = node.frame()
+        root = node.root()
+        contexts = [(frame, level)]
+        if root is not frame:
+            contexts.append((root, None))
+
+        for known_context, known_level in contexts:
+            for name, alias in node.names:
+                first, msg = _get_first_import(
+                    node, known_context, name, basename, known_level, alias
+                )
+                if first is not None and msg is not None:
+                    name = name if msg == "reimported" else alias
+                    self.add_message(
+                        msg, node=node, args=(name, first.fromlineno), confidence=HIGH
+                    )
+
+    def _report_external_dependencies(
+        self, sect: Section, _: LinterStats, _dummy: LinterStats | None
+    ) -> None:
         """Return a verbatim layout for displaying dependencies."""
-        pass
+        dep_info = _make_tree_defs(self._external_dependencies_info.items())
+        if not dep_info:
+            raise EmptyReportError()
+        tree_str = _repr_tree_defs(dep_info)
+        sect.append(VerbatimText(tree_str))

-    def _report_dependencies_graph(self, sect: Section, _: LinterStats,
-        _dummy: (LinterStats | None)) ->None:
+    def _report_dependencies_graph(
+        self, sect: Section, _: LinterStats, _dummy: LinterStats | None
+    ) -> None:
         """Write dependencies as a dot (graphviz) file."""
-        pass
+        dep_info = self.linter.stats.dependencies
+        if not dep_info or not (
+            self.linter.config.import_graph
+            or self.linter.config.ext_import_graph
+            or self.linter.config.int_import_graph
+        ):
+            raise EmptyReportError()
+        filename = self.linter.config.import_graph
+        if filename:
+            _make_graph(filename, dep_info, sect, "")
+        filename = self.linter.config.ext_import_graph
+        if filename:
+            _make_graph(filename, self._external_dependencies_info, sect, "external ")
+        filename = self.linter.config.int_import_graph
+        if filename:
+            _make_graph(filename, self._internal_dependencies_info, sect, "internal ")

-    def _filter_dependencies_graph(self, internal: bool) ->defaultdict[str,
-        set[str]]:
+    def _filter_dependencies_graph(self, internal: bool) -> defaultdict[str, set[str]]:
         """Build the internal or the external dependency graph."""
-        pass
+        graph: defaultdict[str, set[str]] = defaultdict(set)
+        for importee, importers in self.linter.stats.dependencies.items():
+            for importer in importers:
+                package = self._module_pkg.get(importer, importer)
+                is_inside = importee.startswith(package)
+                if is_inside and internal or not is_inside and not internal:
+                    graph[importee].add(importer)
+        return graph

     @cached_property
-    def _external_dependencies_info(self) ->defaultdict[str, set[str]]:
+    def _external_dependencies_info(self) -> defaultdict[str, set[str]]:
         """Return cached external dependencies information or build and
         cache them.
         """
-        pass
+        return self._filter_dependencies_graph(internal=False)

     @cached_property
-    def _internal_dependencies_info(self) ->defaultdict[str, set[str]]:
+    def _internal_dependencies_info(self) -> defaultdict[str, set[str]]:
         """Return cached internal dependencies information or build and
         cache them.
         """
-        pass
+        return self._filter_dependencies_graph(internal=True)
+
+    def _check_wildcard_imports(
+        self, node: nodes.ImportFrom, imported_module: nodes.Module | None
+    ) -> None:
+        if node.root().package:
+            # Skip the check if in __init__.py issue #2026
+            return
+
+        wildcard_import_is_allowed = self._wildcard_import_is_allowed(imported_module)
+        for name, _ in node.names:
+            if name == "*" and not wildcard_import_is_allowed:
+                self.add_message("wildcard-import", args=node.modname, node=node)
+
+    def _wildcard_import_is_allowed(self, imported_module: nodes.Module | None) -> bool:
+        return (
+            self.linter.config.allow_wildcard_with_all
+            and imported_module is not None
+            and "__all__" in imported_module.locals
+        )

-    def _check_toplevel(self, node: ImportNode) ->None:
+    def _check_toplevel(self, node: ImportNode) -> None:
         """Check whether the import is made outside the module toplevel."""
-        pass
+        # If the scope of the import is a module, then obviously it is
+        # not outside the module toplevel.
+        if isinstance(node.scope(), nodes.Module):
+            return
+
+        module_names = [
+            (
+                f"{node.modname}.{name[0]}"
+                if isinstance(node, nodes.ImportFrom)
+                else name[0]
+            )
+            for name in node.names
+        ]
+
+        # Get the full names of all the imports that are only allowed at the module level
+        scoped_imports = [
+            name for name in module_names if name not in self._allow_any_import_level
+        ]
+
+        if scoped_imports:
+            self.add_message(
+                "import-outside-toplevel", args=", ".join(scoped_imports), node=node
+            )
+
+
def register(linter: PyLinter) -> None:
    """Register the imports checker with *linter*."""
    checker = ImportsChecker(linter)
    linter.register_checker(checker)
diff --git a/pylint/checkers/lambda_expressions.py b/pylint/checkers/lambda_expressions.py
index a4466072b..18c03060d 100644
--- a/pylint/checkers/lambda_expressions.py
+++ b/pylint/checkers/lambda_expressions.py
@@ -1,31 +1,93 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from itertools import zip_longest
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 class LambdaExpressionChecker(BaseChecker):
     """Check for unnecessary usage of lambda expressions."""
-    name = 'lambda-expressions'
-    msgs = {'C3001': (
-        'Lambda expression assigned to a variable. Define a function using the "def" keyword instead.'
-        , 'unnecessary-lambda-assignment',
-        'Used when a lambda expression is assigned to variable rather than defining a standard function with the "def" keyword.'
-        ), 'C3002': (
-        'Lambda expression called directly. Execute the expression inline instead.'
-        , 'unnecessary-direct-lambda-call',
-        'Used when a lambda expression is directly called rather than executing its contents inline.'
-        )}
+
+    name = "lambda-expressions"
+    msgs = {
+        "C3001": (
+            "Lambda expression assigned to a variable. "
+            'Define a function using the "def" keyword instead.',
+            "unnecessary-lambda-assignment",
+            "Used when a lambda expression is assigned to variable "
+            'rather than defining a standard function with the "def" keyword.',
+        ),
+        "C3002": (
+            "Lambda expression called directly. Execute the expression inline instead.",
+            "unnecessary-direct-lambda-call",
+            "Used when a lambda expression is directly called "
+            "rather than executing its contents inline.",
+        ),
+    }
     options = ()

-    def visit_assign(self, node: nodes.Assign) ->None:
+    def visit_assign(self, node: nodes.Assign) -> None:
         """Check if lambda expression is assigned to a variable."""
-        pass
+        if isinstance(node.targets[0], nodes.AssignName) and isinstance(
+            node.value, nodes.Lambda
+        ):
+            self.add_message(
+                "unnecessary-lambda-assignment",
+                node=node.value,
+                confidence=HIGH,
+            )
+        elif isinstance(node.targets[0], nodes.Tuple) and isinstance(
+            node.value, (nodes.Tuple, nodes.List)
+        ):
+            # Iterate over tuple unpacking assignment elements and
+            # see if any lambdas are assigned to a variable.
+            # N.B. We may encounter W0632 (unbalanced-tuple-unpacking)
+            # and still need to flag the lambdas that are being assigned.
+            for lhs_elem, rhs_elem in zip_longest(
+                node.targets[0].elts, node.value.elts
+            ):
+                if lhs_elem is None or rhs_elem is None:
+                    # unbalanced tuple unpacking. stop checking.
+                    break
+                if isinstance(lhs_elem, nodes.AssignName) and isinstance(
+                    rhs_elem, nodes.Lambda
+                ):
+                    self.add_message(
+                        "unnecessary-lambda-assignment",
+                        node=rhs_elem,
+                        confidence=HIGH,
+                    )
+
+    def visit_namedexpr(self, node: nodes.NamedExpr) -> None:
+        if isinstance(node.target, nodes.AssignName) and isinstance(
+            node.value, nodes.Lambda
+        ):
+            self.add_message(
+                "unnecessary-lambda-assignment",
+                node=node.value,
+                confidence=HIGH,
+            )

-    def visit_call(self, node: nodes.Call) ->None:
+    def visit_call(self, node: nodes.Call) -> None:
         """Check if lambda expression is called directly."""
-        pass
+        if isinstance(node.func, nodes.Lambda):
+            self.add_message(
+                "unnecessary-direct-lambda-call",
+                node=node,
+                confidence=HIGH,
+            )
+
+
def register(linter: PyLinter) -> None:
    """Register the lambda-expressions checker with *linter*."""
    checker = LambdaExpressionChecker(linter)
    linter.register_checker(checker)
diff --git a/pylint/checkers/logging.py b/pylint/checkers/logging.py
index e7eeb4359..8a02d662b 100644
--- a/pylint/checkers/logging.py
+++ b/pylint/checkers/logging.py
@@ -1,46 +1,110 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Checker for use of Python logging."""
+
 from __future__ import annotations
+
 import string
 from typing import TYPE_CHECKING, Literal
+
 import astroid
 from astroid import bases, nodes
 from astroid.typing import InferenceResult
+
 from pylint import checkers
 from pylint.checkers import utils
 from pylint.checkers.utils import infer_all
 from pylint.typing import MessageDefinitionTuple
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-MSGS: dict[str, MessageDefinitionTuple] = {'W1201': (
-    'Use %s formatting in logging functions', 'logging-not-lazy',
-    'Used when a logging statement has a call form of "logging.<logging method>(format_string % (format_args...))". Use another type of string formatting instead. You can use % formatting but leave interpolation to the logging function by passing the parameters as arguments. If logging-fstring-interpolation is disabled then you can use fstring formatting. If logging-format-interpolation is disabled then you can use str.format.'
-    ), 'W1202': ('Use %s formatting in logging functions',
-    'logging-format-interpolation',
-    'Used when a logging statement has a call form of "logging.<logging method>(format_string.format(format_args...))". Use another type of string formatting instead. You can use % formatting but leave interpolation to the logging function by passing the parameters as arguments. If logging-fstring-interpolation is disabled then you can use fstring formatting. If logging-not-lazy is disabled then you can use % formatting as normal.'
-    ), 'W1203': ('Use %s formatting in logging functions',
-    'logging-fstring-interpolation',
-    'Used when a logging statement has a call form of "logging.<logging method>(f"...")".Use another type of string formatting instead. You can use % formatting but leave interpolation to the logging function by passing the parameters as arguments. If logging-format-interpolation is disabled then you can use str.format. If logging-not-lazy is disabled then you can use % formatting as normal.'
-    ), 'E1200': (
-    'Unsupported logging format character %r (%#02x) at index %d',
-    'logging-unsupported-format',
-    'Used when an unsupported format character is used in a logging statement format string.'
-    ), 'E1201': (
-    'Logging format string ends in middle of conversion specifier',
-    'logging-format-truncated',
-    'Used when a logging statement format string terminates before the end of a conversion specifier.'
-    ), 'E1205': ('Too many arguments for logging format string',
-    'logging-too-many-args',
-    'Used when a logging format string is given too many arguments.'),
-    'E1206': ('Not enough arguments for logging format string',
-    'logging-too-few-args',
-    'Used when a logging format string is given too few arguments.')}
-CHECKED_CONVENIENCE_FUNCTIONS = {'critical', 'debug', 'error', 'exception',
-    'fatal', 'info', 'warn', 'warning'}
-MOST_COMMON_FORMATTING = frozenset(['%s', '%d', '%f', '%r'])
-
-
-def is_method_call(func: bases.BoundMethod, types: tuple[str, ...]=(),
-    methods: tuple[str, ...]=()) ->bool:
+
+MSGS: dict[str, MessageDefinitionTuple] = (
+    {  # pylint: disable=consider-using-namedtuple-or-dataclass
+        "W1201": (
+            "Use %s formatting in logging functions",
+            "logging-not-lazy",
+            "Used when a logging statement has a call form of "
+            '"logging.<logging method>(format_string % (format_args...))". '
+            "Use another type of string formatting instead. "
+            "You can use % formatting but leave interpolation to "
+            "the logging function by passing the parameters as arguments. "
+            "If logging-fstring-interpolation is disabled then "
+            "you can use fstring formatting. "
+            "If logging-format-interpolation is disabled then "
+            "you can use str.format.",
+        ),
+        "W1202": (
+            "Use %s formatting in logging functions",
+            "logging-format-interpolation",
+            "Used when a logging statement has a call form of "
+            '"logging.<logging method>(format_string.format(format_args...))". '
+            "Use another type of string formatting instead. "
+            "You can use % formatting but leave interpolation to "
+            "the logging function by passing the parameters as arguments. "
+            "If logging-fstring-interpolation is disabled then "
+            "you can use fstring formatting. "
+            "If logging-not-lazy is disabled then "
+            "you can use % formatting as normal.",
+        ),
+        "W1203": (
+            "Use %s formatting in logging functions",
+            "logging-fstring-interpolation",
+            "Used when a logging statement has a call form of "
+            '"logging.<logging method>(f"...")".'
+            "Use another type of string formatting instead. "
+            "You can use % formatting but leave interpolation to "
+            "the logging function by passing the parameters as arguments. "
+            "If logging-format-interpolation is disabled then "
+            "you can use str.format. "
+            "If logging-not-lazy is disabled then "
+            "you can use % formatting as normal.",
+        ),
+        "E1200": (
+            "Unsupported logging format character %r (%#02x) at index %d",
+            "logging-unsupported-format",
+            "Used when an unsupported format character is used in a logging "
+            "statement format string.",
+        ),
+        "E1201": (
+            "Logging format string ends in middle of conversion specifier",
+            "logging-format-truncated",
+            "Used when a logging statement format string terminates before "
+            "the end of a conversion specifier.",
+        ),
+        "E1205": (
+            "Too many arguments for logging format string",
+            "logging-too-many-args",
+            "Used when a logging format string is given too many arguments.",
+        ),
+        "E1206": (
+            "Not enough arguments for logging format string",
+            "logging-too-few-args",
+            "Used when a logging format string is given too few arguments.",
+        ),
+    }
+)
+
+
+CHECKED_CONVENIENCE_FUNCTIONS = {
+    "critical",
+    "debug",
+    "error",
+    "exception",
+    "fatal",
+    "info",
+    "warn",
+    "warning",
+}
+
+MOST_COMMON_FORMATTING = frozenset(["%s", "%d", "%f", "%r"])
+
+
+def is_method_call(
+    func: bases.BoundMethod, types: tuple[str, ...] = (), methods: tuple[str, ...] = ()
+) -> bool:
     """Determines if a BoundMethod node represents a method call.

     Args:
@@ -52,77 +116,280 @@ def is_method_call(func: bases.BoundMethod, types: tuple[str, ...]=(),
       true if the node represents a method call for the given type and
       method names, False otherwise.
     """
-    pass
+    return (
+        isinstance(func, astroid.BoundMethod)
+        and isinstance(func.bound, astroid.Instance)
+        and (func.bound.name in types if types else True)
+        and (func.name in methods if methods else True)
+    )


 class LoggingChecker(checkers.BaseChecker):
     """Checks use of the logging module."""
-    name = 'logging'
+
+    name = "logging"
     msgs = MSGS
-    options = ('logging-modules', {'default': ('logging',), 'type': 'csv',
-        'metavar': '<comma separated list>', 'help':
-        'Logging modules to check that the string format arguments are in logging function parameter format.'
-        }), ('logging-format-style', {'default': 'old', 'type': 'choice',
-        'metavar': '<old (%) or new ({)>', 'choices': ['old', 'new'],
-        'help':
-        'The type of string formatting that logging methods do. `old` means using % formatting, `new` is for `{}` formatting.'
-        })
-
-    def visit_module(self, _: nodes.Module) ->None:
+
+    options = (
+        (
+            "logging-modules",
+            {
+                "default": ("logging",),
+                "type": "csv",
+                "metavar": "<comma separated list>",
+                "help": "Logging modules to check that the string format "
+                "arguments are in logging function parameter format.",
+            },
+        ),
+        (
+            "logging-format-style",
+            {
+                "default": "old",
+                "type": "choice",
+                "metavar": "<old (%) or new ({)>",
+                "choices": ["old", "new"],
+                "help": "The type of string formatting that logging methods do. "
+                "`old` means using % formatting, `new` is for `{}` formatting.",
+            },
+        ),
+    )
+
+    def visit_module(self, _: nodes.Module) -> None:
         """Clears any state left in this checker from last module checked."""
-        pass
+        # The code being checked can just as easily "import logging as foo",
+        # so it is necessary to process the imports and store in this field
+        # what name the logging module is actually given.
+        self._logging_names: set[str] = set()
+        logging_mods = self.linter.config.logging_modules
+
+        self._format_style = self.linter.config.logging_format_style

-    def visit_importfrom(self, node: nodes.ImportFrom) ->None:
+        self._logging_modules = set(logging_mods)
+        self._from_imports = {}
+        for logging_mod in logging_mods:
+            parts = logging_mod.rsplit(".", 1)
+            if len(parts) > 1:
+                self._from_imports[parts[0]] = parts[1]
+
+    def visit_importfrom(self, node: nodes.ImportFrom) -> None:
         """Checks to see if a module uses a non-Python logging module."""
-        pass
+        try:
+            logging_name = self._from_imports[node.modname]
+            for module, as_name in node.names:
+                if module == logging_name:
+                    self._logging_names.add(as_name or module)
+        except KeyError:
+            pass

-    def visit_import(self, node: nodes.Import) ->None:
+    def visit_import(self, node: nodes.Import) -> None:
         """Checks to see if this module uses Python's built-in logging."""
-        pass
+        for module, as_name in node.names:
+            if module in self._logging_modules:
+                self._logging_names.add(as_name or module)

-    def visit_call(self, node: nodes.Call) ->None:
+    def visit_call(self, node: nodes.Call) -> None:
         """Checks calls to logging methods."""
-        pass

-    def _check_log_method(self, node: nodes.Call, name: str) ->None:
+        def is_logging_name() -> bool:
+            return (
+                isinstance(node.func, nodes.Attribute)
+                and isinstance(node.func.expr, nodes.Name)
+                and node.func.expr.name in self._logging_names
+            )
+
+        def is_logger_class() -> tuple[bool, str | None]:
+            for inferred in infer_all(node.func):
+                if isinstance(inferred, astroid.BoundMethod):
+                    parent = inferred._proxied.parent
+                    if isinstance(parent, nodes.ClassDef) and (
+                        parent.qname() == "logging.Logger"
+                        or any(
+                            ancestor.qname() == "logging.Logger"
+                            for ancestor in parent.ancestors()
+                        )
+                    ):
+                        return True, inferred._proxied.name
+            return False, None
+
+        if is_logging_name():
+            name = node.func.attrname
+        else:
+            result, name = is_logger_class()
+            if not result:
+                return
+        self._check_log_method(node, name)
+
+    def _check_log_method(self, node: nodes.Call, name: str) -> None:
         """Checks calls to logging.log(level, format, *format_args)."""
-        pass
+        if name == "log":
+            if node.starargs or node.kwargs or len(node.args) < 2:
+                # Either a malformed call, star args, or double-star args. Beyond
+                # the scope of this checker.
+                return
+            format_pos: Literal[0, 1] = 1
+        elif name in CHECKED_CONVENIENCE_FUNCTIONS:
+            if node.starargs or node.kwargs or not node.args:
+                # Either no args, star args, or double-star args. Beyond the
+                # scope of this checker.
+                return
+            format_pos = 0
+        else:
+            return
+
+        format_arg = node.args[format_pos]
+        if isinstance(format_arg, nodes.BinOp):
+            binop = format_arg
+            emit = binop.op == "%"
+            if binop.op == "+" and not self._is_node_explicit_str_concatenation(binop):
+                total_number_of_strings = sum(
+                    1
+                    for operand in (binop.left, binop.right)
+                    if self._is_operand_literal_str(utils.safe_infer(operand))
+                )
+                emit = total_number_of_strings > 0
+            if emit:
+                self.add_message(
+                    "logging-not-lazy",
+                    node=node,
+                    args=(self._helper_string(node),),
+                )
+        elif isinstance(format_arg, nodes.Call):
+            self._check_call_func(format_arg)
+        elif isinstance(format_arg, nodes.Const):
+            self._check_format_string(node, format_pos)
+        elif isinstance(format_arg, nodes.JoinedStr):
+            if str_formatting_in_f_string(format_arg):
+                return
+            self.add_message(
+                "logging-fstring-interpolation",
+                node=node,
+                args=(self._helper_string(node),),
+            )

-    def _helper_string(self, node: nodes.Call) ->str:
+    def _helper_string(self, node: nodes.Call) -> str:
         """Create a string that lists the valid types of formatting for this node."""
-        pass
+        valid_types = ["lazy %"]
+
+        if not self.linter.is_message_enabled(
+            "logging-fstring-formatting", node.fromlineno
+        ):
+            valid_types.append("fstring")
+        if not self.linter.is_message_enabled(
+            "logging-format-interpolation", node.fromlineno
+        ):
+            valid_types.append(".format()")
+        if not self.linter.is_message_enabled("logging-not-lazy", node.fromlineno):
+            valid_types.append("%")
+
+        return " or ".join(valid_types)

     @staticmethod
-    def _is_operand_literal_str(operand: (InferenceResult | None)) ->bool:
+    def _is_operand_literal_str(operand: InferenceResult | None) -> bool:
         """Return True if the operand in argument is a literal string."""
-        pass
+        return isinstance(operand, nodes.Const) and operand.name == "str"

     @staticmethod
-    def _is_node_explicit_str_concatenation(node: nodes.NodeNG) ->bool:
+    def _is_node_explicit_str_concatenation(node: nodes.NodeNG) -> bool:
         """Return True if the node represents an explicitly concatenated string."""
-        pass
+        if not isinstance(node, nodes.BinOp):
+            return False
+        return (
+            LoggingChecker._is_operand_literal_str(node.left)
+            or LoggingChecker._is_node_explicit_str_concatenation(node.left)
+        ) and (
+            LoggingChecker._is_operand_literal_str(node.right)
+            or LoggingChecker._is_node_explicit_str_concatenation(node.right)
+        )

-    def _check_call_func(self, node: nodes.Call) ->None:
+    def _check_call_func(self, node: nodes.Call) -> None:
         """Checks that function call is not format_string.format()."""
-        pass
+        func = utils.safe_infer(node.func)
+        types = ("str", "unicode")
+        methods = ("format",)
+        if (
+            isinstance(func, astroid.BoundMethod)
+            and is_method_call(func, types, methods)
+            and not is_complex_format_str(func.bound)
+        ):
+            self.add_message(
+                "logging-format-interpolation",
+                node=node,
+                args=(self._helper_string(node),),
+            )

-    def _check_format_string(self, node: nodes.Call, format_arg: Literal[0, 1]
-        ) ->None:
+    def _check_format_string(self, node: nodes.Call, format_arg: Literal[0, 1]) -> None:
         """Checks that format string tokens match the supplied arguments.

         Args:
           node: AST node to be checked.
           format_arg: Index of the format string in the node arguments.
         """
-        pass
+        num_args = _count_supplied_tokens(node.args[format_arg + 1 :])
+        if not num_args:
+            # If no args were supplied the string is not interpolated and can contain
+            # formatting characters - it's used verbatim. Don't check any further.
+            return
+
+        format_string = node.args[format_arg].value
+        required_num_args = 0
+        if isinstance(format_string, bytes):
+            format_string = format_string.decode()
+        if isinstance(format_string, str):
+            try:
+                if self._format_style == "old":
+                    keyword_args, required_num_args, _, _ = utils.parse_format_string(
+                        format_string
+                    )
+                    if keyword_args:
+                        # Keyword checking on logging strings is complicated by
+                        # special keywords - out of scope.
+                        return
+                elif self._format_style == "new":
+                    (
+                        keyword_arguments,
+                        implicit_pos_args,
+                        explicit_pos_args,
+                    ) = utils.parse_format_method_string(format_string)

+                    keyword_args_cnt = len(
+                        {k for k, _ in keyword_arguments if not isinstance(k, int)}
+                    )
+                    required_num_args = (
+                        keyword_args_cnt + implicit_pos_args + explicit_pos_args
+                    )
+            except utils.UnsupportedFormatCharacter as ex:
+                char = format_string[ex.index]
+                self.add_message(
+                    "logging-unsupported-format",
+                    node=node,
+                    args=(char, ord(char), ex.index),
+                )
+                return
+            except utils.IncompleteFormatString:
+                self.add_message("logging-format-truncated", node=node)
+                return
+        if num_args > required_num_args:
+            self.add_message("logging-too-many-args", node=node)
+        elif num_args < required_num_args:
+            self.add_message("logging-too-few-args", node=node)

-def is_complex_format_str(node: nodes.NodeNG) ->bool:
+
+def is_complex_format_str(node: nodes.NodeNG) -> bool:
     """Return whether the node represents a string with complex formatting specs."""
-    pass
+    inferred = utils.safe_infer(node)
+    if inferred is None or not (
+        isinstance(inferred, nodes.Const) and isinstance(inferred.value, str)
+    ):
+        return True
+    try:
+        parsed = list(string.Formatter().parse(inferred.value))
+    except ValueError:
+        # This format string is invalid
+        return False
+    return any(format_spec for (_, _, format_spec, _) in parsed)


-def _count_supplied_tokens(args: list[nodes.NodeNG]) ->int:
+def _count_supplied_tokens(args: list[nodes.NodeNG]) -> int:
     """Counts the number of tokens in an args list.

     The Python log functions allow for special keyword arguments: func,
@@ -135,12 +402,21 @@ def _count_supplied_tokens(args: list[nodes.NodeNG]) ->int:
     Returns:
       Number of AST nodes that aren't keywords.
     """
-    pass
+    return sum(1 for arg in args if not isinstance(arg, nodes.Keyword))


-def str_formatting_in_f_string(node: nodes.JoinedStr) ->bool:
+def str_formatting_in_f_string(node: nodes.JoinedStr) -> bool:
     """Determine whether the node represents an f-string with string formatting.

     For example: `f'Hello %s'`
     """
-    pass
+    # Check "%" presence first for performance.
+    return any(
+        "%" in val.value and any(x in val.value for x in MOST_COMMON_FORMATTING)
+        for val in node.values
+        if isinstance(val, nodes.Const)
+    )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(LoggingChecker(linter))
diff --git a/pylint/checkers/method_args.py b/pylint/checkers/method_args.py
index b9264d586..59083fa25 100644
--- a/pylint/checkers/method_args.py
+++ b/pylint/checkers/method_args.py
@@ -1,10 +1,19 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Variables checkers for Python code."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 import astroid
 from astroid import arguments, bases, nodes
+
 from pylint.checkers import BaseChecker, utils
 from pylint.interfaces import INFERENCE
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter

@@ -16,35 +25,106 @@ class MethodArgsChecker(BaseChecker):
     * missing-timeout
     * positional-only-arguments-expected
     """
-    name = 'method_args'
-    msgs = {'W3101': (
-        "Missing timeout argument for method '%s' can cause your program to hang indefinitely"
-        , 'missing-timeout',
-        "Used when a method needs a 'timeout' parameter in order to avoid waiting for a long time. If no timeout is specified explicitly the default value is used. For example for 'requests' the program will never time out (i.e. hang indefinitely)."
-        ), 'E3102': (
-        '`%s()` got some positional-only arguments passed as keyword arguments: %s'
-        , 'positional-only-arguments-expected',
-        'Emitted when positional-only arguments have been passed as keyword arguments. Remove the keywords for the affected arguments in the function call.'
-        , {'minversion': (3, 8)})}
-    options = ('timeout-methods', {'default': ('requests.api.delete',
-        'requests.api.get', 'requests.api.head', 'requests.api.options',
-        'requests.api.patch', 'requests.api.post', 'requests.api.put',
-        'requests.api.request'), 'type': 'csv', 'metavar':
-        '<comma separated list>', 'help':
-        "List of qualified names (i.e., library.method) which require a timeout parameter e.g. 'requests.api.get,requests.api.post'"
-        }),
-
-    def _check_missing_timeout(self, node: nodes.Call) ->None:
+
+    name = "method_args"
+    msgs = {
+        "W3101": (
+            "Missing timeout argument for method '%s' can cause your program to hang indefinitely",
+            "missing-timeout",
+            "Used when a method needs a 'timeout' parameter in order to avoid waiting "
+            "for a long time. If no timeout is specified explicitly the default value "
+            "is used. For example for 'requests' the program will never time out "
+            "(i.e. hang indefinitely).",
+        ),
+        "E3102": (
+            "`%s()` got some positional-only arguments passed as keyword arguments: %s",
+            "positional-only-arguments-expected",
+            "Emitted when positional-only arguments have been passed as keyword arguments. "
+            "Remove the keywords for the affected arguments in the function call.",
+            {"minversion": (3, 8)},
+        ),
+    }
+    options = (
+        (
+            "timeout-methods",
+            {
+                "default": (
+                    "requests.api.delete",
+                    "requests.api.get",
+                    "requests.api.head",
+                    "requests.api.options",
+                    "requests.api.patch",
+                    "requests.api.post",
+                    "requests.api.put",
+                    "requests.api.request",
+                ),
+                "type": "csv",
+                "metavar": "<comma separated list>",
+                "help": "List of qualified names (i.e., library.method) which require a timeout parameter "
+                "e.g. 'requests.api.get,requests.api.post'",
+            },
+        ),
+    )
+
+    @utils.only_required_for_messages(
+        "missing-timeout", "positional-only-arguments-expected"
+    )
+    def visit_call(self, node: nodes.Call) -> None:
+        self._check_missing_timeout(node)
+        self._check_positional_only_arguments_expected(node)
+
+    def _check_missing_timeout(self, node: nodes.Call) -> None:
         """Check if the call needs a timeout parameter based on package.func_name
         configured in config.timeout_methods.

         Package uses inferred node in order to know the package imported.
         """
-        pass
+        inferred = utils.safe_infer(node.func)
+        call_site = arguments.CallSite.from_call(node)
+        if (
+            inferred
+            and not call_site.has_invalid_keywords()
+            and isinstance(
+                inferred, (nodes.FunctionDef, nodes.ClassDef, bases.UnboundMethod)
+            )
+            and inferred.qname() in self.linter.config.timeout_methods
+        ):
+            keyword_arguments = [keyword.arg for keyword in node.keywords]
+            keyword_arguments.extend(call_site.keyword_arguments)
+            if "timeout" not in keyword_arguments:
+                self.add_message(
+                    "missing-timeout",
+                    node=node,
+                    args=(node.func.as_string(),),
+                    confidence=INFERENCE,
+                )

-    def _check_positional_only_arguments_expected(self, node: nodes.Call
-        ) ->None:
+    def _check_positional_only_arguments_expected(self, node: nodes.Call) -> None:
         """Check if positional only arguments have been passed as keyword arguments by
         inspecting its method definition.
         """
-        pass
+        inferred_func = utils.safe_infer(node.func)
+        while isinstance(inferred_func, (astroid.BoundMethod, astroid.UnboundMethod)):
+            inferred_func = inferred_func._proxied
+        if not (
+            isinstance(inferred_func, (nodes.FunctionDef))
+            and inferred_func.args.posonlyargs
+        ):
+            return
+        if inferred_func.args.kwarg:
+            return
+        pos_args = [a.name for a in inferred_func.args.posonlyargs]
+        kws = [k.arg for k in node.keywords if k.arg in pos_args]
+        if not kws:
+            return
+
+        self.add_message(
+            "positional-only-arguments-expected",
+            node=node,
+            args=(node.func.as_string(), ", ".join(f"'{k}'" for k in kws)),
+            confidence=INFERENCE,
+        )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(MethodArgsChecker(linter))
diff --git a/pylint/checkers/misc.py b/pylint/checkers/misc.py
index c7fce6781..78c21d0c5 100644
--- a/pylint/checkers/misc.py
+++ b/pylint/checkers/misc.py
@@ -1,26 +1,53 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Check source code is ascii only or has an encoding declaration (PEP 263)."""
+
 from __future__ import annotations
+
 import re
 import tokenize
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseRawFileChecker, BaseTokenChecker
 from pylint.typing import ManagedMessage
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 class ByIdManagedMessagesChecker(BaseRawFileChecker):
     """Checks for messages that are enabled or disabled by id instead of symbol."""
-    name = 'miscellaneous'
-    msgs = {'I0023': ('%s', 'use-symbolic-message-instead',
-        'Used when a message is enabled or disabled by id.', {
-        'default_enabled': False})}
+
+    name = "miscellaneous"
+    msgs = {
+        "I0023": (
+            "%s",
+            "use-symbolic-message-instead",
+            "Used when a message is enabled or disabled by id.",
+            {"default_enabled": False},
+        )
+    }
     options = ()

-    def process_module(self, node: nodes.Module) ->None:
+    def _clear_by_id_managed_msgs(self) -> None:
+        self.linter._by_id_managed_msgs.clear()
+
+    def _get_by_id_managed_msgs(self) -> list[ManagedMessage]:
+        return self.linter._by_id_managed_msgs
+
+    def process_module(self, node: nodes.Module) -> None:
         """Inspect the source file to find messages activated or deactivated by id."""
-        pass
+        managed_msgs = self._get_by_id_managed_msgs()
+        for mod_name, msgid, symbol, lineno, is_disabled in managed_msgs:
+            if mod_name == node.name:
+                verb = "disable" if is_disabled else "enable"
+                txt = f"'{msgid}' is cryptic: use '# pylint: {verb}={symbol}' instead"
+                self.add_message("use-symbolic-message-instead", line=lineno, args=txt)
+        self._clear_by_id_managed_msgs()


 class EncodingChecker(BaseTokenChecker, BaseRawFileChecker):
@@ -30,21 +57,94 @@ class EncodingChecker(BaseTokenChecker, BaseRawFileChecker):
     * warning notes in the code like FIXME, XXX
     * encoding issues.
     """
-    name = 'miscellaneous'
-    msgs = {'W0511': ('%s', 'fixme',
-        'Used when a warning note as FIXME or XXX is detected.')}
-    options = ('notes', {'type': 'csv', 'metavar':
-        '<comma separated values>', 'default': ('FIXME', 'XXX', 'TODO'),
-        'help':
-        'List of note tags to take in consideration, separated by a comma.'}
-        ), ('notes-rgx', {'type': 'string', 'metavar': '<regexp>', 'help':
-        'Regular expression of note tags to take in consideration.',
-        'default': ''})
-
-    def process_module(self, node: nodes.Module) ->None:
+
+    # configuration section name
+    name = "miscellaneous"
+    msgs = {
+        "W0511": (
+            "%s",
+            "fixme",
+            "Used when a warning note as FIXME or XXX is detected.",
+        )
+    }
+
+    options = (
+        (
+            "notes",
+            {
+                "type": "csv",
+                "metavar": "<comma separated values>",
+                "default": ("FIXME", "XXX", "TODO"),
+                "help": (
+                    "List of note tags to take in consideration, "
+                    "separated by a comma."
+                ),
+            },
+        ),
+        (
+            "notes-rgx",
+            {
+                "type": "string",
+                "metavar": "<regexp>",
+                "help": "Regular expression of note tags to take in consideration.",
+                "default": "",
+            },
+        ),
+    )
+
+    def open(self) -> None:
+        super().open()
+
+        notes = "|".join(re.escape(note) for note in self.linter.config.notes)
+        if self.linter.config.notes_rgx:
+            regex_string = rf"#\s*({notes}|{self.linter.config.notes_rgx})(?=(:|\s|\Z))"
+        else:
+            regex_string = rf"#\s*({notes})(?=(:|\s|\Z))"
+
+        self._fixme_pattern = re.compile(regex_string, re.I)
+
+    def _check_encoding(
+        self, lineno: int, line: bytes, file_encoding: str
+    ) -> str | None:
+        try:
+            return line.decode(file_encoding)
+        except UnicodeDecodeError:
+            pass
+        except LookupError:
+            if (
+                line.startswith(b"#")
+                and "coding" in str(line)
+                and file_encoding in str(line)
+            ):
+                msg = f"Cannot decode using encoding '{file_encoding}', bad encoding"
+                self.add_message("syntax-error", line=lineno, args=msg)
+        return None
+
+    def process_module(self, node: nodes.Module) -> None:
         """Inspect the source file to find encoding problem."""
-        pass
+        encoding = node.file_encoding if node.file_encoding else "ascii"
+
+        with node.stream() as stream:
+            for lineno, line in enumerate(stream):
+                self._check_encoding(lineno + 1, line, encoding)

-    def process_tokens(self, tokens: list[tokenize.TokenInfo]) ->None:
+    def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
         """Inspect the source to find fixme problems."""
-        pass
+        if not self.linter.config.notes:
+            return
+        for token_info in tokens:
+            if token_info.type != tokenize.COMMENT:
+                continue
+            comment_text = token_info.string[1:].lstrip()  # trim '#' and white-spaces
+            if self._fixme_pattern.search("#" + comment_text.lower()):
+                self.add_message(
+                    "fixme",
+                    col_offset=token_info.start[1] + 1,
+                    args=comment_text,
+                    line=token_info.start[0],
+                )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(EncodingChecker(linter))
+    linter.register_checker(ByIdManagedMessagesChecker(linter))
diff --git a/pylint/checkers/modified_iterating_checker.py b/pylint/checkers/modified_iterating_checker.py
index a2395f574..be8d967ab 100644
--- a/pylint/checkers/modified_iterating_checker.py
+++ b/pylint/checkers/modified_iterating_checker.py
@@ -1,12 +1,22 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint import checkers, interfaces
 from pylint.checkers import utils
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-_LIST_MODIFIER_METHODS = {'append', 'remove'}
-_SET_MODIFIER_METHODS = {'add', 'clear', 'discard', 'pop', 'remove'}
+
+
+_LIST_MODIFIER_METHODS = {"append", "remove"}
+_SET_MODIFIER_METHODS = {"add", "clear", "discard", "pop", "remove"}


 class ModifiedIterationChecker(checkers.BaseChecker):
@@ -14,23 +24,177 @@ class ModifiedIterationChecker(checkers.BaseChecker):

     Currently supports `for` loops for Sets, Dictionaries and Lists.
     """
-    name = 'modified_iteration'
-    msgs = {'W4701': (
-        "Iterated list '%s' is being modified inside for loop body, consider iterating through a copy of it instead."
-        , 'modified-iterating-list',
-        'Emitted when items are added or removed to a list being iterated through. Doing so can result in unexpected behaviour, that is why it is preferred to use a copy of the list.'
-        ), 'E4702': (
-        "Iterated dict '%s' is being modified inside for loop body, iterate through a copy of it instead."
-        , 'modified-iterating-dict',
-        'Emitted when items are added or removed to a dict being iterated through. Doing so raises a RuntimeError.'
-        ), 'E4703': (
-        "Iterated set '%s' is being modified inside for loop body, iterate through a copy of it instead."
-        , 'modified-iterating-set',
-        'Emitted when items are added or removed to a set being iterated through. Doing so raises a RuntimeError.'
-        )}
+
+    name = "modified_iteration"
+
+    msgs = {
+        "W4701": (
+            "Iterated list '%s' is being modified inside for loop body, consider iterating through a copy of it "
+            "instead.",
+            "modified-iterating-list",
+            "Emitted when items are added or removed to a list being iterated through. "
+            "Doing so can result in unexpected behaviour, that is why it is preferred to use a copy of the list.",
+        ),
+        "E4702": (
+            "Iterated dict '%s' is being modified inside for loop body, iterate through a copy of it instead.",
+            "modified-iterating-dict",
+            "Emitted when items are added or removed to a dict being iterated through. "
+            "Doing so raises a RuntimeError.",
+        ),
+        "E4703": (
+            "Iterated set '%s' is being modified inside for loop body, iterate through a copy of it instead.",
+            "modified-iterating-set",
+            "Emitted when items are added or removed to a set being iterated through. "
+            "Doing so raises a RuntimeError.",
+        ),
+    }
+
     options = ()

-    def _modified_iterating_check_on_node_and_children(self, body_node:
-        nodes.NodeNG, iter_obj: nodes.NodeNG) ->None:
+    @utils.only_required_for_messages(
+        "modified-iterating-list", "modified-iterating-dict", "modified-iterating-set"
+    )
+    def visit_for(self, node: nodes.For) -> None:
+        """Run the modified-iteration checks on every statement of the loop body."""
+        iter_obj = node.iter
+        for body_node in node.body:
+            self._modified_iterating_check_on_node_and_children(body_node, iter_obj)
+
+    def _modified_iterating_check_on_node_and_children(
+        self, body_node: nodes.NodeNG, iter_obj: nodes.NodeNG
+    ) -> None:
         """See if node or any of its children raises modified iterating messages."""
-        pass
+        self._modified_iterating_check(body_node, iter_obj)
+        # Recurse so modifications nested inside compound statements are found too.
+        for child in body_node.get_children():
+            self._modified_iterating_check_on_node_and_children(child, iter_obj)
+
+    def _modified_iterating_check(
+        self, node: nodes.NodeNG, iter_obj: nodes.NodeNG
+    ) -> None:
+        """Emit the matching modified-iterating-* message when ``node`` mutates
+        ``iter_obj`` while the enclosing ``for`` iterates over it.
+        """
+        msg_id = None
+        # `del` on the iteration target: pick the message from the inferred
+        # type of the iterable.
+        if isinstance(node, nodes.Delete) and any(
+            self._deleted_iteration_target_cond(t, iter_obj) for t in node.targets
+        ):
+            inferred = utils.safe_infer(iter_obj)
+            if isinstance(inferred, nodes.List):
+                msg_id = "modified-iterating-list"
+            elif isinstance(inferred, nodes.Dict):
+                msg_id = "modified-iterating-dict"
+            elif isinstance(inferred, nodes.Set):
+                msg_id = "modified-iterating-set"
+        elif not isinstance(iter_obj, (nodes.Name, nodes.Attribute)):
+            pass
+        elif self._modified_iterating_list_cond(node, iter_obj):
+            msg_id = "modified-iterating-list"
+        elif self._modified_iterating_dict_cond(node, iter_obj):
+            msg_id = "modified-iterating-dict"
+        elif self._modified_iterating_set_cond(node, iter_obj):
+            msg_id = "modified-iterating-set"
+        if msg_id:
+            self.add_message(
+                msg_id,
+                node=node,
+                args=(iter_obj.repr_name(),),
+                confidence=interfaces.INFERENCE,
+            )
+
+    @staticmethod
+    def _is_node_expr_that_calls_attribute_name(node: nodes.NodeNG) -> bool:
+        # True for expression statements of the shape `name.method(...)`.
+        return (
+            isinstance(node, nodes.Expr)
+            and isinstance(node.value, nodes.Call)
+            and isinstance(node.value.func, nodes.Attribute)
+            and isinstance(node.value.func.expr, nodes.Name)
+        )
+
+    @staticmethod
+    def _common_cond_list_set(
+        node: nodes.Expr,
+        iter_obj: nodes.Name | nodes.Attribute,
+        infer_val: nodes.List | nodes.Set,
+    ) -> bool:
+        # Shared list/set condition: the object whose method is called infers
+        # to the same value as the iterable and carries the same name.
+        iter_obj_name = (
+            iter_obj.attrname
+            if isinstance(iter_obj, nodes.Attribute)
+            else iter_obj.name
+        )
+        return (infer_val == utils.safe_infer(iter_obj)) and (  # type: ignore[no-any-return]
+            node.value.func.expr.name == iter_obj_name
+        )
+
+    @staticmethod
+    def _is_node_assigns_subscript_name(node: nodes.NodeNG) -> bool:
+        # True for assignments of the shape `name[...] = ...`.
+        return isinstance(node, nodes.Assign) and (
+            isinstance(node.targets[0], nodes.Subscript)
+            and (isinstance(node.targets[0].value, nodes.Name))
+        )
+
+    def _modified_iterating_list_cond(
+        self, node: nodes.NodeNG, iter_obj: nodes.Name | nodes.Attribute
+    ) -> bool:
+        """True when ``node`` calls a mutating list method on the iterated list."""
+        if not self._is_node_expr_that_calls_attribute_name(node):
+            return False
+        infer_val = utils.safe_infer(node.value.func.expr)
+        if not isinstance(infer_val, nodes.List):
+            return False
+        return (
+            self._common_cond_list_set(node, iter_obj, infer_val)
+            and node.value.func.attrname in _LIST_MODIFIER_METHODS
+        )
+
+    def _modified_iterating_dict_cond(
+        self, node: nodes.NodeNG, iter_obj: nodes.Name | nodes.Attribute
+    ) -> bool:
+        """True when ``node`` assigns to a key of the dict being iterated."""
+        if not self._is_node_assigns_subscript_name(node):
+            return False
+        # Do not emit when merely updating the same key being iterated
+        if (
+            isinstance(iter_obj, nodes.Name)
+            and iter_obj.name == node.targets[0].value.name
+            and isinstance(iter_obj.parent.target, nodes.AssignName)
+            and isinstance(node.targets[0].slice, nodes.Name)
+            and iter_obj.parent.target.name == node.targets[0].slice.name
+        ):
+            return False
+        infer_val = utils.safe_infer(node.targets[0].value)
+        if not isinstance(infer_val, nodes.Dict):
+            return False
+        if infer_val != utils.safe_infer(iter_obj):
+            return False
+        if isinstance(iter_obj, nodes.Attribute):
+            iter_obj_name = iter_obj.attrname
+        else:
+            iter_obj_name = iter_obj.name
+        return node.targets[0].value.name == iter_obj_name  # type: ignore[no-any-return]
+
+    def _modified_iterating_set_cond(
+        self, node: nodes.NodeNG, iter_obj: nodes.Name | nodes.Attribute
+    ) -> bool:
+        """True when ``node`` calls a mutating set method on the iterated set."""
+        if not self._is_node_expr_that_calls_attribute_name(node):
+            return False
+        infer_val = utils.safe_infer(node.value.func.expr)
+        if not isinstance(infer_val, nodes.Set):
+            return False
+        return (
+            self._common_cond_list_set(node, iter_obj, infer_val)
+            and node.value.func.attrname in _SET_MODIFIER_METHODS
+        )
+
+    def _deleted_iteration_target_cond(
+        self, node: nodes.DelName, iter_obj: nodes.NodeNG
+    ) -> bool:
+        """True when ``node`` deletes a loop target of the ``for`` over ``iter_obj``."""
+        if not isinstance(node, nodes.DelName):
+            return False
+        if not isinstance(iter_obj.parent, nodes.For):
+            return False
+        if not isinstance(
+            iter_obj.parent.target, (nodes.AssignName, nodes.BaseContainer)
+        ):
+            return False
+        # Unpack tuple targets recursively to cover e.g. `for k, v in ...`.
+        return any(
+            t == node.name
+            for t in utils.find_assigned_names_recursive(iter_obj.parent.target)
+        )
+
+
+def register(linter: PyLinter) -> None:
+    """Register this module's checker with ``linter``."""
+    linter.register_checker(ModifiedIterationChecker(linter))
diff --git a/pylint/checkers/nested_min_max.py b/pylint/checkers/nested_min_max.py
index 043bee27a..c8231fe7d 100644
--- a/pylint/checkers/nested_min_max.py
+++ b/pylint/checkers/nested_min_max.py
@@ -1,17 +1,31 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Check for use of nested min/max functions."""
+
 from __future__ import annotations
+
 import copy
 from typing import TYPE_CHECKING
+
 from astroid import nodes, objects
 from astroid.const import Context
+
 from pylint.checkers import BaseChecker
 from pylint.checkers.utils import only_required_for_messages, safe_infer
 from pylint.constants import PY39_PLUS
 from pylint.interfaces import INFERENCE
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-DICT_TYPES = (objects.DictValues, objects.DictKeys, objects.DictItems,
-    nodes.node_classes.Dict)
+
+DICT_TYPES = (
+    objects.DictValues,
+    objects.DictKeys,
+    objects.DictItems,
+    nodes.node_classes.Dict,
+)


 class NestedMinMaxChecker(BaseChecker):
@@ -20,16 +34,135 @@ class NestedMinMaxChecker(BaseChecker):
     This behaviour is intended as it would slow down the checker to check
     for nested call with minimal benefits.
     """
-    FUNC_NAMES = 'builtins.min', 'builtins.max'
-    name = 'nested_min_max'
-    msgs = {'W3301': (
-        "Do not use nested call of '%s'; it's possible to do '%s' instead",
-        'nested-min-max',
-        'Nested calls ``min(1, min(2, 3))`` can be rewritten as ``min(1, 2, 3)``.'
-        )}
-
-    def _is_splattable_expression(self, arg: nodes.NodeNG) ->bool:
+
+    FUNC_NAMES = ("builtins.min", "builtins.max")
+
+    name = "nested_min_max"
+    msgs = {
+        "W3301": (
+            "Do not use nested call of '%s'; it's possible to do '%s' instead",
+            "nested-min-max",
+            "Nested calls ``min(1, min(2, 3))`` can be rewritten as ``min(1, 2, 3)``.",
+        )
+    }
+
+    @classmethod
+    def is_min_max_call(cls, node: nodes.NodeNG) -> bool:
+        """Return True when ``node`` is a call to builtin ``min`` or ``max``."""
+        if not isinstance(node, nodes.Call):
+            return False
+
+        inferred = safe_infer(node.func)
+        return (
+            isinstance(inferred, nodes.FunctionDef)
+            and inferred.qname() in cls.FUNC_NAMES
+        )
+
+    @classmethod
+    def get_redundant_calls(cls, node: nodes.Call) -> list[nodes.Call]:
+        """Return the direct arguments of ``node`` that are redundant nested calls."""
+        return [
+            arg
+            for arg in node.args
+            if (
+                cls.is_min_max_call(arg)
+                and arg.func.name == node.func.name
+                # Nesting is useful for finding the maximum in a matrix.
+                # Allow: max(max([[1, 2, 3], [4, 5, 6]]))
+                # Meaning, redundant call only if parent max call has more than 1 arg.
+                and len(arg.parent.args) > 1
+            )
+        ]
+
+    @only_required_for_messages("nested-min-max")
+    def visit_call(self, node: nodes.Call) -> None:
+        """Flag nested min()/max() calls and build the flattened suggestion."""
+        if not self.is_min_max_call(node):
+            return
+
+        redundant_calls = self.get_redundant_calls(node)
+        if not redundant_calls:
+            return
+
+        # Work on a copy so the original AST node stays untouched.
+        fixed_node = copy.copy(node)
+        while len(redundant_calls) > 0:
+            for i, arg in enumerate(fixed_node.args):
+                # Exclude any calls with generator expressions as there is no
+                # clear better suggestion for them.
+                if isinstance(arg, nodes.Call) and any(
+                    isinstance(a, nodes.GeneratorExp) for a in arg.args
+                ):
+                    return
+
+                if arg in redundant_calls:
+                    fixed_node.args = (
+                        fixed_node.args[:i] + arg.args + fixed_node.args[i + 1 :]
+                    )
+                    break
+
+            redundant_calls = self.get_redundant_calls(fixed_node)
+
+        for idx, arg in enumerate(fixed_node.args):
+            if not isinstance(arg, nodes.Const):
+                if self._is_splattable_expression(arg):
+                    splat_node = nodes.Starred(
+                        ctx=Context.Load,
+                        lineno=arg.lineno,
+                        col_offset=0,
+                        parent=nodes.NodeNG(
+                            lineno=None,
+                            col_offset=None,
+                            end_lineno=None,
+                            end_col_offset=None,
+                            parent=None,
+                        ),
+                        end_lineno=0,
+                        end_col_offset=0,
+                    )
+                    splat_node.value = arg
+                    # NOTE(review): `idx + 1 : idx` is always an empty slice, so
+                    # arguments after the splatted one are dropped from the
+                    # suggestion text — confirm this matches upstream intent.
+                    fixed_node.args = (
+                        fixed_node.args[:idx]
+                        + [splat_node]
+                        + fixed_node.args[idx + 1 : idx]
+                    )
+
+        self.add_message(
+            "nested-min-max",
+            node=node,
+            args=(node.func.name, fixed_node.as_string()),
+            confidence=INFERENCE,
+        )
+
+    def _is_splattable_expression(self, arg: nodes.NodeNG) -> bool:
         """Returns true if expression under min/max could be converted to splat
         expression.
         """
-        pass
+        # Support sequence addition (operator __add__)
+        if isinstance(arg, nodes.BinOp) and arg.op == "+":
+            return self._is_splattable_expression(
+                arg.left
+            ) and self._is_splattable_expression(arg.right)
+        # Support dict merge (operator __or__ in Python 3.9)
+        if isinstance(arg, nodes.BinOp) and arg.op == "|" and PY39_PLUS:
+            return self._is_splattable_expression(
+                arg.left
+            ) and self._is_splattable_expression(arg.right)
+
+        inferred = safe_infer(arg)
+        if inferred and inferred.pytype() in {"builtins.list", "builtins.tuple"}:
+            return True
+        # Fall back to the syntactic node itself when inference failed.
+        if isinstance(
+            inferred or arg,
+            (
+                nodes.List,
+                nodes.Tuple,
+                nodes.Set,
+                nodes.ListComp,
+                nodes.DictComp,
+                *DICT_TYPES,
+            ),
+        ):
+            return True
+
+        return False
+
+
+def register(linter: PyLinter) -> None:
+    """Register this module's checker with ``linter``."""
+    linter.register_checker(NestedMinMaxChecker(linter))
diff --git a/pylint/checkers/newstyle.py b/pylint/checkers/newstyle.py
index c0588ddc5..0c2c559fe 100644
--- a/pylint/checkers/newstyle.py
+++ b/pylint/checkers/newstyle.py
@@ -1,17 +1,35 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Check for new / old style related problems."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 import astroid
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
-from pylint.checkers.utils import has_known_bases, node_frame_class, only_required_for_messages
+from pylint.checkers.utils import (
+    has_known_bases,
+    node_frame_class,
+    only_required_for_messages,
+)
 from pylint.typing import MessageDefinitionTuple
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-MSGS: dict[str, MessageDefinitionTuple] = {'E1003': (
-    'Bad first argument %r given to super()', 'bad-super-call',
-    'Used when another argument than the current class is given as first argument of the super builtin.'
-    )}
+
+MSGS: dict[str, MessageDefinitionTuple] = {
+    "E1003": (
+        "Bad first argument %r given to super()",
+        "bad-super-call",
+        "Used when another argument than the current class is given as "
+        "first argument of the super builtin.",
+    )
+}


 class NewStyleConflictChecker(BaseChecker):
@@ -21,12 +39,91 @@ class NewStyleConflictChecker(BaseChecker):
     * use of property, __slots__, super
     * "super" usage
     """
-    name = 'newstyle'
+
+    # configuration section name
+    name = "newstyle"
+    # messages
     msgs = MSGS
+    # configuration options
     options = ()

-    @only_required_for_messages('bad-super-call')
-    def visit_functiondef(self, node: nodes.FunctionDef) ->None:
+    @only_required_for_messages("bad-super-call")
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
         """Check use of super."""
-        pass
+        # ignore actual functions or method within a new style class
+        if not node.is_method():
+            return
+        klass = node.parent.frame()
+        for stmt in node.nodes_of_class(nodes.Call):
+            if node_frame_class(stmt) != node_frame_class(node):
+                # Don't look down in other scopes.
+                continue
+
+            # Only calls of the shape `<something>.method(...)` can be
+            # `super(...).method(...)`.
+            expr = stmt.func
+            if not isinstance(expr, nodes.Attribute):
+                continue
+
+            call = expr.expr
+            # skip the test if using super
+            if not (
+                isinstance(call, nodes.Call)
+                and isinstance(call.func, nodes.Name)
+                and call.func.name == "super"
+            ):
+                continue
+
+            # super should not be used on an old style class
+            if klass.newstyle or not has_known_bases(klass):
+                # super first arg should not be the class
+                if not call.args:
+                    continue
+
+                # calling super(type(self), self) can lead to recursion loop
+                # in derived classes
+                arg0 = call.args[0]
+                if (
+                    isinstance(arg0, nodes.Call)
+                    and isinstance(arg0.func, nodes.Name)
+                    and arg0.func.name == "type"
+                ):
+                    self.add_message("bad-super-call", node=call, args=("type",))
+                    continue
+
+                # calling super(self.__class__, self) can lead to recursion loop
+                # in derived classes
+                if (
+                    len(call.args) >= 2
+                    and isinstance(call.args[1], nodes.Name)
+                    and call.args[1].name == "self"
+                    and isinstance(arg0, nodes.Attribute)
+                    and arg0.attrname == "__class__"
+                ):
+                    self.add_message(
+                        "bad-super-call", node=call, args=("self.__class__",)
+                    )
+                    continue
+
+                try:
+                    supcls = call.args and next(call.args[0].infer(), None)
+                except astroid.InferenceError:
+                    continue
+
+                # If the supcls is in the ancestors of klass super can be used to skip
+                # a step in the mro() and get a method from a higher parent
+                if klass is not supcls and all(i != supcls for i in klass.ancestors()):
+                    name = None
+                    # if supcls is not Uninferable, then supcls was inferred
+                    # and use its name. Otherwise, try to look
+                    # for call.args[0].name
+                    if supcls:
+                        name = supcls.name
+                    elif call.args and hasattr(call.args[0], "name"):
+                        name = call.args[0].name
+                    if name:
+                        self.add_message("bad-super-call", node=call, args=(name,))
+
     visit_asyncfunctiondef = visit_functiondef
+
+
+def register(linter: PyLinter) -> None:
+    """Register this module's checker with ``linter``."""
+    linter.register_checker(NewStyleConflictChecker(linter))
diff --git a/pylint/checkers/non_ascii_names.py b/pylint/checkers/non_ascii_names.py
index 3105ba332..693d8529f 100644
--- a/pylint/checkers/non_ascii_names.py
+++ b/pylint/checkers/non_ascii_names.py
@@ -1,3 +1,7 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """All alphanumeric unicode character are allowed in Python but due
 to similarities in how they look they can be confused.

@@ -5,12 +9,22 @@ See: https://peps.python.org/pep-0672/#confusing-features

 The following checkers are intended to make users are aware of these issues.
 """
+
 from __future__ import annotations
+
 from astroid import nodes
+
 from pylint import constants, interfaces, lint
 from pylint.checkers import base_checker, utils
-NON_ASCII_HELP = """Used when the name contains at least one non-ASCII unicode character. See https://peps.python.org/pep-0672/#confusing-features for a background why this could be bad. 
-If your programming guideline defines that you are programming in English, then there should be no need for non ASCII characters in Python Names. If not you can simply disable this check."""
+
+NON_ASCII_HELP = (
+    "Used when the name contains at least one non-ASCII unicode character. "
+    "See https://peps.python.org/pep-0672/#confusing-features"
+    " for a background why this could be bad. \n"
+    "If your programming guideline defines that you are programming in "
+    "English, then there should be no need for non ASCII characters in "
+    "Python Names. If not you can simply disable this check."
+)


 class NonAsciiNameChecker(base_checker.BaseChecker):
@@ -19,30 +33,142 @@ class NonAsciiNameChecker(base_checker.BaseChecker):
     Note: This check only checks Names, so it ignores the content of
           docstrings and comments!
     """
-    msgs = {'C2401': (
-        '%s name "%s" contains a non-ASCII character, consider renaming it.',
-        'non-ascii-name', NON_ASCII_HELP, {'old_names': [('C0144',
-        'old-non-ascii-name')]}), 'W2402': (
-        '%s name "%s" contains a non-ASCII character.',
-        'non-ascii-file-name',
-        "Under python 3.5, PEP 3131 allows non-ascii identifiers, but not non-ascii file names.Since Python 3.5, even though Python supports UTF-8 files, some editors or tools don't."
-        ), 'C2403': (
-        '%s name "%s" contains a non-ASCII character, use an ASCII-only alias for import.'
-        , 'non-ascii-module-import', NON_ASCII_HELP)}
-    name = 'NonASCII-Checker'
-
-    def _check_name(self, node_type: str, name: (str | None), node: nodes.
-        NodeNG) ->None:
+
+    msgs = {
+        "C2401": (
+            '%s name "%s" contains a non-ASCII character, consider renaming it.',
+            "non-ascii-name",
+            NON_ASCII_HELP,
+            {"old_names": [("C0144", "old-non-ascii-name")]},
+        ),
+        # First %s will always be "file"
+        "W2402": (
+            '%s name "%s" contains a non-ASCII character.',
+            "non-ascii-file-name",
+            (
+                # Some = PyCharm at the time of writing didn't display the non_ascii_name_loł
+                # files. That's also why this is a warning and not only a convention!
+                "Under python 3.5, PEP 3131 allows non-ascii identifiers, but not non-ascii file names."
+                "Since Python 3.5, even though Python supports UTF-8 files, some editors or tools "
+                "don't."
+            ),
+        ),
+        # First %s will always be "module"
+        "C2403": (
+            '%s name "%s" contains a non-ASCII character, use an ASCII-only alias for import.',
+            "non-ascii-module-import",
+            NON_ASCII_HELP,
+        ),
+    }
+
+    name = "NonASCII-Checker"
+
+    def _check_name(self, node_type: str, name: str | None, node: nodes.NodeNG) -> None:
         """Check whether a name is using non-ASCII characters."""
-        pass
+        if name is None:
+            # For some nodes i.e. *kwargs from a dict, the name will be empty
+            return
+
+        # Only emit when at least one character is outside the ASCII range.
+        if not str(name).isascii():
+            type_label = constants.HUMAN_READABLE_TYPES[node_type]
+            args = (type_label.capitalize(), name)
+
+            msg = "non-ascii-name"
+
+            # Some node types have customized messages
+            if node_type == "file":
+                msg = "non-ascii-file-name"
+            elif node_type == "module":
+                msg = "non-ascii-module-import"
+
+            self.add_message(msg, node=node, args=args, confidence=interfaces.HIGH)
+
+    @utils.only_required_for_messages("non-ascii-name", "non-ascii-file-name")
+    def visit_module(self, node: nodes.Module) -> None:
+        """Check the module (file) name, using only the last dotted component."""
+        self._check_name("file", node.name.split(".")[-1], node)
+
+    @utils.only_required_for_messages("non-ascii-name")
+    def visit_functiondef(
+        self, node: nodes.FunctionDef | nodes.AsyncFunctionDef
+    ) -> None:
+        """Check the function name and all of its argument names."""
+        self._check_name("function", node.name, node)
+
+        # Check argument names
+        arguments = node.args
+
+        # Check position only arguments
+        if arguments.posonlyargs:
+            for pos_only_arg in arguments.posonlyargs:
+                self._check_name("argument", pos_only_arg.name, pos_only_arg)
+
+        # Check "normal" arguments
+        if arguments.args:
+            for arg in arguments.args:
+                self._check_name("argument", arg.name, arg)
+
+        # Check key word only arguments
+        if arguments.kwonlyargs:
+            for kwarg in arguments.kwonlyargs:
+                self._check_name("argument", kwarg.name, kwarg)
+
     visit_asyncfunctiondef = visit_functiondef

-    @utils.only_required_for_messages('non-ascii-name')
-    def visit_assignname(self, node: nodes.AssignName) ->None:
+    @utils.only_required_for_messages("non-ascii-name")
+    def visit_global(self, node: nodes.Global) -> None:
+        # Names in a `global` statement are reported with the "const" node type.
+        for name in node.names:
+            self._check_name("const", name, node)
+
+    @utils.only_required_for_messages("non-ascii-name")
+    def visit_assignname(self, node: nodes.AssignName) -> None:
         """Check module level assigned names."""
-        pass
+        # The NameChecker from which this Checker originates knows a lot of different
+        # versions of variables, i.e. constants, inline variables etc.
+        # To simplify we use only `variable` here, as we don't need to apply different
+        # rules to different types of variables.
+        frame = node.frame()
+
+        if isinstance(frame, nodes.FunctionDef):
+            if node.parent in frame.body:
+                # Only perform the check if the assignment was done in within the body
+                # of the function (and not the function parameter definition
+                # (will be handled in visit_functiondef)
+                # or within a decorator (handled in visit_call)
+                self._check_name("variable", node.name, node)
+        elif isinstance(frame, nodes.ClassDef):
+            self._check_name("attr", node.name, node)
+        else:
+            # Possibilities here:
+            # - isinstance(node.assign_type(), nodes.Comprehension) == inlinevar
+            # - isinstance(frame, nodes.Module) == variable (constant?)
+            # - some other kind of assignment missed but still most likely a variable
+            self._check_name("variable", node.name, node)

-    @utils.only_required_for_messages('non-ascii-name')
-    def visit_call(self, node: nodes.Call) ->None:
+    @utils.only_required_for_messages("non-ascii-name")
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
+        """Check the class name and its directly defined instance attributes."""
+        self._check_name("class", node.name, node)
+        # Skip attributes already defined in an ancestor class.
+        for attr, anodes in node.instance_attrs.items():
+            if not any(node.instance_attr_ancestors(attr)):
+                self._check_name("attr", attr, anodes[0])
+
+    def _check_module_import(self, node: nodes.ImportFrom | nodes.Import) -> None:
+        # The alias (if any) is the name actually bound in the namespace.
+        for module_name, alias in node.names:
+            name = alias or module_name
+            self._check_name("module", name, node)
+
+    @utils.only_required_for_messages("non-ascii-name", "non-ascii-module-import")
+    def visit_import(self, node: nodes.Import) -> None:
+        self._check_module_import(node)
+
+    @utils.only_required_for_messages("non-ascii-name", "non-ascii-module-import")
+    def visit_importfrom(self, node: nodes.ImportFrom) -> None:
+        self._check_module_import(node)
+
+    @utils.only_required_for_messages("non-ascii-name")
+    def visit_call(self, node: nodes.Call) -> None:
         """Check if the used keyword args are correct."""
-        pass
+        for keyword in node.keywords:
+            self._check_name("argument", keyword.arg, keyword)
+
+
+def register(linter: lint.PyLinter) -> None:
+    """Register this module's checker with ``linter``."""
+    linter.register_checker(NonAsciiNameChecker(linter))
diff --git a/pylint/checkers/raw_metrics.py b/pylint/checkers/raw_metrics.py
index 205644431..ef4535345 100644
--- a/pylint/checkers/raw_metrics.py
+++ b/pylint/checkers/raw_metrics.py
@@ -1,17 +1,43 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import tokenize
 from typing import TYPE_CHECKING, Any, Literal, cast
+
 from pylint.checkers import BaseTokenChecker
 from pylint.reporters.ureports.nodes import Paragraph, Section, Table, Text
 from pylint.utils import LinterStats, diff_string
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


-def report_raw_stats(sect: Section, stats: LinterStats, old_stats: (
-    LinterStats | None)) ->None:
+def report_raw_stats(
+    sect: Section,
+    stats: LinterStats,
+    old_stats: LinterStats | None,
+) -> None:
     """Calculate percentage of code / doc / comment / empty."""
-    pass
+    total_lines = stats.code_type_count["total"]
+    sect.insert(0, Paragraph([Text(f"{total_lines} lines have been analyzed\n")]))
+    # Flat list feeding a 5-column Table: the first 5 entries are the header row.
+    lines = ["type", "number", "%", "previous", "difference"]
+    for node_type in ("code", "docstring", "comment", "empty"):
+        node_type = cast(Literal["code", "docstring", "comment", "empty"], node_type)
+        total = stats.code_type_count[node_type]
+        percent = float(total * 100) / total_lines if total_lines else None
+        old = old_stats.code_type_count[node_type] if old_stats else None
+        diff_str = diff_string(old, total) if old else None
+        # "NC" (not computed) when there is no total, no previous run, or no diff.
+        lines += [
+            node_type,
+            str(total),
+            f"{percent:.2f}" if percent is not None else "NC",
+            str(old) if old else "NC",
+            diff_str if diff_str else "NC",
+        ]
+    sect.append(Table(children=lines, cols=5, rheaders=1))


 class RawMetricsChecker(BaseTokenChecker):
@@ -24,24 +50,61 @@ class RawMetricsChecker(BaseTokenChecker):
     * total number of comments lines
     * total number of empty lines
     """
-    name = 'metrics'
+
+    # configuration section name
+    name = "metrics"
+    # configuration options
     options = ()
+    # messages
     msgs: Any = {}
-    reports = ('RP0701', 'Raw metrics', report_raw_stats),
+    # reports
+    reports = (("RP0701", "Raw metrics", report_raw_stats),)

-    def open(self) ->None:
+    def open(self) -> None:
         """Init statistics."""
-        pass
+        self.linter.stats.reset_code_count()

-    def process_tokens(self, tokens: list[tokenize.TokenInfo]) ->None:
+    def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
         """Update stats."""
-        pass
+        i = 0
+        tokens = list(tokens)
+        while i < len(tokens):
+            i, lines_number, line_type = get_type(tokens, i)
+            self.linter.stats.code_type_count["total"] += lines_number
+            self.linter.stats.code_type_count[line_type] += lines_number


# Token types that never make a line count as "code" on their own.
JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)


def get_type(
    tokens: list[tokenize.TokenInfo], start_index: int
) -> tuple[int, int, Literal["code", "docstring", "comment", "empty"]]:
    """Return the line type : docstring, comment, code, empty.

    Returns ``(next_index, line_count, line_type)`` where ``next_index`` is
    the first token after this construct and ``line_count`` is how many
    physical lines it spans.
    """
    index = start_index
    first_pos = tokens[index][2]
    last_pos = first_pos
    kind: str | None = None
    # Consume every token whose start row matches the first token's row.
    while index < len(tokens) and tokens[index][2][0] == first_pos[0]:
        token = tokens[index]
        last_pos = token[3]
        if kind is None:
            if token[0] == tokenize.STRING:
                kind = "docstring"
            elif token[0] == tokenize.COMMENT:
                kind = "comment"
            elif token[0] not in JUNK:
                kind = "code"
        index += 1
    if kind is None:
        kind = "empty"
    elif index < len(tokens) and tokens[index][0] == tokenize.NEWLINE:
        # A construct ending on a later row (e.g. a multi-line string) is
        # followed by its NEWLINE token; fold it into this entry.
        index += 1
    # Mypy fails to infer the literal of `kind`.
    return index, last_pos[0] - first_pos[0] + 1, kind  # type: ignore[return-value]
+
+
def register(linter: PyLinter) -> None:
    """Register the raw-metrics checker with *linter*."""
    checker = RawMetricsChecker(linter)
    linter.register_checker(checker)
diff --git a/pylint/checkers/refactoring/implicit_booleaness_checker.py b/pylint/checkers/refactoring/implicit_booleaness_checker.py
index b7c28e71f..5818c2f4a 100644
--- a/pylint/checkers/refactoring/implicit_booleaness_checker.py
+++ b/pylint/checkers/refactoring/implicit_booleaness_checker.py
@@ -1,12 +1,27 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import itertools
+
 import astroid
 from astroid import bases, nodes, util
+
 from pylint import checkers
 from pylint.checkers import utils
 from pylint.interfaces import HIGH, INFERENCE


def _is_constant_zero(node: str | nodes.NodeNG) -> bool:
    """Return True for a literal ``0`` constant (but not ``False``)."""
    if not isinstance(node, astroid.Const):
        return False
    # bool is a subclass of int, so `False == 0` is True; exclude it explicitly.
    return node.value == 0 and node.value is not False
+
+
 class ImplicitBooleanessChecker(checkers.BaseChecker):
     """Checks for incorrect usage of comparisons or len() inside conditions.

@@ -45,52 +60,286 @@ class ImplicitBooleanessChecker(checkers.BaseChecker):
     * comparison such as variable == empty_literal:
     * comparison such as variable != empty_literal:
     """
-    name = 'refactoring'
-    msgs = {'C1802': (
-        'Do not use `len(SEQUENCE)` without comparison to determine if a sequence is empty'
-        , 'use-implicit-booleaness-not-len',
-        "Empty sequences are considered false in a boolean context. You can either remove the call to 'len' (``if not x``) or compare the length against a scalar (``if len(x) > 1``)."
-        , {'old_names': [('C1801', 'len-as-condition')]}), 'C1803': (
-        '"%s" can be simplified to "%s", if it is strictly a sequence, as an empty %s is falsey'
-        , 'use-implicit-booleaness-not-comparison',
-        'Empty sequences are considered false in a boolean context. Following this check blindly in weakly typed code base can create hard to debug issues. If the value can be something else that is falsey but not a sequence (for example ``None``, an empty string, or ``0``) the code will not be equivalent.'
-        ), 'C1804': (
-        '"%s" can be simplified to "%s", if it is striclty a string, as an empty string is falsey'
-        , 'use-implicit-booleaness-not-comparison-to-string',
-        'Empty string are considered false in a boolean context. Following this check blindly in weakly typed code base can create hard to debug issues. If the value can be something else that is falsey but not a string (for example ``None``, an empty sequence, or ``0``) the code will not be equivalent.'
-        , {'default_enabled': False, 'old_names': [('C1901',
-        'compare-to-empty-string')]}), 'C1805': (
-        '"%s" can be simplified to "%s", if it is strictly an int, as 0 is falsey'
-        , 'use-implicit-booleaness-not-comparison-to-zero',
-        '0 is considered false in a boolean context. Following this check blindly in weakly typed code base can create hard to debug issues. If the value can be something else that is falsey but not an int (for example ``None``, an empty string, or an empty sequence) the code will not be equivalent.'
-        , {'default_enabled': False, 'old_names': [('C2001',
-        'compare-to-zero')]})}
+
+    name = "refactoring"
+    msgs = {
+        "C1802": (
+            "Do not use `len(SEQUENCE)` without comparison to determine if a sequence is empty",
+            "use-implicit-booleaness-not-len",
+            "Empty sequences are considered false in a boolean context. You can either"
+            " remove the call to 'len' (``if not x``) or compare the length against a"
+            " scalar (``if len(x) > 1``).",
+            {"old_names": [("C1801", "len-as-condition")]},
+        ),
+        "C1803": (
+            '"%s" can be simplified to "%s", if it is strictly a sequence, as an empty %s is falsey',
+            "use-implicit-booleaness-not-comparison",
+            "Empty sequences are considered false in a boolean context. Following this"
+            " check blindly in weakly typed code base can create hard to debug issues."
+            " If the value can be something else that is falsey but not a sequence (for"
+            " example ``None``, an empty string, or ``0``) the code will not be "
+            "equivalent.",
+        ),
+        "C1804": (
+            '"%s" can be simplified to "%s", if it is striclty a string, as an empty string is falsey',
+            "use-implicit-booleaness-not-comparison-to-string",
+            "Empty string are considered false in a boolean context. Following this"
+            " check blindly in weakly typed code base can create hard to debug issues."
+            " If the value can be something else that is falsey but not a string (for"
+            " example ``None``, an empty sequence, or ``0``) the code will not be "
+            "equivalent.",
+            {
+                "default_enabled": False,
+                "old_names": [("C1901", "compare-to-empty-string")],
+            },
+        ),
+        "C1805": (
+            '"%s" can be simplified to "%s", if it is strictly an int, as 0 is falsey',
+            "use-implicit-booleaness-not-comparison-to-zero",
+            "0 is considered false in a boolean context. Following this"
+            " check blindly in weakly typed code base can create hard to debug issues."
+            " If the value can be something else that is falsey but not an int (for"
+            " example ``None``, an empty string, or an empty sequence) the code will not be "
+            "equivalent.",
+            {"default_enabled": False, "old_names": [("C2001", "compare-to-zero")]},
+        ),
+    }
+
     options = ()
-    _operators = {'!=', '==', 'is not', 'is'}
+    _operators = {"!=", "==", "is not", "is"}
+
+    @utils.only_required_for_messages("use-implicit-booleaness-not-len")
+    def visit_call(self, node: nodes.Call) -> None:
+        # a len(S) call is used inside a test condition
+        # could be if, while, assert or if expression statement
+        # e.g. `if len(S):`
+        if not utils.is_call_of_name(node, "len"):
+            return
+        # the len() call could also be nested together with other
+        # boolean operations, e.g. `if z or len(x):`
+        parent = node.parent
+        while isinstance(parent, nodes.BoolOp):
+            parent = parent.parent
+        # we're finally out of any nested boolean operations so check if
+        # this len() call is part of a test condition
+        if not utils.is_test_condition(node, parent):
+            return
+        len_arg = node.args[0]
+        generator_or_comprehension = (
+            nodes.ListComp,
+            nodes.SetComp,
+            nodes.DictComp,
+            nodes.GeneratorExp,
+        )
+        if isinstance(len_arg, generator_or_comprehension):
+            # The node is a generator or comprehension as in len([x for x in ...])
+            self.add_message(
+                "use-implicit-booleaness-not-len",
+                node=node,
+                confidence=HIGH,
+            )
+            return
+        try:
+            instance = next(len_arg.infer())
+        except astroid.InferenceError:
+            # Probably undefined-variable, abort check
+            return
+        mother_classes = self.base_names_of_instance(instance)
+        affected_by_pep8 = any(
+            t in mother_classes for t in ("str", "tuple", "list", "set")
+        )
+        if "range" in mother_classes or (
+            affected_by_pep8 and not self.instance_has_bool(instance)
+        ):
+            self.add_message(
+                "use-implicit-booleaness-not-len",
+                node=node,
+                confidence=INFERENCE,
+            )

-    @utils.only_required_for_messages('use-implicit-booleaness-not-len')
-    def visit_unaryop(self, node: nodes.UnaryOp) ->None:
+    @staticmethod
+    def instance_has_bool(class_def: nodes.ClassDef) -> bool:
+        try:
+            class_def.getattr("__bool__")
+            return True
+        except astroid.AttributeInferenceError:
+            ...
+        return False
+
+    @utils.only_required_for_messages("use-implicit-booleaness-not-len")
+    def visit_unaryop(self, node: nodes.UnaryOp) -> None:
         """`not len(S)` must become `not S` regardless if the parent block is a test
         condition or something else (boolean expression) e.g. `if not len(S):`.
         """
-        pass
+        if (
+            isinstance(node, nodes.UnaryOp)
+            and node.op == "not"
+            and utils.is_call_of_name(node.operand, "len")
+        ):
+            self.add_message(
+                "use-implicit-booleaness-not-len", node=node, confidence=HIGH
+            )
+
+    @utils.only_required_for_messages(
+        "use-implicit-booleaness-not-comparison",
+        "use-implicit-booleaness-not-comparison-to-string",
+        "use-implicit-booleaness-not-comparison-to-zero",
+    )
+    def visit_compare(self, node: nodes.Compare) -> None:
+        if self.linter.is_message_enabled("use-implicit-booleaness-not-comparison"):
+            self._check_use_implicit_booleaness_not_comparison(node)
+        if self.linter.is_message_enabled(
+            "use-implicit-booleaness-not-comparison-to-zero"
+        ) or self.linter.is_message_enabled(
+            "use-implicit-booleaness-not-comparison-to-str"
+        ):
+            self._check_compare_to_str_or_zero(node)
+
+    def _check_compare_to_str_or_zero(self, node: nodes.Compare) -> None:
+        # note: astroid.Compare has the left most operand in node.left
+        # while the rest are a list of tuples in node.ops
+        # the format of the tuple is ('compare operator sign', node)
+        # here we squash everything into `ops` to make it easier for processing later
+        ops: list[tuple[str, nodes.NodeNG]] = [("", node.left), *node.ops]
+        iter_ops = iter(ops)
+        all_ops = list(itertools.chain(*iter_ops))
+        for ops_idx in range(len(all_ops) - 2):
+            op_2 = all_ops[ops_idx + 1]
+            if op_2 not in self._operators:
+                continue
+            op_1 = all_ops[ops_idx]
+            op_3 = all_ops[ops_idx + 2]
+            error_detected = False
+            if self.linter.is_message_enabled(
+                "use-implicit-booleaness-not-comparison-to-zero"
+            ):
+                # 0 ?? X
+                if _is_constant_zero(op_1):
+                    error_detected = True
+                    op = op_3
+                # X ?? 0
+                elif _is_constant_zero(op_3):
+                    error_detected = True
+                    op = op_1
+                if error_detected:
+                    original = f"{op_1.as_string()} {op_2} {op_3.as_string()}"
+                    suggestion = (
+                        op.as_string()
+                        if op_2 in {"!=", "is not"}
+                        else f"not {op.as_string()}"
+                    )
+                    self.add_message(
+                        "use-implicit-booleaness-not-comparison-to-zero",
+                        args=(original, suggestion),
+                        node=node,
+                        confidence=HIGH,
+                    )
+                    error_detected = False
+            if self.linter.is_message_enabled(
+                "use-implicit-booleaness-not-comparison-to-str"
+            ):
+                node_name = ""
+                # x ?? ""
+                if utils.is_empty_str_literal(op_1):
+                    error_detected = True
+                    node_name = op_3.as_string()
+                # '' ?? X
+                elif utils.is_empty_str_literal(op_3):
+                    error_detected = True
+                    node_name = op_1.as_string()
+                if error_detected:
+                    suggestion = (
+                        f"not {node_name}" if op_2 in {"==", "is"} else node_name
+                    )
+                    self.add_message(
+                        "use-implicit-booleaness-not-comparison-to-string",
+                        args=(node.as_string(), suggestion),
+                        node=node,
+                        confidence=HIGH,
+                    )

-    def _check_use_implicit_booleaness_not_comparison(self, node: nodes.Compare
-        ) ->None:
+    def _check_use_implicit_booleaness_not_comparison(
+        self, node: nodes.Compare
+    ) -> None:
         """Check for left side and right side of the node for empty literals."""
-        pass
+        is_left_empty_literal = utils.is_base_container(
+            node.left
+        ) or utils.is_empty_dict_literal(node.left)
+
+        # Check both left-hand side and right-hand side for literals
+        for operator, comparator in node.ops:
+            is_right_empty_literal = utils.is_base_container(
+                comparator
+            ) or utils.is_empty_dict_literal(comparator)
+            # Using Exclusive OR (XOR) to compare between two side.
+            # If two sides are both literal, it should be different error.
+            if is_right_empty_literal ^ is_left_empty_literal:
+                # set target_node to opposite side of literal
+                target_node = node.left if is_right_empty_literal else comparator
+                literal_node = comparator if is_right_empty_literal else node.left
+                # Infer node to check
+                target_instance = utils.safe_infer(target_node)
+                if target_instance is None:
+                    continue
+                mother_classes = self.base_names_of_instance(target_instance)
+                is_base_comprehension_type = any(
+                    t in mother_classes for t in ("tuple", "list", "dict", "set")
+                )
+
+                # Only time we bypass check is when target_node is not inherited by
+                # collection literals and have its own __bool__ implementation.
+                if not is_base_comprehension_type and self.instance_has_bool(
+                    target_instance
+                ):
+                    continue
+
+                # No need to check for operator when visiting compare node
+                if operator in {"==", "!=", ">=", ">", "<=", "<"}:
+                    self.add_message(
+                        "use-implicit-booleaness-not-comparison",
+                        args=self._implicit_booleaness_message_args(
+                            literal_node, operator, target_node
+                        ),
+                        node=node,
+                        confidence=HIGH,
+                    )
+
+    def _get_node_description(self, node: nodes.NodeNG) -> str:
+        return {
+            nodes.List: "list",
+            nodes.Tuple: "tuple",
+            nodes.Dict: "dict",
+            nodes.Const: "str",
+        }.get(type(node), "iterable")

-    def _implicit_booleaness_message_args(self, literal_node: nodes.NodeNG,
-        operator: str, target_node: nodes.NodeNG) ->tuple[str, str, str]:
+    def _implicit_booleaness_message_args(
+        self, literal_node: nodes.NodeNG, operator: str, target_node: nodes.NodeNG
+    ) -> tuple[str, str, str]:
         """Helper to get the right message for "use-implicit-booleaness-not-comparison"."""
-        pass
+        description = self._get_node_description(literal_node)
+        collection_literal = {
+            "list": "[]",
+            "tuple": "()",
+            "dict": "{}",
+        }.get(description, "iterable")
+        instance_name = "x"
+        if isinstance(target_node, nodes.Call) and target_node.func:
+            instance_name = f"{target_node.func.as_string()}(...)"
+        elif isinstance(target_node, (nodes.Attribute, nodes.Name)):
+            instance_name = target_node.as_string()
+        original_comparison = f"{instance_name} {operator} {collection_literal}"
+        suggestion = f"{instance_name}" if operator == "!=" else f"not {instance_name}"
+        return original_comparison, suggestion, description

     @staticmethod
-    def base_names_of_instance(node: (util.UninferableBase | bases.Instance)
-        ) ->list[str]:
+    def base_names_of_instance(
+        node: util.UninferableBase | bases.Instance,
+    ) -> list[str]:
         """Return all names inherited by a class instance or those returned by a
         function.

         The inherited names include 'object'.
         """
-        pass
+        if isinstance(node, bases.Instance):
+            return [node.name] + [x.name for x in node.ancestors()]
+        return []
diff --git a/pylint/checkers/refactoring/not_checker.py b/pylint/checkers/refactoring/not_checker.py
index dd1e2cd1d..c46b477b5 100644
--- a/pylint/checkers/refactoring/not_checker.py
+++ b/pylint/checkers/refactoring/not_checker.py
@@ -1,5 +1,10 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 import astroid
 from astroid import nodes
+
 from pylint import checkers
 from pylint.checkers import utils

@@ -10,13 +15,70 @@ class NotChecker(checkers.BaseChecker):
     - "not not" should trigger a warning
     - "not" followed by a comparison should trigger a warning
     """
-    msgs = {'C0117': ('Consider changing "%s" to "%s"',
-        'unnecessary-negation',
-        'Used when a boolean expression contains an unneeded negation, e.g. when two negation operators cancel each other out.'
-        , {'old_names': [('C0113', 'unneeded-not')]})}
-    name = 'refactoring'
-    reverse_op = {'<': '>=', '<=': '>', '>': '<=', '>=': '<', '==': '!=',
-        '!=': '==', 'in': 'not in', 'is': 'is not'}
-    skipped_nodes = nodes.Set,
-    skipped_classnames = [f'builtins.{qname}' for qname in ('set', 'frozenset')
-        ]
+
+    msgs = {
+        "C0117": (
+            'Consider changing "%s" to "%s"',
+            "unnecessary-negation",
+            "Used when a boolean expression contains an unneeded negation, "
+            "e.g. when two negation operators cancel each other out.",
+            {"old_names": [("C0113", "unneeded-not")]},
+        )
+    }
+    name = "refactoring"
+    reverse_op = {
+        "<": ">=",
+        "<=": ">",
+        ">": "<=",
+        ">=": "<",
+        "==": "!=",
+        "!=": "==",
+        "in": "not in",
+        "is": "is not",
+    }
+    # sets are not ordered, so for example "not set(LEFT_VALS) <= set(RIGHT_VALS)" is
+    # not equivalent to "set(LEFT_VALS) > set(RIGHT_VALS)"
+    skipped_nodes = (nodes.Set,)
+    # 'builtins' py3, '__builtin__' py2
+    skipped_classnames = [f"builtins.{qname}" for qname in ("set", "frozenset")]
+
+    @utils.only_required_for_messages("unnecessary-negation")
+    def visit_unaryop(self, node: nodes.UnaryOp) -> None:
+        if node.op != "not":
+            return
+        operand = node.operand
+
+        if isinstance(operand, nodes.UnaryOp) and operand.op == "not":
+            self.add_message(
+                "unnecessary-negation",
+                node=node,
+                args=(node.as_string(), operand.operand.as_string()),
+            )
+        elif isinstance(operand, nodes.Compare):
+            left = operand.left
+            # ignore multiple comparisons
+            if len(operand.ops) > 1:
+                return
+            operator, right = operand.ops[0]
+            if operator not in self.reverse_op:
+                return
+            # Ignore __ne__ as function of __eq__
+            frame = node.frame()
+            if frame.name == "__ne__" and operator == "==":
+                return
+            for _type in (utils.node_type(left), utils.node_type(right)):
+                if not _type:
+                    return
+                if isinstance(_type, self.skipped_nodes):
+                    return
+                if (
+                    isinstance(_type, astroid.Instance)
+                    and _type.qname() in self.skipped_classnames
+                ):
+                    return
+            suggestion = (
+                f"{left.as_string()} {self.reverse_op[operator]} {right.as_string()}"
+            )
+            self.add_message(
+                "unnecessary-negation", node=node, args=(node.as_string(), suggestion)
+            )
diff --git a/pylint/checkers/refactoring/recommendation_checker.py b/pylint/checkers/refactoring/recommendation_checker.py
index 187ef26fc..c5b19e1a5 100644
--- a/pylint/checkers/refactoring/recommendation_checker.py
+++ b/pylint/checkers/refactoring/recommendation_checker.py
@@ -1,64 +1,454 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import astroid
 from astroid import nodes
+
 from pylint import checkers
 from pylint.checkers import utils
 from pylint.interfaces import HIGH, INFERENCE


 class RecommendationChecker(checkers.BaseChecker):
-    name = 'refactoring'
-    msgs = {'C0200': (
-        'Consider using enumerate instead of iterating with range and len',
-        'consider-using-enumerate',
-        'Emitted when code that iterates with range and len is encountered. Such code can be simplified by using the enumerate builtin.'
-        ), 'C0201': (
-        'Consider iterating the dictionary directly instead of calling .keys()'
-        , 'consider-iterating-dictionary',
-        'Emitted when the keys of a dictionary are iterated through the ``.keys()`` method or when ``.keys()`` is used for a membership check. It is enough to iterate through the dictionary itself, ``for key in dictionary``. For membership checks, ``if key in dictionary`` is faster.'
-        ), 'C0206': ('Consider iterating with .items()',
-        'consider-using-dict-items',
-        'Emitted when iterating over the keys of a dictionary and accessing the value by index lookup. Both the key and value can be accessed by iterating using the .items() method of the dictionary instead.'
-        ), 'C0207': ('Use %s instead', 'use-maxsplit-arg',
-        'Emitted when accessing only the first or last element of str.split(). The first and last element can be accessed by using str.split(sep, maxsplit=1)[0] or str.rsplit(sep, maxsplit=1)[-1] instead.'
-        ), 'C0208': ('Use a sequence type when iterating over values',
-        'use-sequence-for-iteration',
-        'When iterating over values, sequence types (e.g., ``lists``, ``tuples``, ``ranges``) are more efficient than ``sets``.'
-        ), 'C0209': (
-        'Formatting a regular string which could be an f-string',
-        'consider-using-f-string',
-        'Used when we detect a string that is being formatted with format() or % which could potentially be an f-string. The use of f-strings is preferred. Requires Python 3.6 and ``py-version >= 3.6``.'
-        )}
-
-    def _check_use_maxsplit_arg(self, node: nodes.Call) ->None:
+    name = "refactoring"
+    msgs = {
+        "C0200": (
+            "Consider using enumerate instead of iterating with range and len",
+            "consider-using-enumerate",
+            "Emitted when code that iterates with range and len is "
+            "encountered. Such code can be simplified by using the "
+            "enumerate builtin.",
+        ),
+        "C0201": (
+            "Consider iterating the dictionary directly instead of calling .keys()",
+            "consider-iterating-dictionary",
+            "Emitted when the keys of a dictionary are iterated through the ``.keys()`` "
+            "method or when ``.keys()`` is used for a membership check. "
+            "It is enough to iterate through the dictionary itself, "
+            "``for key in dictionary``. For membership checks, "
+            "``if key in dictionary`` is faster.",
+        ),
+        "C0206": (
+            "Consider iterating with .items()",
+            "consider-using-dict-items",
+            "Emitted when iterating over the keys of a dictionary and accessing the "
+            "value by index lookup. "
+            "Both the key and value can be accessed by iterating using the .items() "
+            "method of the dictionary instead.",
+        ),
+        "C0207": (
+            "Use %s instead",
+            "use-maxsplit-arg",
+            "Emitted when accessing only the first or last element of str.split(). "
+            "The first and last element can be accessed by using "
+            "str.split(sep, maxsplit=1)[0] or str.rsplit(sep, maxsplit=1)[-1] "
+            "instead.",
+        ),
+        "C0208": (
+            "Use a sequence type when iterating over values",
+            "use-sequence-for-iteration",
+            "When iterating over values, sequence types (e.g., ``lists``, ``tuples``, ``ranges``) "
+            "are more efficient than ``sets``.",
+        ),
+        "C0209": (
+            "Formatting a regular string which could be an f-string",
+            "consider-using-f-string",
+            "Used when we detect a string that is being formatted with format() or % "
+            "which could potentially be an f-string. The use of f-strings is preferred. "
+            "Requires Python 3.6 and ``py-version >= 3.6``.",
+        ),
+    }
+
+    def open(self) -> None:
+        py_version = self.linter.config.py_version
+        self._py36_plus = py_version >= (3, 6)
+
+    @staticmethod
+    def _is_builtin(node: nodes.NodeNG, function: str) -> bool:
+        inferred = utils.safe_infer(node)
+        if not inferred:
+            return False
+        return utils.is_builtin_object(inferred) and inferred.name == function
+
+    @utils.only_required_for_messages(
+        "consider-iterating-dictionary", "use-maxsplit-arg"
+    )
+    def visit_call(self, node: nodes.Call) -> None:
+        self._check_consider_iterating_dictionary(node)
+        self._check_use_maxsplit_arg(node)
+
+    def _check_consider_iterating_dictionary(self, node: nodes.Call) -> None:
+        if not isinstance(node.func, nodes.Attribute):
+            return
+        if node.func.attrname != "keys":
+            return
+
+        if isinstance(node.parent, nodes.BinOp) and node.parent.op in {"&", "|", "^"}:
+            return
+
+        comp_ancestor = utils.get_node_first_ancestor_of_type(node, nodes.Compare)
+        if (
+            isinstance(node.parent, (nodes.For, nodes.Comprehension))
+            or comp_ancestor
+            and any(
+                op
+                for op, comparator in comp_ancestor.ops
+                if op in {"in", "not in"}
+                and (comparator in node.node_ancestors() or comparator is node)
+            )
+        ):
+            inferred = utils.safe_infer(node.func)
+            if not isinstance(inferred, astroid.BoundMethod) or not isinstance(
+                inferred.bound, nodes.Dict
+            ):
+                return
+            self.add_message(
+                "consider-iterating-dictionary", node=node, confidence=INFERENCE
+            )
+
+    def _check_use_maxsplit_arg(self, node: nodes.Call) -> None:
         """Add message when accessing first or last elements of a str.split() or
         str.rsplit().
         """
-        pass
+        # Check if call is split() or rsplit()
+        if not (
+            isinstance(node.func, nodes.Attribute)
+            and node.func.attrname in {"split", "rsplit"}
+            and isinstance(utils.safe_infer(node.func), astroid.BoundMethod)
+        ):
+            return
+        inferred_expr = utils.safe_infer(node.func.expr)
+        if isinstance(inferred_expr, astroid.Instance) and any(
+            inferred_expr.nodes_of_class(nodes.ClassDef)
+        ):
+            return
+
+        confidence = HIGH
+        try:
+            sep = utils.get_argument_from_call(node, 0, "sep")
+        except utils.NoSuchArgumentError:
+            sep = utils.infer_kwarg_from_call(node, keyword="sep")
+            confidence = INFERENCE
+            if not sep:
+                return

-    def _check_consider_using_enumerate(self, node: nodes.For) ->None:
+        try:
+            # Ignore if maxsplit arg has been set
+            utils.get_argument_from_call(node, 1, "maxsplit")
+            return
+        except utils.NoSuchArgumentError:
+            if utils.infer_kwarg_from_call(node, keyword="maxsplit"):
+                return
+
+        if isinstance(node.parent, nodes.Subscript):
+            try:
+                subscript_value = utils.get_subscript_const_value(node.parent).value
+            except utils.InferredTypeError:
+                return
+
+            # Check for cases where variable (Name) subscripts may be mutated within a loop
+            if isinstance(node.parent.slice, nodes.Name):
+                # Check if loop present within the scope of the node
+                scope = node.scope()
+                for loop_node in scope.nodes_of_class((nodes.For, nodes.While)):
+                    if not loop_node.parent_of(node):
+                        continue
+
+                    # Check if var is mutated within loop (Assign/AugAssign)
+                    for assignment_node in loop_node.nodes_of_class(nodes.AugAssign):
+                        if node.parent.slice.name == assignment_node.target.name:
+                            return
+                    for assignment_node in loop_node.nodes_of_class(nodes.Assign):
+                        if node.parent.slice.name in [
+                            n.name for n in assignment_node.targets
+                        ]:
+                            return
+
+            if subscript_value in (-1, 0):
+                fn_name = node.func.attrname
+                new_fn = "rsplit" if subscript_value == -1 else "split"
+                new_name = (
+                    node.func.as_string().rsplit(fn_name, maxsplit=1)[0]
+                    + new_fn
+                    + f"({sep.as_string()}, maxsplit=1)[{subscript_value}]"
+                )
+                self.add_message(
+                    "use-maxsplit-arg",
+                    node=node,
+                    args=(new_name,),
+                    confidence=confidence,
+                )
+
+    @utils.only_required_for_messages(
+        "consider-using-enumerate",
+        "consider-using-dict-items",
+        "use-sequence-for-iteration",
+    )
+    def visit_for(self, node: nodes.For) -> None:
+        self._check_consider_using_enumerate(node)
+        self._check_consider_using_dict_items(node)
+        self._check_use_sequence_for_iteration(node)
+
+    def _check_consider_using_enumerate(self, node: nodes.For) -> None:
         """Emit a convention whenever range and len are used for indexing."""
-        pass
+        # Verify that we have a `range([start], len(...), [stop])` call and
+        # that the object which is iterated is used as a subscript in the
+        # body of the for.
+
+        # Is it a proper range call?
+        if not isinstance(node.iter, nodes.Call):
+            return
+        if not self._is_builtin(node.iter.func, "range"):
+            return
+        if not node.iter.args:
+            return
+        is_constant_zero = (
+            isinstance(node.iter.args[0], nodes.Const) and node.iter.args[0].value == 0
+        )
+        if len(node.iter.args) == 2 and not is_constant_zero:
+            return
+        if len(node.iter.args) > 2:
+            return
+
+        # Is it a proper len call?
+        if not isinstance(node.iter.args[-1], nodes.Call):
+            return
+        second_func = node.iter.args[-1].func
+        if not self._is_builtin(second_func, "len"):
+            return
+        len_args = node.iter.args[-1].args
+        if not len_args or len(len_args) != 1:
+            return
+        iterating_object = len_args[0]
+        if isinstance(iterating_object, nodes.Name):
+            expected_subscript_val_type = nodes.Name
+        elif isinstance(iterating_object, nodes.Attribute):
+            expected_subscript_val_type = nodes.Attribute
+        else:
+            return
+        # If we're defining __iter__ on self, enumerate won't work
+        scope = node.scope()
+        if (
+            isinstance(iterating_object, nodes.Name)
+            and iterating_object.name == "self"
+            and scope.name == "__iter__"
+        ):
+            return
+
+        # Verify that the body of the for loop uses a subscript
+        # with the object that was iterated. This uses some heuristics
+        # in order to make sure that the same object is used in the
+        # for body.
+        for child in node.body:
+            for subscript in child.nodes_of_class(nodes.Subscript):
+                if not isinstance(subscript.value, expected_subscript_val_type):
+                    continue
+
+                value = subscript.slice
+                if not isinstance(value, nodes.Name):
+                    continue
+                if subscript.value.scope() != node.scope():
+                    # Ignore this subscript if it's not in the same
+                    # scope. This means that in the body of the for
+                    # loop, another scope was created, where the same
+                    # name for the iterating object was used.
+                    continue
+                if value.name == node.target.name and (
+                    isinstance(subscript.value, nodes.Name)
+                    and iterating_object.name == subscript.value.name
+                    or isinstance(subscript.value, nodes.Attribute)
+                    and iterating_object.attrname == subscript.value.attrname
+                ):
+                    self.add_message("consider-using-enumerate", node=node)
+                    return

-    def _check_consider_using_dict_items(self, node: nodes.For) ->None:
+    def _check_consider_using_dict_items(self, node: nodes.For) -> None:
         """Add message when accessing dict values by index lookup."""
-        pass
+        # Verify that we have a .keys() call and
+        # that the object which is iterated is used as a subscript in the
+        # body of the for.

-    def _check_consider_using_dict_items_comprehension(self, node: nodes.
-        Comprehension) ->None:
+        iterating_object_name = utils.get_iterating_dictionary_name(node)
+        if iterating_object_name is None:
+            return
+
+        # Verify that the body of the for loop uses a subscript
+        # with the object that was iterated. This uses some heuristics
+        # in order to make sure that the same object is used in the
+        # for body.
+        for child in node.body:
+            for subscript in child.nodes_of_class(nodes.Subscript):
+                if not isinstance(subscript.value, (nodes.Name, nodes.Attribute)):
+                    continue
+
+                value = subscript.slice
+                if (
+                    not isinstance(value, nodes.Name)
+                    or value.name != node.target.name
+                    or iterating_object_name != subscript.value.as_string()
+                ):
+                    continue
+                last_definition_lineno = value.lookup(value.name)[1][-1].lineno
+                if last_definition_lineno > node.lineno:
+                    # Ignore this subscript if it has been redefined after
+                    # the for loop. This checks for the line number using .lookup()
+                    # to get the line number where the iterating object was last
+                    # defined and compare that to the for loop's line number
+                    continue
+                if (
+                    isinstance(subscript.parent, nodes.Assign)
+                    and subscript in subscript.parent.targets
+                    or isinstance(subscript.parent, nodes.AugAssign)
+                    and subscript == subscript.parent.target
+                ):
+                    # Ignore this subscript if it is the target of an assignment
+                    # Early termination as dict index lookup is necessary
+                    return
+                if isinstance(subscript.parent, nodes.Delete):
+                    # Ignore this subscript if the index is used to delete a
+                    # dictionary item.
+                    return
+
+                self.add_message("consider-using-dict-items", node=node)
+                return
+
+    @utils.only_required_for_messages(
+        "consider-using-dict-items",
+        "use-sequence-for-iteration",
+    )
+    def visit_comprehension(self, node: nodes.Comprehension) -> None:
+        self._check_consider_using_dict_items_comprehension(node)
+        self._check_use_sequence_for_iteration(node)
+
+    def _check_consider_using_dict_items_comprehension(
+        self, node: nodes.Comprehension
+    ) -> None:
         """Add message when accessing dict values by index lookup."""
-        pass
+        iterating_object_name = utils.get_iterating_dictionary_name(node)
+        if iterating_object_name is None:
+            return

-    def _check_use_sequence_for_iteration(self, node: (nodes.For | nodes.
-        Comprehension)) ->None:
+        for child in node.parent.get_children():
+            for subscript in child.nodes_of_class(nodes.Subscript):
+                if not isinstance(subscript.value, (nodes.Name, nodes.Attribute)):
+                    continue
+
+                value = subscript.slice
+                if (
+                    not isinstance(value, nodes.Name)
+                    or value.name != node.target.name
+                    or iterating_object_name != subscript.value.as_string()
+                ):
+                    continue
+
+                self.add_message("consider-using-dict-items", node=node)
+                return
+
+    def _check_use_sequence_for_iteration(
+        self, node: nodes.For | nodes.Comprehension
+    ) -> None:
         """Check if code iterates over an in-place defined set.

         Sets using `*` are not considered in-place.
         """
-        pass
+        if isinstance(node.iter, nodes.Set) and not any(
+            utils.has_starred_node_recursive(node)
+        ):
+            self.add_message(
+                "use-sequence-for-iteration", node=node.iter, confidence=HIGH
+            )

-    def _detect_replacable_format_call(self, node: nodes.Const) ->None:
+    @utils.only_required_for_messages("consider-using-f-string")
+    def visit_const(self, node: nodes.Const) -> None:
+        if self._py36_plus:
+            # f-strings require Python 3.6
+            if node.pytype() == "builtins.str" and not isinstance(
+                node.parent, nodes.JoinedStr
+            ):
+                self._detect_replacable_format_call(node)
+
+    def _detect_replacable_format_call(self, node: nodes.Const) -> None:
         """Check whether a string is used in a call to format() or '%' and whether it
         can be replaced by an f-string.
         """
-        pass
+        if (
+            isinstance(node.parent, nodes.Attribute)
+            and node.parent.attrname == "format"
+        ):
+            # Don't warn on referencing / assigning .format without calling it
+            if not isinstance(node.parent.parent, nodes.Call):
+                return
+
+            if node.parent.parent.args:
+                for arg in node.parent.parent.args:
+                    # If star expressions with more than 1 element are being used
+                    if isinstance(arg, nodes.Starred):
+                        inferred = utils.safe_infer(arg.value)
+                        if (
+                            isinstance(inferred, astroid.List)
+                            and len(inferred.elts) > 1
+                        ):
+                            return
+                    # Backslashes can't be in f-string expressions
+                    if "\\" in arg.as_string():
+                        return
+
+            elif node.parent.parent.keywords:
+                keyword_args = [
+                    i[0] for i in utils.parse_format_method_string(node.value)[0]
+                ]
+                for keyword in node.parent.parent.keywords:
+                    # If keyword is used multiple times
+                    if keyword_args.count(keyword.arg) > 1:
+                        return
+
+                    keyword = utils.safe_infer(keyword.value)
+
+                    # If lists of more than one element are being unpacked
+                    if isinstance(keyword, nodes.Dict):
+                        if len(keyword.items) > 1 and len(keyword_args) > 1:
+                            return
+
+            # If all tests pass, then raise message
+            self.add_message(
+                "consider-using-f-string",
+                node=node,
+                line=node.lineno,
+                col_offset=node.col_offset,
+            )
+
+        elif isinstance(node.parent, nodes.BinOp) and node.parent.op == "%":
+            # Backslashes can't be in f-string expressions
+            if "\\" in node.parent.right.as_string():
+                return
+
+            # If % applied to another type than str, it's modulo and can't be replaced by formatting
+            if not hasattr(node.parent.left, "value") or not isinstance(
+                node.parent.left.value, str
+            ):
+                return
+
+            # Brackets can be inconvenient in f-string expressions
+            if "{" in node.parent.left.value or "}" in node.parent.left.value:
+                return
+
+            inferred_right = utils.safe_infer(node.parent.right)
+
+            # If dicts or lists of length > 1 are used
+            if isinstance(inferred_right, nodes.Dict):
+                if len(inferred_right.items) > 1:
+                    return
+            elif isinstance(inferred_right, nodes.List):
+                if len(inferred_right.elts) > 1:
+                    return
+
+            # If all tests pass, then raise message
+            self.add_message(
+                "consider-using-f-string",
+                node=node,
+                line=node.lineno,
+                col_offset=node.col_offset,
+            )
diff --git a/pylint/checkers/refactoring/refactoring_checker.py b/pylint/checkers/refactoring/refactoring_checker.py
index bfd096850..8e3dc4919 100644
--- a/pylint/checkers/refactoring/refactoring_checker.py
+++ b/pylint/checkers/refactoring/refactoring_checker.py
@@ -1,4 +1,9 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import collections
 import copy
 import itertools
@@ -7,39 +12,73 @@ from collections.abc import Iterator
 from functools import cached_property, reduce
 from re import Pattern
 from typing import TYPE_CHECKING, Any, NamedTuple, Union, cast
+
 import astroid
 from astroid import bases, nodes
 from astroid.util import UninferableBase
+
 from pylint import checkers
 from pylint.checkers import utils
 from pylint.checkers.base.basic_error_checker import _loop_exits_early
 from pylint.checkers.utils import node_frame_class
 from pylint.interfaces import HIGH, INFERENCE, Confidence
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
+
+
 NodesWithNestedBlocks = Union[nodes.Try, nodes.While, nodes.For, nodes.If]
-KNOWN_INFINITE_ITERATORS = {'itertools.count', 'itertools.cycle'}
-BUILTIN_EXIT_FUNCS = frozenset(('quit', 'exit'))
-CALLS_THAT_COULD_BE_REPLACED_BY_WITH = frozenset(('threading.lock.acquire',
-    'threading._RLock.acquire', 'threading.Semaphore.acquire',
-    'multiprocessing.managers.BaseManager.start',
-    'multiprocessing.managers.SyncManager.start'))
-CALLS_RETURNING_CONTEXT_MANAGERS = frozenset(('_io.open',
-    'pathlib.Path.open', 'codecs.open', 'urllib.request.urlopen',
-    'tempfile.NamedTemporaryFile', 'tempfile.SpooledTemporaryFile',
-    'tempfile.TemporaryDirectory', 'tempfile.TemporaryFile',
-    'zipfile.ZipFile', 'zipfile.PyZipFile', 'zipfile.ZipFile.open',
-    'zipfile.PyZipFile.open', 'tarfile.TarFile', 'tarfile.TarFile.open',
-    'multiprocessing.context.BaseContext.Pool', 'subprocess.Popen'))
-
-
-def _except_statement_is_always_returning(node: nodes.Try,
-    returning_node_class: nodes.NodeNG) ->bool:
+
+KNOWN_INFINITE_ITERATORS = {"itertools.count", "itertools.cycle"}
+BUILTIN_EXIT_FUNCS = frozenset(("quit", "exit"))
+CALLS_THAT_COULD_BE_REPLACED_BY_WITH = frozenset(
+    (
+        "threading.lock.acquire",
+        "threading._RLock.acquire",
+        "threading.Semaphore.acquire",
+        "multiprocessing.managers.BaseManager.start",
+        "multiprocessing.managers.SyncManager.start",
+    )
+)
+CALLS_RETURNING_CONTEXT_MANAGERS = frozenset(
+    (
+        "_io.open",  # regular 'open()' call
+        "pathlib.Path.open",
+        "codecs.open",
+        "urllib.request.urlopen",
+        "tempfile.NamedTemporaryFile",
+        "tempfile.SpooledTemporaryFile",
+        "tempfile.TemporaryDirectory",
+        "tempfile.TemporaryFile",
+        "zipfile.ZipFile",
+        "zipfile.PyZipFile",
+        "zipfile.ZipFile.open",
+        "zipfile.PyZipFile.open",
+        "tarfile.TarFile",
+        "tarfile.TarFile.open",
+        "multiprocessing.context.BaseContext.Pool",
+        "subprocess.Popen",
+    )
+)
+
+
+def _if_statement_is_always_returning(
+    if_node: nodes.If, returning_node_class: nodes.NodeNG
+) -> bool:
+    return any(isinstance(node, returning_node_class) for node in if_node.body)
+
+
+def _except_statement_is_always_returning(
+    node: nodes.Try, returning_node_class: nodes.NodeNG
+) -> bool:
     """Detect if all except statements return."""
-    pass
+    return all(
+        any(isinstance(child, returning_node_class) for child in handler.body)
+        for handler in node.handlers
+    )


-def _is_trailing_comma(tokens: list[tokenize.TokenInfo], index: int) ->bool:
+def _is_trailing_comma(tokens: list[tokenize.TokenInfo], index: int) -> bool:
     """Check if the given token is a trailing comma.

     :param tokens: Sequence of modules tokens
@@ -48,52 +87,139 @@ def _is_trailing_comma(tokens: list[tokenize.TokenInfo], index: int) ->bool:
     :returns: True if the token is a comma which trails an expression
     :rtype: bool
     """
-    pass
-
-
-def _is_part_of_with_items(node: nodes.Call) ->bool:
+    token = tokens[index]
+    if token.exact_type != tokenize.COMMA:
+        return False
+    # Must have remaining tokens on the same line such as NEWLINE
+    left_tokens = itertools.islice(tokens, index + 1, None)
+
+    more_tokens_on_line = False
+    for remaining_token in left_tokens:
+        if remaining_token.start[0] == token.start[0]:
+            more_tokens_on_line = True
+            # If one of the remaining same line tokens is not NEWLINE or COMMENT
+            # the comma is not trailing.
+            if remaining_token.type not in (tokenize.NEWLINE, tokenize.COMMENT):
+                return False
+
+    if not more_tokens_on_line:
+        return False
+
+    def get_curline_index_start() -> int:
+        """Get the index denoting the start of the current line."""
+        for subindex, token in enumerate(reversed(tokens[:index])):
+            # See Lib/tokenize.py and Lib/token.py in cpython for more info
+            if token.type == tokenize.NEWLINE:
+                return index - subindex
+        return 0
+
+    curline_start = get_curline_index_start()
+    expected_tokens = {"return", "yield"}
+    return any(
+        "=" in prevtoken.string or prevtoken.string in expected_tokens
+        for prevtoken in tokens[curline_start:index]
+    )
+
+
+def _is_inside_context_manager(node: nodes.Call) -> bool:
+    frame = node.frame()
+    if not isinstance(
+        frame, (nodes.FunctionDef, astroid.BoundMethod, astroid.UnboundMethod)
+    ):
+        return False
+    return frame.name == "__enter__" or utils.decorated_with(
+        frame, "contextlib.contextmanager"
+    )
+
+
+def _is_a_return_statement(node: nodes.Call) -> bool:
+    frame = node.frame()
+    for parent in node.node_ancestors():
+        if parent is frame:
+            break
+        if isinstance(parent, nodes.Return):
+            return True
+    return False
+
+
+def _is_part_of_with_items(node: nodes.Call) -> bool:
     """Checks if one of the node's parents is a ``nodes.With`` node and that the node
     itself is located somewhere under its ``items``.
     """
-    pass
-
-
-def _will_be_released_automatically(node: nodes.Call) ->bool:
+    frame = node.frame()
+    current = node
+    while current != frame:
+        if isinstance(current, nodes.With):
+            items_start = current.items[0][0].lineno
+            items_end = current.items[-1][0].tolineno
+            return items_start <= node.lineno <= items_end  # type: ignore[no-any-return]
+        current = current.parent
+    return False
+
+
+def _will_be_released_automatically(node: nodes.Call) -> bool:
     """Checks if a call that could be used in a ``with`` statement is used in an
     alternative construct which would ensure that its __exit__ method is called.
     """
-    pass
-
-
-def _is_part_of_assignment_target(node: nodes.NodeNG) ->bool:
+    callables_taking_care_of_exit = frozenset(
+        (
+            "contextlib._BaseExitStack.enter_context",
+            "contextlib.ExitStack.enter_context",  # necessary for Python 3.6 compatibility
+        )
+    )
+    if not isinstance(node.parent, nodes.Call):
+        return False
+    func = utils.safe_infer(node.parent.func)
+    if not func:
+        return False
+    return func.qname() in callables_taking_care_of_exit
+
+
+def _is_part_of_assignment_target(node: nodes.NodeNG) -> bool:
     """Check whether use of a variable is happening as part of the left-hand
     side of an assignment.

     This requires recursive checking, because destructuring assignment can have
     arbitrarily nested tuples and lists to unpack.
     """
-    pass
+    if isinstance(node.parent, nodes.Assign):
+        return node in node.parent.targets
+
+    if isinstance(node.parent, nodes.AugAssign):
+        return node == node.parent.target  # type: ignore[no-any-return]
+
+    if isinstance(node.parent, (nodes.Tuple, nodes.List)):
+        return _is_part_of_assignment_target(node.parent)
+
+    return False


 class ConsiderUsingWithStack(NamedTuple):
     """Stack for objects that may potentially trigger a R1732 message
     if they are not used in a ``with`` block later on.
     """
+
     module_scope: dict[str, nodes.NodeNG] = {}
     class_scope: dict[str, nodes.NodeNG] = {}
     function_scope: dict[str, nodes.NodeNG] = {}

-    def __iter__(self) ->Iterator[dict[str, nodes.NodeNG]]:
+    def __iter__(self) -> Iterator[dict[str, nodes.NodeNG]]:
         yield from (self.function_scope, self.class_scope, self.module_scope)

-    def get_stack_for_frame(self, frame: (nodes.FunctionDef | nodes.
-        ClassDef | nodes.Module)) ->dict[str, nodes.NodeNG]:
+    def get_stack_for_frame(
+        self, frame: nodes.FunctionDef | nodes.ClassDef | nodes.Module
+    ) -> dict[str, nodes.NodeNG]:
         """Get the stack corresponding to the scope of the given frame."""
-        pass
+        if isinstance(frame, nodes.FunctionDef):
+            return self.function_scope
+        if isinstance(frame, nodes.ClassDef):
+            return self.class_scope
+        return self.module_scope

-    def clear_all(self) ->None:
+    def clear_all(self) -> None:
         """Convenience method to clear all stacks."""
-        pass
+        for stack in self:
+            stack.clear()


 class RefactoringChecker(checkers.BaseTokenChecker):
@@ -103,143 +229,305 @@ class RefactoringChecker(checkers.BaseTokenChecker):
     in order to create knowledge about whether an "else if" node
     is a true "else if" node, or an "elif" node.
     """
-    name = 'refactoring'
-    msgs = {'R1701': (
-        'Consider merging these isinstance calls to isinstance(%s, (%s))',
-        'consider-merging-isinstance',
-        'Used when multiple consecutive isinstance calls can be merged into one.'
-        ), 'R1706': ('Consider using ternary (%s)',
-        'consider-using-ternary',
-        'Used when one of known pre-python 2.5 ternary syntax is used.'),
-        'R1709': ('Boolean expression may be simplified to %s',
-        'simplify-boolean-expression',
-        'Emitted when redundant pre-python 2.5 ternary syntax is used.'),
-        'R1726': ('Boolean condition "%s" may be simplified to "%s"',
-        'simplifiable-condition',
-        'Emitted when a boolean condition is able to be simplified.'),
-        'R1727': ("Boolean condition '%s' will always evaluate to '%s'",
-        'condition-evals-to-constant',
-        'Emitted when a boolean condition can be simplified to a constant value.'
-        ), 'R1702': ('Too many nested blocks (%s/%s)',
-        'too-many-nested-blocks',
-        'Used when a function or a method has too many nested blocks. This makes the code less understandable and maintainable.'
-        , {'old_names': [('R0101', 'old-too-many-nested-blocks')]}),
-        'R1703': ('The if statement can be replaced with %s',
-        'simplifiable-if-statement',
-        "Used when an if statement can be replaced with 'bool(test)'.", {
-        'old_names': [('R0102', 'old-simplifiable-if-statement')]}),
-        'R1704': ('Redefining argument with the local name %r',
-        'redefined-argument-from-local',
-        'Used when a local name is redefining an argument, which might suggest a potential error. This is taken in account only for a handful of name binding operations, such as for iteration, with statement assignment and exception handler assignment.'
-        ), 'R1705': ('Unnecessary "%s" after "return", %s',
-        'no-else-return',
-        'Used in order to highlight an unnecessary block of code following an if containing a return statement. As such, it will warn when it encounters an else following a chain of ifs, all of them containing a return statement.'
-        ), 'R1707': ('Disallow trailing comma tuple',
-        'trailing-comma-tuple',
-        'In Python, a tuple is actually created by the comma symbol, not by the parentheses. Unfortunately, one can actually create a tuple by misplacing a trailing comma, which can lead to potential weird bugs in your code. You should always use parentheses explicitly for creating a tuple.'
-        ), 'R1708': (
-        'Do not raise StopIteration in generator, use return statement instead'
-        , 'stop-iteration-return',
-        'According to PEP479, the raise of StopIteration to end the loop of a generator may lead to hard to find bugs. This PEP specify that raise StopIteration has to be replaced by a simple return statement'
-        ), 'R1710': (
-        'Either all return statements in a function should return an expression, or none of them should.'
-        , 'inconsistent-return-statements',
-        'According to PEP8, if any return statement returns an expression, any return statements where no value is returned should explicitly state this as return None, and an explicit return statement should be present at the end of the function (if reachable)'
-        ), 'R1711': ('Useless return at end of function or method',
-        'useless-return',
-        'Emitted when a single "return" or "return None" statement is found at the end of function or method definition. This statement can safely be removed because Python will implicitly return None'
-        ), 'R1712': (
-        'Consider using tuple unpacking for swapping variables',
-        'consider-swap-variables',
-        'You do not have to use a temporary variable in order to swap variables. Using "tuple unpacking" to directly swap variables makes the intention more clear.'
-        ), 'R1713': (
-        'Consider using str.join(sequence) for concatenating strings from an iterable'
-        , 'consider-using-join',
-        'Using str.join(sequence) is faster, uses less memory and increases readability compared to for-loop iteration.'
-        ), 'R1714': (
-        "Consider merging these comparisons with 'in' by using '%s %sin (%s)'. Use a set instead if elements are hashable."
-        , 'consider-using-in',
-        'To check if a variable is equal to one of many values, combine the values into a set or tuple and check if the variable is contained "in" it instead of checking for equality against each of the values. This is faster and less verbose.'
-        ), 'R1715': (
-        'Consider using dict.get for getting values from a dict if a key is present or a default if not'
-        , 'consider-using-get',
-        'Using the builtin dict.get for getting a value from a dictionary if a key is present or a default if not, is simpler and considered more idiomatic, although sometimes a bit slower'
-        ), 'R1716': ('Simplify chained comparison between the operands',
-        'chained-comparison',
-        'This message is emitted when pylint encounters boolean operation like "a < b and b < c", suggesting instead to refactor it to "a < b < c"'
-        ), 'R1717': ('Consider using a dictionary comprehension',
-        'consider-using-dict-comprehension',
-        "Emitted when we detect the creation of a dictionary using the dict() callable and a transient list. Although there is nothing syntactically wrong with this code, it is hard to read and can be simplified to a dict comprehension. Also it is faster since you don't need to create another transient list"
-        ), 'R1718': ('Consider using a set comprehension',
-        'consider-using-set-comprehension',
-        "Although there is nothing syntactically wrong with this code, it is hard to read and can be simplified to a set comprehension. Also it is faster since you don't need to create another transient list"
-        ), 'R1719': ('The if expression can be replaced with %s',
-        'simplifiable-if-expression',
-        "Used when an if expression can be replaced with 'bool(test)' or simply 'test' if the boolean cast is implicit."
-        ), 'R1720': ('Unnecessary "%s" after "raise", %s', 'no-else-raise',
-        'Used in order to highlight an unnecessary block of code following an if containing a raise statement. As such, it will warn when it encounters an else following a chain of ifs, all of them containing a raise statement.'
-        ), 'R1721': ('Unnecessary use of a comprehension, use %s instead.',
-        'unnecessary-comprehension',
-        'Instead of using an identity comprehension, consider using the list, dict or set constructor. It is faster and simpler.'
-        ), 'R1722': ("Consider using 'sys.exit' instead",
-        'consider-using-sys-exit',
-        "Contrary to 'exit()' or 'quit()', 'sys.exit' does not rely on the site module being available (as the 'sys' module is always available)."
-        ), 'R1723': ('Unnecessary "%s" after "break", %s', 'no-else-break',
-        'Used in order to highlight an unnecessary block of code following an if containing a break statement. As such, it will warn when it encounters an else following a chain of ifs, all of them containing a break statement.'
-        ), 'R1724': ('Unnecessary "%s" after "continue", %s',
-        'no-else-continue',
-        'Used in order to highlight an unnecessary block of code following an if containing a continue statement. As such, it will warn when it encounters an else following a chain of ifs, all of them containing a continue statement.'
-        ), 'R1725': (
-        'Consider using Python 3 style super() without arguments',
-        'super-with-arguments',
-        'Emitted when calling the super() builtin with the current class and instance. On Python 3 these arguments are the default and they can be omitted.'
-        ), 'R1728': ("Consider using a generator instead '%s(%s)'",
-        'consider-using-generator',
-        'If your container can be large using a generator will bring better performance.'
-        ), 'R1729': ("Use a generator instead '%s(%s)'", 'use-a-generator',
-        "Comprehension inside of 'any', 'all', 'max', 'min' or 'sum' is unnecessary. A generator would be sufficient and faster."
-        ), 'R1730': ("Consider using '%s' instead of unnecessary if block",
-        'consider-using-min-builtin',
-        'Using the min builtin instead of a conditional improves readability and conciseness.'
-        ), 'R1731': ("Consider using '%s' instead of unnecessary if block",
-        'consider-using-max-builtin',
-        'Using the max builtin instead of a conditional improves readability and conciseness.'
-        ), 'R1732': (
-        "Consider using 'with' for resource-allocating operations",
-        'consider-using-with',
-        "Emitted if a resource-allocating assignment or call may be replaced by a 'with' block. By using 'with' the release of the allocated resources is ensured even in the case of an exception."
-        ), 'R1733': (
-        "Unnecessary dictionary index lookup, use '%s' instead",
-        'unnecessary-dict-index-lookup',
-        'Emitted when iterating over the dictionary items (key-item pairs) and accessing the value by index lookup. The value can be accessed directly instead.'
-        ), 'R1734': ('Consider using [] instead of list()',
-        'use-list-literal',
-        'Emitted when using list() to create an empty list instead of the literal []. The literal is faster as it avoids an additional function call.'
-        ), 'R1735': ("Consider using '%s' instead of a call to 'dict'.",
-        'use-dict-literal',
-        "Emitted when using dict() to create a dictionary instead of a literal '{ ... }'. The literal is faster as it avoids an additional function call."
-        ), 'R1736': ("Unnecessary list index lookup, use '%s' instead",
-        'unnecessary-list-index-lookup',
-        'Emitted when iterating over an enumeration and accessing the value by index lookup. The value can be accessed directly instead.'
-        ), 'R1737': (
-        "Use 'yield from' directly instead of yielding each element one by one"
-        , 'use-yield-from',
-        'Yielding directly from the iterator is faster and arguably cleaner code than yielding each element one by one in the loop.'
-        )}
-    options = ('max-nested-blocks', {'default': 5, 'type': 'int', 'metavar':
-        '<int>', 'help':
-        'Maximum number of nested blocks for function / method body'}), (
-        'never-returning-functions', {'default': ('sys.exit',
-        'argparse.parse_error'), 'type': 'csv', 'metavar':
-        '<members names>', 'help':
-        'Complete name of functions that never returns. When checking for inconsistent-return-statements if a never returning function is called then it will be considered as an explicit return statement and no message will be printed.'
-        }), ('suggest-join-with-non-empty-separator', {'default': True,
-        'type': 'yn', 'metavar': '<y or n>', 'help':
-        """Let 'consider-using-join' be raised when the separator to join on would be non-empty (resulting in expected fixes of the type: ``"- " + "
-- ".join(items)``)"""
-        })
-
-    def __init__(self, linter: PyLinter) ->None:
+
+    name = "refactoring"
+
+    msgs = {
+        "R1701": (
+            "Consider merging these isinstance calls to isinstance(%s, (%s))",
+            "consider-merging-isinstance",
+            "Used when multiple consecutive isinstance calls can be merged into one.",
+        ),
+        "R1706": (
+            "Consider using ternary (%s)",
+            "consider-using-ternary",
+            "Used when one of known pre-python 2.5 ternary syntax is used.",
+        ),
+        "R1709": (
+            "Boolean expression may be simplified to %s",
+            "simplify-boolean-expression",
+            "Emitted when redundant pre-python 2.5 ternary syntax is used.",
+        ),
+        "R1726": (
+            'Boolean condition "%s" may be simplified to "%s"',
+            "simplifiable-condition",
+            "Emitted when a boolean condition is able to be simplified.",
+        ),
+        "R1727": (
+            "Boolean condition '%s' will always evaluate to '%s'",
+            "condition-evals-to-constant",
+            "Emitted when a boolean condition can be simplified to a constant value.",
+        ),
+        "R1702": (
+            "Too many nested blocks (%s/%s)",
+            "too-many-nested-blocks",
+            "Used when a function or a method has too many nested "
+            "blocks. This makes the code less understandable and "
+            "maintainable.",
+            {"old_names": [("R0101", "old-too-many-nested-blocks")]},
+        ),
+        "R1703": (
+            "The if statement can be replaced with %s",
+            "simplifiable-if-statement",
+            "Used when an if statement can be replaced with 'bool(test)'.",
+            {"old_names": [("R0102", "old-simplifiable-if-statement")]},
+        ),
+        "R1704": (
+            "Redefining argument with the local name %r",
+            "redefined-argument-from-local",
+            "Used when a local name is redefining an argument, which might "
+            "suggest a potential error. This is taken in account only for "
+            "a handful of name binding operations, such as for iteration, "
+            "with statement assignment and exception handler assignment.",
+        ),
+        "R1705": (
+            'Unnecessary "%s" after "return", %s',
+            "no-else-return",
+            "Used in order to highlight an unnecessary block of "
+            "code following an if containing a return statement. "
+            "As such, it will warn when it encounters an else "
+            "following a chain of ifs, all of them containing a "
+            "return statement.",
+        ),
+        "R1707": (
+            "Disallow trailing comma tuple",
+            "trailing-comma-tuple",
+            "In Python, a tuple is actually created by the comma symbol, "
+            "not by the parentheses. Unfortunately, one can actually create a "
+            "tuple by misplacing a trailing comma, which can lead to potential "
+            "weird bugs in your code. You should always use parentheses "
+            "explicitly for creating a tuple.",
+        ),
+        "R1708": (
+            "Do not raise StopIteration in generator, use return statement instead",
+            "stop-iteration-return",
+            "According to PEP479, the raise of StopIteration to end the loop of "
+            "a generator may lead to hard to find bugs. This PEP specify that "
+            "raise StopIteration has to be replaced by a simple return statement",
+        ),
+        "R1710": (
+            "Either all return statements in a function should return an expression, "
+            "or none of them should.",
+            "inconsistent-return-statements",
+            "According to PEP8, if any return statement returns an expression, "
+            "any return statements where no value is returned should explicitly "
+            "state this as return None, and an explicit return statement "
+            "should be present at the end of the function (if reachable)",
+        ),
+        "R1711": (
+            "Useless return at end of function or method",
+            "useless-return",
+            'Emitted when a single "return" or "return None" statement is found '
+            "at the end of function or method definition. This statement can safely be "
+            "removed because Python will implicitly return None",
+        ),
+        "R1712": (
+            "Consider using tuple unpacking for swapping variables",
+            "consider-swap-variables",
+            "You do not have to use a temporary variable in order to "
+            'swap variables. Using "tuple unpacking" to directly swap '
+            "variables makes the intention more clear.",
+        ),
+        "R1713": (
+            "Consider using str.join(sequence) for concatenating "
+            "strings from an iterable",
+            "consider-using-join",
+            "Using str.join(sequence) is faster, uses less memory "
+            "and increases readability compared to for-loop iteration.",
+        ),
+        "R1714": (
+            "Consider merging these comparisons with 'in' by using '%s %sin (%s)'."
+            " Use a set instead if elements are hashable.",
+            "consider-using-in",
+            "To check if a variable is equal to one of many values, "
+            'combine the values into a set or tuple and check if the variable is contained "in" it '
+            "instead of checking for equality against each of the values. "
+            "This is faster and less verbose.",
+        ),
+        "R1715": (
+            "Consider using dict.get for getting values from a dict "
+            "if a key is present or a default if not",
+            "consider-using-get",
+            "Using the builtin dict.get for getting a value from a dictionary "
+            "if a key is present or a default if not, is simpler and considered "
+            "more idiomatic, although sometimes a bit slower",
+        ),
+        "R1716": (
+            "Simplify chained comparison between the operands",
+            "chained-comparison",
+            "This message is emitted when pylint encounters boolean operation like "
+            '"a < b and b < c", suggesting instead to refactor it to "a < b < c"',
+        ),
+        "R1717": (
+            "Consider using a dictionary comprehension",
+            "consider-using-dict-comprehension",
+            "Emitted when we detect the creation of a dictionary "
+            "using the dict() callable and a transient list. "
+            "Although there is nothing syntactically wrong with this code, "
+            "it is hard to read and can be simplified to a dict comprehension. "
+            "Also it is faster since you don't need to create another "
+            "transient list",
+        ),
+        "R1718": (
+            "Consider using a set comprehension",
+            "consider-using-set-comprehension",
+            "Although there is nothing syntactically wrong with this code, "
+            "it is hard to read and can be simplified to a set comprehension. "
+            "Also it is faster since you don't need to create another "
+            "transient list",
+        ),
+        "R1719": (
+            "The if expression can be replaced with %s",
+            "simplifiable-if-expression",
+            "Used when an if expression can be replaced with 'bool(test)' "
+            "or simply 'test' if the boolean cast is implicit.",
+        ),
+        "R1720": (
+            'Unnecessary "%s" after "raise", %s',
+            "no-else-raise",
+            "Used in order to highlight an unnecessary block of "
+            "code following an if containing a raise statement. "
+            "As such, it will warn when it encounters an else "
+            "following a chain of ifs, all of them containing a "
+            "raise statement.",
+        ),
+        "R1721": (
+            "Unnecessary use of a comprehension, use %s instead.",
+            "unnecessary-comprehension",
+            "Instead of using an identity comprehension, "
+            "consider using the list, dict or set constructor. "
+            "It is faster and simpler.",
+        ),
+        "R1722": (
+            "Consider using 'sys.exit' instead",
+            "consider-using-sys-exit",
+            "Contrary to 'exit()' or 'quit()', 'sys.exit' does not rely on the "
+            "site module being available (as the 'sys' module is always available).",
+        ),
+        "R1723": (
+            'Unnecessary "%s" after "break", %s',
+            "no-else-break",
+            "Used in order to highlight an unnecessary block of "
+            "code following an if containing a break statement. "
+            "As such, it will warn when it encounters an else "
+            "following a chain of ifs, all of them containing a "
+            "break statement.",
+        ),
+        "R1724": (
+            'Unnecessary "%s" after "continue", %s',
+            "no-else-continue",
+            "Used in order to highlight an unnecessary block of "
+            "code following an if containing a continue statement. "
+            "As such, it will warn when it encounters an else "
+            "following a chain of ifs, all of them containing a "
+            "continue statement.",
+        ),
+        "R1725": (
+            "Consider using Python 3 style super() without arguments",
+            "super-with-arguments",
+            "Emitted when calling the super() builtin with the current class "
+            "and instance. On Python 3 these arguments are the default and they can be omitted.",
+        ),
+        "R1728": (
+            "Consider using a generator instead '%s(%s)'",
+            "consider-using-generator",
+            "If your container can be large using "
+            "a generator will bring better performance.",
+        ),
+        "R1729": (
+            "Use a generator instead '%s(%s)'",
+            "use-a-generator",
+            "Comprehension inside of 'any', 'all', 'max', 'min' or 'sum' is unnecessary. "
+            "A generator would be sufficient and faster.",
+        ),
+        "R1730": (
+            "Consider using '%s' instead of unnecessary if block",
+            "consider-using-min-builtin",
+            "Using the min builtin instead of a conditional improves readability and conciseness.",
+        ),
+        "R1731": (
+            "Consider using '%s' instead of unnecessary if block",
+            "consider-using-max-builtin",
+            "Using the max builtin instead of a conditional improves readability and conciseness.",
+        ),
+        "R1732": (
+            "Consider using 'with' for resource-allocating operations",
+            "consider-using-with",
+            "Emitted if a resource-allocating assignment or call may be replaced by a 'with' block. "
+            "By using 'with' the release of the allocated resources is ensured even in the case "
+            "of an exception.",
+        ),
+        "R1733": (
+            "Unnecessary dictionary index lookup, use '%s' instead",
+            "unnecessary-dict-index-lookup",
+            "Emitted when iterating over the dictionary items (key-item pairs) and accessing the "
+            "value by index lookup. "
+            "The value can be accessed directly instead.",
+        ),
+        "R1734": (
+            "Consider using [] instead of list()",
+            "use-list-literal",
+            "Emitted when using list() to create an empty list instead of the literal []. "
+            "The literal is faster as it avoids an additional function call.",
+        ),
+        "R1735": (
+            "Consider using '%s' instead of a call to 'dict'.",
+            "use-dict-literal",
+            "Emitted when using dict() to create a dictionary instead of a literal '{ ... }'. "
+            "The literal is faster as it avoids an additional function call.",
+        ),
+        "R1736": (
+            "Unnecessary list index lookup, use '%s' instead",
+            "unnecessary-list-index-lookup",
+            "Emitted when iterating over an enumeration and accessing the "
+            "value by index lookup. "
+            "The value can be accessed directly instead.",
+        ),
+        "R1737": (
+            "Use 'yield from' directly instead of yielding each element one by one",
+            "use-yield-from",
+            "Yielding directly from the iterator is faster and arguably cleaner code than yielding each element "
+            "one by one in the loop.",
+        ),
+    }
+    options = (
+        (
+            "max-nested-blocks",
+            {
+                "default": 5,
+                "type": "int",
+                "metavar": "<int>",
+                "help": "Maximum number of nested blocks for function / method body",
+            },
+        ),
+        (
+            "never-returning-functions",
+            {
+                "default": ("sys.exit", "argparse.parse_error"),
+                "type": "csv",
+                "metavar": "<members names>",
+                "help": "Complete name of functions that never returns. When checking "
+                "for inconsistent-return-statements if a never returning function is "
+                "called then it will be considered as an explicit return statement "
+                "and no message will be printed.",
+            },
+        ),
+        (
+            "suggest-join-with-non-empty-separator",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": (
+                    "Let 'consider-using-join' be raised when the separator to "
+                    "join on would be non-empty (resulting in expected fixes "
+                    'of the type: ``"- " + "\n- ".join(items)``)'
+                ),
+            },
+        ),
+    )
+
+    def __init__(self, linter: PyLinter) -> None:
         super().__init__(linter)
         self._return_nodes: dict[str, list[nodes.Return]] = {}
         self._consider_using_with_stack = ConsiderUsingWithStack()
@@ -247,7 +535,33 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         self._never_returning_functions: set[str] = set()
         self._suggest_join_with_non_empty_separator: bool = False

-    def _is_actual_elif(self, node: (nodes.If | nodes.Try)) ->bool:
+    def _init(self) -> None:
+        """Reset the per-module state accumulated while checking one module."""
+        self._nested_blocks: list[NodesWithNestedBlocks] = []
+        self._elifs: list[tuple[int, int]] = []
+        self._reported_swap_nodes: set[nodes.NodeNG] = set()
+        self._can_simplify_bool_op: bool = False
+        self._consider_using_with_stack.clear_all()
+
+    def open(self) -> None:
+        """Cache configuration-derived values before linting starts."""
+        # do this in open since config not fully initialized in __init__
+        self._never_returning_functions = set(
+            self.linter.config.never_returning_functions
+        )
+        self._suggest_join_with_non_empty_separator = (
+            self.linter.config.suggest_join_with_non_empty_separator
+        )
+
+    @cached_property
+    def _dummy_rgx(self) -> Pattern[str]:
+        """Pattern matching names configured as "dummy" (ignorable) variables."""
+        return self.linter.config.dummy_variables_rgx  # type: ignore[no-any-return]
+
+    @staticmethod
+    def _is_bool_const(node: nodes.Return | nodes.Assign) -> bool:
+        """Return True if the node's value is a literal ``True`` or ``False``."""
+        return isinstance(node.value, nodes.Const) and isinstance(
+            node.value.value, bool
+        )
+
+    def _is_actual_elif(self, node: nodes.If | nodes.Try) -> bool:
         """Check if the given node is an actual elif.

         This is a problem we're having with the builtin ast module,
@@ -255,9 +569,15 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         Unfortunately we need to know the exact type in certain
         cases.
         """
-        pass
-
-    def _check_simplifiable_if(self, node: nodes.If) ->None:
+        if isinstance(node.parent, nodes.If):
+            orelse = node.parent.orelse
+            # current if node must directly follow an "else"
+            if orelse and orelse == [node]:
+                if (node.lineno, node.col_offset) in self._elifs:
+                    return True
+        return False
+
+    def _check_simplifiable_if(self, node: nodes.If) -> None:
         """Check if the given if node can be simplified.

         The if statement can be reduced to a boolean expression
@@ -266,25 +586,640 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         the result of the statement's test, then this can be reduced
         to `bool(test)` without losing any functionality.
         """
-        pass
+        if self._is_actual_elif(node):
+            # Not interested in if statements with multiple branches.
+            return
+        if len(node.orelse) != 1 or len(node.body) != 1:
+            return
+
+        # Check if both branches can be reduced.
+        first_branch = node.body[0]
+        else_branch = node.orelse[0]
+        if isinstance(first_branch, nodes.Return):
+            if not isinstance(else_branch, nodes.Return):
+                return
+            first_branch_is_bool = self._is_bool_const(first_branch)
+            else_branch_is_bool = self._is_bool_const(else_branch)
+            reduced_to = "'return bool(test)'"
+        elif isinstance(first_branch, nodes.Assign):
+            if not isinstance(else_branch, nodes.Assign):
+                return
+
+            # Check if we assign to the same value
+            first_branch_targets = [
+                target.name
+                for target in first_branch.targets
+                if isinstance(target, nodes.AssignName)
+            ]
+            else_branch_targets = [
+                target.name
+                for target in else_branch.targets
+                if isinstance(target, nodes.AssignName)
+            ]
+            if not first_branch_targets or not else_branch_targets:
+                return
+            if sorted(first_branch_targets) != sorted(else_branch_targets):
+                return
+
+            first_branch_is_bool = self._is_bool_const(first_branch)
+            else_branch_is_bool = self._is_bool_const(else_branch)
+            reduced_to = "'var = bool(test)'"
+        else:
+            return
+
+        # Both branches must be literal booleans for the rewrite to apply.
+        if not first_branch_is_bool or not else_branch_is_bool:
+            return
+        if not first_branch.value.value:
+            # This is a case that can't be easily simplified and
+            # if it can be simplified, it will usually result in a
+            # code that's harder to understand and comprehend.
+            # Let's take for instance `arg and arg <= 3`. This could theoretically be
+            # reduced to `not arg or arg > 3`, but the net result is that now the
+            # condition is harder to understand, because it requires understanding of
+            # an extra clause:
+            #   * first, there is the negation of truthness with `not arg`
+            #   * the second clause is `arg > 3`, which occurs when arg has a
+            #     a truth value, but it implies that `arg > 3` is equivalent
+            #     with `arg and arg > 3`, which means that the user must
+            #     think about this assumption when evaluating `arg > 3`.
+            #     The original form is easier to grasp.
+            return
+
+        self.add_message("simplifiable-if-statement", node=node, args=(reduced_to,))
+
+    def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
+        """Record "elif" token positions and check for trailing comma tuples."""
+        # Optimization flag because '_is_trailing_comma' is costly
+        trailing_comma_tuple_enabled_for_file = self.linter.is_message_enabled(
+            "trailing-comma-tuple"
+        )
+        trailing_comma_tuple_enabled_once: bool = trailing_comma_tuple_enabled_for_file
+        # Process tokens and look for 'if' or 'elif'
+        for index, token in enumerate(tokens):
+            token_string = token[1]
+            if (
+                not trailing_comma_tuple_enabled_once
+                and token_string.startswith("#")
+                # We have at least 1 '#' (one char) at the start of the token
+                and "pylint:" in token_string[1:]
+                # We have at least '#' 'pylint' ( + ':') (8 chars) at the start of the token
+                and "enable" in token_string[8:]
+                # We have at least '#', 'pylint', ( + ':'), 'enable' (+ '=') (15 chars) at
+                # the start of the token
+                and any(
+                    c in token_string[15:] for c in ("trailing-comma-tuple", "R1707")
+                )
+            ):
+                # Way to not have to check if "trailing-comma-tuple" is enabled or
+                # disabled on each line: Any enable for it during tokenization and
+                # we'll start using the costly '_is_trailing_comma' to check if we
+                # need to raise the message. We still won't raise if it's disabled
+                # again due to the usual generic message control handling later.
+                trailing_comma_tuple_enabled_once = True
+            if token_string == "elif":
+                # AST exists by the time process_tokens is called, so
+                # it's safe to assume tokens[index+1] exists.
+                # tokens[index+1][2] is the elif's position as
+                # reported by CPython and PyPy,
+                # token[2] is the actual position and also is
+                # reported by IronPython.
+                self._elifs.extend([token[2], tokens[index + 1][2]])
+            elif (
+                trailing_comma_tuple_enabled_for_file
+                or trailing_comma_tuple_enabled_once
+            ) and _is_trailing_comma(tokens, index):
+                # If "trailing-comma-tuple" is enabled globally we always check _is_trailing_comma
+                # it might be for nothing if there's a local disable, or if the message control is
+                # not enabling 'trailing-comma-tuple', but the alternative is having to check if
+                # it's enabled for a line each line (just to avoid calling '_is_trailing_comma').
+                self.add_message(
+                    "trailing-comma-tuple", line=token.start[0], confidence=HIGH
+                )
+
+    @utils.only_required_for_messages("consider-using-with")
+    def leave_module(self, _: nodes.Module) -> None:
+        """Flush module-scope checks and reset state for the next module."""
+        # check for context managers that have been created but not used
+        self._emit_consider_using_with_if_needed(
+            self._consider_using_with_stack.module_scope
+        )
+        self._init()
+
+    @utils.only_required_for_messages("too-many-nested-blocks", "no-else-return")
+    def visit_try(self, node: nodes.Try) -> None:
+        # Shared with ``while`` statements via the alias below.
+        self._check_nested_blocks(node)
+
+        self._check_superfluous_else_return(node)
+        self._check_superfluous_else_raise(node)
+
     visit_while = visit_try

-    def _check_consider_using_min_max_builtin(self, node: nodes.If) ->None:
-        """Check if the given if node can be refactored as a min/max python builtin."""
-        pass
+    def _check_redefined_argument_from_local(self, name_node: nodes.AssignName) -> None:
+        # Emit redefined-argument-from-local when a local binding (loop target,
+        # except name, with name) shadows one of the enclosing function's arguments.
+        if self._dummy_rgx and self._dummy_rgx.match(name_node.name):
+            return
+        if not name_node.lineno:
+            # Unknown position, maybe it is a manually built AST?
+            return
+
+        scope = name_node.scope()
+        if not isinstance(scope, nodes.FunctionDef):
+            return
+
+        for defined_argument in scope.args.nodes_of_class(
+            nodes.AssignName, skip_klass=(nodes.Lambda,)
+        ):
+            if defined_argument.name == name_node.name:
+                self.add_message(
+                    "redefined-argument-from-local",
+                    node=name_node,
+                    args=(name_node.name,),
+                )
+
+    @utils.only_required_for_messages(
+        "redefined-argument-from-local",
+        "too-many-nested-blocks",
+        "unnecessary-dict-index-lookup",
+        "unnecessary-list-index-lookup",
+    )
+    def visit_for(self, node: nodes.For) -> None:
+        """Run nesting, index-lookup and argument-shadowing checks on for loops."""
+        self._check_nested_blocks(node)
+        self._check_unnecessary_dict_index_lookup(node)
+        self._check_unnecessary_list_index_lookup(node)
+
+        for name in node.target.nodes_of_class(nodes.AssignName):
+            self._check_redefined_argument_from_local(name)
+
+    @utils.only_required_for_messages("redefined-argument-from-local")
+    def visit_excepthandler(self, node: nodes.ExceptHandler) -> None:
+        # ``except ... as name`` binds a local that may shadow an argument.
+        if node.name and isinstance(node.name, nodes.AssignName):
+            self._check_redefined_argument_from_local(node.name)
+
+    @utils.only_required_for_messages(
+        "redefined-argument-from-local", "consider-using-with"
+    )
+    def visit_with(self, node: nodes.With) -> None:
+        # Using a previously stored context manager in a ``with`` removes it
+        # from the consider-using-with stacks.
+        for var, names in node.items:
+            if isinstance(var, nodes.Name):
+                for stack in self._consider_using_with_stack:
+                    # We don't need to restrict the stacks we search to the current scope and
+                    # outer scopes, as e.g. the function_scope stack will be empty when we
+                    # check a ``with`` on the class level.
+                    if var.name in stack:
+                        del stack[var.name]
+                        break
+            if not names:
+                continue
+            for name in names.nodes_of_class(nodes.AssignName):
+                self._check_redefined_argument_from_local(name)
+
+    def _check_superfluous_else(
+        self,
+        node: nodes.If | nodes.Try,
+        msg_id: str,
+        returning_node_class: nodes.NodeNG,
+    ) -> None:
+        # Shared implementation behind no-else-return/raise/break/continue.
+        if isinstance(node, nodes.Try) and node.finalbody:
+            # Not interested in try/except/else/finally statements.
+            return
+
+        if not node.orelse:
+            # Not interested in if/try statements without else.
+            return
+
+        if self._is_actual_elif(node):
+            # Not interested in elif nodes; only if
+            return
+
+        if (
+            isinstance(node, nodes.If)
+            and _if_statement_is_always_returning(node, returning_node_class)
+        ) or (
+            isinstance(node, nodes.Try)
+            and not node.finalbody
+            and _except_statement_is_always_returning(node, returning_node_class)
+        ):
+            orelse = node.orelse[0]
+            # Tailor the suggestion depending on whether the else is an elif.
+            if (orelse.lineno, orelse.col_offset) in self._elifs:
+                args = ("elif", 'remove the leading "el" from "elif"')
+            else:
+                args = ("else", 'remove the "else" and de-indent the code inside it')
+            self.add_message(msg_id, node=node, args=args, confidence=HIGH)
+
+    def _check_superfluous_else_return(self, node: nodes.If) -> None:
+        # Specialization of _check_superfluous_else for "return" branches.
+        return self._check_superfluous_else(
+            node, msg_id="no-else-return", returning_node_class=nodes.Return
+        )
+
+    def _check_superfluous_else_raise(self, node: nodes.If) -> None:
+        # Specialization of _check_superfluous_else for "raise" branches.
+        return self._check_superfluous_else(
+            node, msg_id="no-else-raise", returning_node_class=nodes.Raise
+        )
+
+    def _check_superfluous_else_break(self, node: nodes.If) -> None:
+        # Specialization of _check_superfluous_else for "break" branches.
+        return self._check_superfluous_else(
+            node, msg_id="no-else-break", returning_node_class=nodes.Break
+        )
+
+    def _check_superfluous_else_continue(self, node: nodes.If) -> None:
+        # Specialization of _check_superfluous_else for "continue" branches.
+        return self._check_superfluous_else(
+            node, msg_id="no-else-continue", returning_node_class=nodes.Continue
+        )

-    def _check_stop_iteration_inside_generator(self, node: nodes.Raise) ->None:
+    @staticmethod
+    def _type_and_name_are_equal(node_a: Any, node_b: Any) -> bool:
+        # Compare Name/Name and AssignName/AssignName pairs by name,
+        # Const/Const pairs by value; anything else does not match.
+        if isinstance(node_a, nodes.Name) and isinstance(node_b, nodes.Name):
+            return node_a.name == node_b.name  # type: ignore[no-any-return]
+        if isinstance(node_a, nodes.AssignName) and isinstance(
+            node_b, nodes.AssignName
+        ):
+            return node_a.name == node_b.name  # type: ignore[no-any-return]
+        if isinstance(node_a, nodes.Const) and isinstance(node_b, nodes.Const):
+            return node_a.value == node_b.value  # type: ignore[no-any-return]
+        return False
+
+    def _is_dict_get_block(self, node: nodes.If) -> bool:
+        """Check whether the if-block is a dict membership test followed by a
+        subscript assignment (a candidate for ``dict.get``)."""
+        # "if <compare node>"
+        if not isinstance(node.test, nodes.Compare):
+            return False
+
+        # Does not have a single statement in the guard's body
+        if len(node.body) != 1:
+            return False
+
+        # Look for a single variable assignment on the LHS and a subscript on RHS
+        stmt = node.body[0]
+        if not (
+            isinstance(stmt, nodes.Assign)
+            and len(node.body[0].targets) == 1
+            and isinstance(node.body[0].targets[0], nodes.AssignName)
+            and isinstance(stmt.value, nodes.Subscript)
+        ):
+            return False
+
+        # The subscript's slice needs to be the same as the test variable.
+        slice_value = stmt.value.slice
+        if not (
+            self._type_and_name_are_equal(stmt.value.value, node.test.ops[0][1])
+            and self._type_and_name_are_equal(slice_value, node.test.left)
+        ):
+            return False
+
+        # The object needs to be a dictionary instance
+        return isinstance(utils.safe_infer(node.test.ops[0][1]), nodes.Dict)
+
+    def _check_consider_get(self, node: nodes.If) -> None:
+        # Flag when there is either no else branch, or an else that assigns
+        # (a default) to the very same target as the if-body assignment.
+        if_block_ok = self._is_dict_get_block(node)
+        if if_block_ok and not node.orelse:
+            self.add_message("consider-using-get", node=node)
+        elif (
+            if_block_ok
+            and len(node.orelse) == 1
+            and isinstance(node.orelse[0], nodes.Assign)
+            and self._type_and_name_are_equal(
+                node.orelse[0].targets[0], node.body[0].targets[0]
+            )
+            and len(node.orelse[0].targets) == 1
+        ):
+            self.add_message("consider-using-get", node=node)
+
+    @utils.only_required_for_messages(
+        "too-many-nested-blocks",
+        "simplifiable-if-statement",
+        "no-else-return",
+        "no-else-raise",
+        "no-else-break",
+        "no-else-continue",
+        "consider-using-get",
+        "consider-using-min-builtin",
+        "consider-using-max-builtin",
+    )
+    def visit_if(self, node: nodes.If) -> None:
+        """Dispatch all If-node based refactoring checks."""
+        self._check_simplifiable_if(node)
+        self._check_nested_blocks(node)
+        self._check_superfluous_else_return(node)
+        self._check_superfluous_else_raise(node)
+        self._check_superfluous_else_break(node)
+        self._check_superfluous_else_continue(node)
+        self._check_consider_get(node)
+        self._check_consider_using_min_max_builtin(node)
+
+    def _check_consider_using_min_max_builtin(self, node: nodes.If) -> None:
+        """Check if the given if node can be refactored as a min/max python builtin."""
+        # This function is written expecting a test condition of form:
+        #  if a < b: # [consider-using-max-builtin]
+        #    a = b
+        #  if a > b: # [consider-using-min-builtin]
+        #    a = b
+        if self._is_actual_elif(node) or node.orelse:
+            # Not interested in if statements with multiple branches.
+            return
+
+        if len(node.body) != 1:
+            return
+
+        def get_node_name(node: nodes.NodeNG) -> str:
+            """Obtain simplest representation of a node as a string."""
+            if isinstance(node, nodes.Name):
+                return node.name  # type: ignore[no-any-return]
+            if isinstance(node, nodes.Const):
+                return str(node.value)
+            # this is a catch-all for nodes that are not of type Name or Const
+            # extremely helpful for Call or BinOp
+            return node.as_string()  # type: ignore[no-any-return]
+
+        body = node.body[0]
+        # Check if condition can be reduced.
+        if not hasattr(body, "targets") or len(body.targets) != 1:
+            return
+
+        target = body.targets[0]
+        if not (
+            isinstance(node.test, nodes.Compare)
+            and not isinstance(target, nodes.Subscript)
+            and not isinstance(node.test.left, nodes.Subscript)
+            and isinstance(body, nodes.Assign)
+        ):
+            return
+        # Assign body line has one requirement and that is the assign target
+        # is of type name or attribute. Attribute referring to NamedTuple.x perse.
+        # So we have to check that target is of these types
+
+        if not (hasattr(target, "name") or hasattr(target, "attrname")):
+            return
+
+        target_assignation = get_node_name(target)
+
+        if len(node.test.ops) > 1:
+            # Chained comparisons (a < b < c) are out of scope.
+            return
+        operator, right_statement = node.test.ops[0]
+
+        body_value = get_node_name(body.value)
+        left_operand = get_node_name(node.test.left)
+        right_statement_value = get_node_name(right_statement)
+
+        if left_operand == target_assignation:
+            # statement is in expected form
+            pass
+        elif right_statement_value == target_assignation:
+            # statement is in reverse form
+            operator = utils.get_inverse_comparator(operator)
+        else:
+            return
+
+        if body_value not in (right_statement_value, left_operand):
+            return
+
+        if operator in {"<", "<="}:
+            reduced_to = (
+                f"{target_assignation} = max({target_assignation}, {body_value})"
+            )
+            self.add_message(
+                "consider-using-max-builtin", node=node, args=(reduced_to,)
+            )
+        elif operator in {">", ">="}:
+            reduced_to = (
+                f"{target_assignation} = min({target_assignation}, {body_value})"
+            )
+            self.add_message(
+                "consider-using-min-builtin", node=node, args=(reduced_to,)
+            )
+
+    @utils.only_required_for_messages("simplifiable-if-expression")
+    def visit_ifexp(self, node: nodes.IfExp) -> None:
+        """Check ternary expressions with boolean-constant branches."""
+        self._check_simplifiable_ifexp(node)
+
+    def _check_simplifiable_ifexp(self, node: nodes.IfExp) -> None:
+        # Only literal True/False branches can be collapsed to `test`/`not test`.
+        if not isinstance(node.body, nodes.Const) or not isinstance(
+            node.orelse, nodes.Const
+        ):
+            return
+
+        if not isinstance(node.body.value, bool) or not isinstance(
+            node.orelse.value, bool
+        ):
+            return
+
+        # A comparison already yields a bool; otherwise suggest wrapping in bool().
+        if isinstance(node.test, nodes.Compare):
+            test_reduced_to = "test"
+        else:
+            test_reduced_to = "bool(test)"
+
+        if (node.body.value, node.orelse.value) == (True, False):
+            reduced_to = f"'{test_reduced_to}'"
+        elif (node.body.value, node.orelse.value) == (False, True):
+            reduced_to = "'not test'"
+        else:
+            return
+
+        self.add_message("simplifiable-if-expression", node=node, args=(reduced_to,))
+
+    @utils.only_required_for_messages(
+        "too-many-nested-blocks",
+        "inconsistent-return-statements",
+        "useless-return",
+        "consider-using-with",
+    )
+    def leave_functiondef(self, node: nodes.FunctionDef) -> None:
+        """Run function-scope checks and reset function-scope state."""
+        # check left-over nested blocks stack
+        self._emit_nested_blocks_message_if_needed(self._nested_blocks)
+        # new scope = reinitialize the stack of nested blocks
+        self._nested_blocks = []
+        # check consistent return statements
+        self._check_consistent_returns(node)
+        # check for single return or return None at the end
+        self._check_return_at_the_end(node)
+        self._return_nodes[node.name] = []
+        # check for context managers that have been created but not used
+        self._emit_consider_using_with_if_needed(
+            self._consider_using_with_stack.function_scope
+        )
+        self._consider_using_with_stack.function_scope.clear()
+
+    @utils.only_required_for_messages("consider-using-with")
+    def leave_classdef(self, _: nodes.ClassDef) -> None:
+        """Flush and reset the class-scope consider-using-with stack."""
+        # check for context managers that have been created but not used
+        self._emit_consider_using_with_if_needed(
+            self._consider_using_with_stack.class_scope
+        )
+        self._consider_using_with_stack.class_scope.clear()
+
+    @utils.only_required_for_messages("stop-iteration-return")
+    def visit_raise(self, node: nodes.Raise) -> None:
+        """Check raise statements for StopIteration inside generators."""
+        self._check_stop_iteration_inside_generator(node)
+
+    def _check_stop_iteration_inside_generator(self, node: nodes.Raise) -> None:
         """Check if an exception of type StopIteration is raised inside a generator."""
-        pass
+        frame = node.frame()
+        if not isinstance(frame, nodes.FunctionDef) or not frame.is_generator():
+            return
+        if utils.node_ignores_exception(node, StopIteration):
+            return
+        # A bare ``raise`` (re-raise) has no ``exc`` and is ignored.
+        if not node.exc:
+            return
+        exc = utils.safe_infer(node.exc)
+        if not exc or not isinstance(exc, (bases.Instance, nodes.ClassDef)):
+            return
+        if self._check_exception_inherit_from_stopiteration(exc):
+            self.add_message("stop-iteration-return", node=node, confidence=INFERENCE)

     @staticmethod
-    def _check_exception_inherit_from_stopiteration(exc: (nodes.ClassDef |
-        bases.Instance)) ->bool:
+    def _check_exception_inherit_from_stopiteration(
+        exc: nodes.ClassDef | bases.Instance,
+    ) -> bool:
         """Return True if the exception node in argument inherit from StopIteration."""
-        pass
+        # Walk the MRO so subclasses of StopIteration are caught as well.
+        stopiteration_qname = f"{utils.EXCEPTIONS_MODULE}.StopIteration"
+        return any(_class.qname() == stopiteration_qname for _class in exc.mro())
+
+    def _check_consider_using_comprehension_constructor(self, node: nodes.Call) -> None:
+        """Flag ``dict([...])`` / ``set([...])`` called on a list comprehension."""
+        if (
+            isinstance(node.func, nodes.Name)
+            and node.args
+            and isinstance(node.args[0], nodes.ListComp)
+        ):
+            if node.func.name == "dict":
+                element = node.args[0].elt
+                if isinstance(element, nodes.Call):
+                    return
+
+                # If we have an `IfExp` here where both the key AND value
+                # are different, then don't raise the issue. See #5588
+                if (
+                    isinstance(element, nodes.IfExp)
+                    and isinstance(element.body, (nodes.Tuple, nodes.List))
+                    and len(element.body.elts) == 2
+                    and isinstance(element.orelse, (nodes.Tuple, nodes.List))
+                    and len(element.orelse.elts) == 2
+                ):
+                    key1, value1 = element.body.elts
+                    key2, value2 = element.orelse.elts
+                    if (
+                        key1.as_string() != key2.as_string()
+                        and value1.as_string() != value2.as_string()
+                    ):
+                        return
+
+                message_name = "consider-using-dict-comprehension"
+                self.add_message(message_name, node=node)
+            elif node.func.name == "set":
+                message_name = "consider-using-set-comprehension"
+                self.add_message(message_name, node=node)
+
+    def _check_consider_using_generator(self, node: nodes.Call) -> None:
+        """Flag a list comprehension passed to any/all/sum/max/min/list/tuple."""
+        # 'any', 'all', definitely should use generator, while 'list', 'tuple',
+        # 'sum', 'max', and 'min' need to be considered first
+        # See https://github.com/pylint-dev/pylint/pull/3309#discussion_r576683109
+        # https://github.com/pylint-dev/pylint/pull/6595#issuecomment-1125704244
+        # and https://peps.python.org/pep-0289/
+        checked_call = ["any", "all", "sum", "max", "min", "list", "tuple"]
+        if (
+            isinstance(node, nodes.Call)
+            and node.func
+            and isinstance(node.func, nodes.Name)
+            and node.func.name in checked_call
+        ):
+            # functions in checked_calls take exactly one positional argument
+            # check whether the argument is list comprehension
+            if len(node.args) == 1 and isinstance(node.args[0], nodes.ListComp):
+                # remove square brackets '[]'
+                inside_comp = node.args[0].as_string()[1:-1]
+                if node.keywords:
+                    inside_comp = f"({inside_comp})"
+                    inside_comp += ", "
+                    inside_comp += ", ".join(kw.as_string() for kw in node.keywords)
+                call_name = node.func.name
+                if call_name in {"any", "all"}:
+                    self.add_message(
+                        "use-a-generator",
+                        node=node,
+                        args=(call_name, inside_comp),
+                    )
+                else:
+                    self.add_message(
+                        "consider-using-generator",
+                        node=node,
+                        args=(call_name, inside_comp),
+                    )
+
+    @utils.only_required_for_messages(
+        "stop-iteration-return",
+        "consider-using-dict-comprehension",
+        "consider-using-set-comprehension",
+        "consider-using-sys-exit",
+        "super-with-arguments",
+        "consider-using-generator",
+        "consider-using-with",
+        "use-list-literal",
+        "use-dict-literal",
+        "use-a-generator",
+    )
+    def visit_call(self, node: nodes.Call) -> None:
+        """Dispatch all Call-node based refactoring checks."""
+        self._check_raising_stopiteration_in_generator_next_call(node)
+        self._check_consider_using_comprehension_constructor(node)
+        self._check_quit_exit_call(node)
+        self._check_super_with_arguments(node)
+        self._check_consider_using_generator(node)
+        self._check_consider_using_with(node)
+        self._check_use_list_literal(node)
+        self._check_use_dict_literal(node)
+
+    @utils.only_required_for_messages("use-yield-from")
+    def visit_yield(self, node: nodes.Yield) -> None:
+        """Check for a bare for-loop yield replaceable by ``yield from``."""
+        if not isinstance(node.value, nodes.Name):
+            return
+
+        loop_node = node.parent.parent
+        if (
+            not isinstance(loop_node, nodes.For)
+            or isinstance(loop_node, nodes.AsyncFor)
+            or len(loop_node.body) != 1
+            # Avoid a false positive if the return value from `yield` is used,
+            # (such as via Assign, AugAssign, etc).
+            or not isinstance(node.parent, nodes.Expr)
+        ):
+            return
+
+        if loop_node.target.name != node.value.name:
+            return
+
+        if isinstance(node.frame(), nodes.AsyncFunctionDef):
+            return
+
+        self.add_message("use-yield-from", node=loop_node, confidence=HIGH)

-    def _check_raising_stopiteration_in_generator_next_call(self, node:
-        nodes.Call) ->None:
+    @staticmethod
+    def _has_exit_in_scope(scope: nodes.LocalsDictNodeNG) -> bool:
+        # True when ``exit`` was brought into this scope by an import
+        # (e.g. ``from sys import exit``), shadowing the builtin.
+        exit_func = scope.locals.get("exit")
+        return bool(
+            exit_func and isinstance(exit_func[0], (nodes.ImportFrom, nodes.Import))
+        )
+
+    def _check_quit_exit_call(self, node: nodes.Call) -> None:
+        """Suggest sys.exit over the interactive-only quit()/exit() builtins."""
+        if isinstance(node.func, nodes.Name) and node.func.name in BUILTIN_EXIT_FUNCS:
+            # If we have `exit` imported from `sys` in the current or global scope,
+            # exempt this instance.
+            local_scope = node.scope()
+            if self._has_exit_in_scope(local_scope) or self._has_exit_in_scope(
+                node.root()
+            ):
+                return
+            self.add_message("consider-using-sys-exit", node=node, confidence=HIGH)
+
+    def _check_super_with_arguments(self, node: nodes.Call) -> None:
+        # Only flag the redundant form ``super(CurrentClass, self)`` which is
+        # equivalent to a zero-argument ``super()`` call.
+        if not isinstance(node.func, nodes.Name) or node.func.name != "super":
+            return
+
+        if (
+            len(node.args) != 2
+            or not all(isinstance(arg, nodes.Name) for arg in node.args)
+            or node.args[1].name != "self"
+            or (frame_class := node_frame_class(node)) is None
+            or node.args[0].name != frame_class.name
+        ):
+            return
+
+        self.add_message("super-with-arguments", node=node)
+
+    def _check_raising_stopiteration_in_generator_next_call(
+        self, node: nodes.Call
+    ) -> None:
         """Check if a StopIteration exception is raised by the call to next function.

         If the next value has a default value, then do not add message.
@@ -292,14 +1227,90 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         :param node: Check to see if this Call node is a next function
         :type node: :class:`nodes.Call`
         """
-        pass

-    def _check_nested_blocks(self, node: NodesWithNestedBlocks) ->None:
+        def _looks_like_infinite_iterator(param: nodes.NodeNG) -> bool:
+            # Iterators like itertools.count() can never raise StopIteration.
+            inferred = utils.safe_infer(param)
+            if isinstance(inferred, bases.Instance):
+                return inferred.qname() in KNOWN_INFINITE_ITERATORS
+            return False
+
+        if isinstance(node.func, nodes.Attribute):
+            # A next() method, which is not what we want.
+            return
+
+        if len(node.args) == 0:
+            # handle case when builtin.next is called without args.
+            # see https://github.com/pylint-dev/pylint/issues/7828
+            return
+
+        inferred = utils.safe_infer(node.func)
+
+        if (
+            isinstance(inferred, nodes.FunctionDef)
+            and inferred.qname() == "builtins.next"
+        ):
+            frame = node.frame()
+            # The next builtin can only have up to two
+            # positional arguments and no keyword arguments
+            has_sentinel_value = len(node.args) > 1
+            if (
+                isinstance(frame, nodes.FunctionDef)
+                and frame.is_generator()
+                and not has_sentinel_value
+                and not utils.node_ignores_exception(node, StopIteration)
+                and not _looks_like_infinite_iterator(node.args[0])
+            ):
+                self.add_message(
+                    "stop-iteration-return", node=node, confidence=INFERENCE
+                )
+
+    def _check_nested_blocks(
+        self,
+        node: NodesWithNestedBlocks,
+    ) -> None:
         """Update and check the number of nested blocks."""
-        pass
+        # only check block levels inside functions or methods
+        if not isinstance(node.scope(), nodes.FunctionDef):
+            return
+        # messages are triggered on leaving the nested block. Here we save the
+        # stack in case the current node isn't nested in the previous one
+        # (the saved copy lets us detect that a nesting group was just left)
+        nested_blocks = self._nested_blocks[:]
+        if node.parent == node.scope():
+            self._nested_blocks = [node]
+        else:
+            # go through ancestors from the most nested to the less
+            for ancestor_node in reversed(self._nested_blocks):
+                if ancestor_node == node.parent:
+                    break
+                self._nested_blocks.pop()
+            # if the node is an elif, this should not be another nesting level
+            if isinstance(node, nodes.If) and self._is_actual_elif(node):
+                if self._nested_blocks:
+                    self._nested_blocks.pop()
+            self._nested_blocks.append(node)
+
+        # send message only once per group of nested blocks
+        if len(nested_blocks) > len(self._nested_blocks):
+            self._emit_nested_blocks_message_if_needed(nested_blocks)
+
+    def _emit_nested_blocks_message_if_needed(
+        self, nested_blocks: list[NodesWithNestedBlocks]
+    ) -> None:
+        # Report on the outermost block of the group when the configured
+        # maximum nesting depth is exceeded.
+        if len(nested_blocks) > self.linter.config.max_nested_blocks:
+            self.add_message(
+                "too-many-nested-blocks",
+                node=nested_blocks[0],
+                args=(len(nested_blocks), self.linter.config.max_nested_blocks),
+            )
+
+    def _emit_consider_using_with_if_needed(
+        self, stack: dict[str, nodes.NodeNG]
+    ) -> None:
+        # Any node left in the stack was never consumed by a ``with`` block.
+        for node in stack.values():
+            self.add_message("consider-using-with", node=node)

     @staticmethod
-    def _duplicated_isinstance_types(node: nodes.BoolOp) ->dict[str, set[str]]:
+    def _duplicated_isinstance_types(node: nodes.BoolOp) -> dict[str, set[str]]:
         """Get the duplicated types from the underlying isinstance calls.

         :param nodes.BoolOp node: Node which should contain a bunch of isinstance calls.
@@ -307,13 +1318,100 @@ class RefactoringChecker(checkers.BaseTokenChecker):
                   to duplicate values from consecutive calls.
         :rtype: dict
         """
-        pass
+        duplicated_objects: set[str] = set()
+        all_types: collections.defaultdict[str, set[str]] = collections.defaultdict(set)

-    def _check_consider_merging_isinstance(self, node: nodes.BoolOp) ->None:
-        """Check isinstance calls which can be merged together."""
-        pass
+        for call in node.values:
+            if not isinstance(call, nodes.Call) or len(call.args) != 2:
+                continue
+
+            inferred = utils.safe_infer(call.func)
+            if not inferred or not utils.is_builtin_object(inferred):
+                continue
+
+            if inferred.name != "isinstance":
+                continue

-    def _check_chained_comparison(self, node: nodes.BoolOp) ->None:
+            isinstance_object = call.args[0].as_string()
+            isinstance_types = call.args[1]
+
+            if isinstance_object in all_types:
+                duplicated_objects.add(isinstance_object)
+
+            if isinstance(isinstance_types, nodes.Tuple):
+                elems = [
+                    class_type.as_string() for class_type in isinstance_types.itered()
+                ]
+            else:
+                elems = [isinstance_types.as_string()]
+            all_types[isinstance_object].update(elems)
+
+        # Remove all keys which are not duplicated
+        return {
+            key: value for key, value in all_types.items() if key in duplicated_objects
+        }
+
+    def _check_consider_merging_isinstance(self, node: nodes.BoolOp) -> None:
+        """Check isinstance calls which can be merged together."""
+        if node.op != "or":
+            return
+
+        first_args = self._duplicated_isinstance_types(node)
+        for duplicated_name, class_names in first_args.items():
+            names = sorted(name for name in class_names)
+            self.add_message(
+                "consider-merging-isinstance",
+                node=node,
+                args=(duplicated_name, ", ".join(names)),
+            )
+
+    def _check_consider_using_in(self, node: nodes.BoolOp) -> None:
+        allowed_ops = {"or": "==", "and": "!="}
+
+        if node.op not in allowed_ops or len(node.values) < 2:
+            return
+
+        for value in node.values:
+            if (
+                not isinstance(value, nodes.Compare)
+                or len(value.ops) != 1
+                or value.ops[0][0] not in allowed_ops[node.op]
+            ):
+                return
+            for comparable in value.left, value.ops[0][1]:
+                if isinstance(comparable, nodes.Call):
+                    return
+
+        # Gather variables and values from comparisons
+        variables, values = [], []
+        for value in node.values:
+            variable_set = set()
+            for comparable in value.left, value.ops[0][1]:
+                if isinstance(comparable, (nodes.Name, nodes.Attribute)):
+                    variable_set.add(comparable.as_string())
+                values.append(comparable.as_string())
+            variables.append(variable_set)
+
+        # Look for (common-)variables that occur in all comparisons
+        common_variables = reduce(lambda a, b: a.intersection(b), variables)
+
+        if not common_variables:
+            return
+
+        # Gather information for the suggestion
+        common_variable = sorted(list(common_variables))[0]
+        values = list(collections.OrderedDict.fromkeys(values))
+        values.remove(common_variable)
+        values_string = ", ".join(values) if len(values) != 1 else values[0] + ","
+        maybe_not = "" if node.op == "or" else "not "
+        self.add_message(
+            "consider-using-in",
+            node=node,
+            args=(common_variable, maybe_not, values_string),
+            confidence=HIGH,
+        )
+
+    def _check_chained_comparison(self, node: nodes.BoolOp) -> None:
         """Check if there is any chained comparison in the expression.

         Add a refactoring message if a boolOp contains comparison like a < b and b < c,
@@ -321,11 +1419,58 @@ class RefactoringChecker(checkers.BaseTokenChecker):

         Care is taken to avoid simplifying a < b < c and b < d.
         """
-        pass
+        if node.op != "and" or len(node.values) < 2:
+            return
+
+        def _find_lower_upper_bounds(
+            comparison_node: nodes.Compare,
+            uses: collections.defaultdict[str, dict[str, set[nodes.Compare]]],
+        ) -> None:
+            left_operand = comparison_node.left
+            for operator, right_operand in comparison_node.ops:
+                for operand in (left_operand, right_operand):
+                    value = None
+                    if isinstance(operand, nodes.Name):
+                        value = operand.name
+                    elif isinstance(operand, nodes.Const):
+                        value = operand.value
+
+                    if value is None:
+                        continue
+
+                    if operator in {"<", "<="}:
+                        if operand is left_operand:
+                            uses[value]["lower_bound"].add(comparison_node)
+                        elif operand is right_operand:
+                            uses[value]["upper_bound"].add(comparison_node)
+                    elif operator in {">", ">="}:
+                        if operand is left_operand:
+                            uses[value]["upper_bound"].add(comparison_node)
+                        elif operand is right_operand:
+                            uses[value]["lower_bound"].add(comparison_node)
+                left_operand = right_operand
+
+        uses: collections.defaultdict[str, dict[str, set[nodes.Compare]]] = (
+            collections.defaultdict(
+                lambda: {"lower_bound": set(), "upper_bound": set()}
+            )
+        )
+        for comparison_node in node.values:
+            if isinstance(comparison_node, nodes.Compare):
+                _find_lower_upper_bounds(comparison_node, uses)
+
+        for bounds in uses.values():
+            num_shared = len(bounds["lower_bound"].intersection(bounds["upper_bound"]))
+            num_lower_bounds = len(bounds["lower_bound"])
+            num_upper_bounds = len(bounds["upper_bound"])
+            if num_shared < num_lower_bounds and num_shared < num_upper_bounds:
+                self.add_message("chained-comparison", node=node)
+                break

     @staticmethod
-    def _apply_boolean_simplification_rules(operator: str, values: list[
-        nodes.NodeNG]) ->list[nodes.NodeNG]:
+    def _apply_boolean_simplification_rules(
+        operator: str, values: list[nodes.NodeNG]
+    ) -> list[nodes.NodeNG]:
         """Removes irrelevant values or returns short-circuiting values.

         This function applies the following two rules:
@@ -335,43 +1480,294 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         2) False values in OR expressions are only relevant if all values are
            false, and the reverse for AND
         """
-        pass
+        simplified_values: list[nodes.NodeNG] = []
+
+        for subnode in values:
+            inferred_bool = None
+            if not next(subnode.nodes_of_class(nodes.Name), False):
+                inferred = utils.safe_infer(subnode)
+                if inferred:
+                    inferred_bool = inferred.bool_value()
+
+            if not isinstance(inferred_bool, bool):
+                simplified_values.append(subnode)
+            elif (operator == "or") == inferred_bool:
+                return [subnode]

-    def _simplify_boolean_operation(self, bool_op: nodes.BoolOp
-        ) ->nodes.BoolOp:
+        return simplified_values or [nodes.Const(operator == "and")]
+
+    def _simplify_boolean_operation(self, bool_op: nodes.BoolOp) -> nodes.BoolOp:
         """Attempts to simplify a boolean operation.

         Recursively applies simplification on the operator terms,
         and keeps track of whether reductions have been made.
         """
-        pass
-
-    def _check_simplifiable_condition(self, node: nodes.BoolOp) ->None:
+        children = list(bool_op.get_children())
+        intermediate = [
+            (
+                self._simplify_boolean_operation(child)
+                if isinstance(child, nodes.BoolOp)
+                else child
+            )
+            for child in children
+        ]
+        result = self._apply_boolean_simplification_rules(bool_op.op, intermediate)
+        if len(result) < len(children):
+            self._can_simplify_bool_op = True
+        if len(result) == 1:
+            return result[0]
+        simplified_bool_op = copy.copy(bool_op)
+        simplified_bool_op.postinit(result)
+        return simplified_bool_op
+
+    def _check_simplifiable_condition(self, node: nodes.BoolOp) -> None:
         """Check if a boolean condition can be simplified.

         Variables will not be simplified, even if the value can be inferred,
         and expressions like '3 + 4' will remain expanded.
         """
-        pass
+        if not utils.is_test_condition(node):
+            return
+
+        self._can_simplify_bool_op = False
+        simplified_expr = self._simplify_boolean_operation(node)
+
+        if not self._can_simplify_bool_op:
+            return
+
+        if not next(simplified_expr.nodes_of_class(nodes.Name), False):
+            self.add_message(
+                "condition-evals-to-constant",
+                node=node,
+                args=(node.as_string(), simplified_expr.as_string()),
+            )
+        else:
+            self.add_message(
+                "simplifiable-condition",
+                node=node,
+                args=(node.as_string(), simplified_expr.as_string()),
+            )
+
+    @utils.only_required_for_messages(
+        "consider-merging-isinstance",
+        "consider-using-in",
+        "chained-comparison",
+        "simplifiable-condition",
+        "condition-evals-to-constant",
+    )
+    def visit_boolop(self, node: nodes.BoolOp) -> None:
+        self._check_consider_merging_isinstance(node)
+        self._check_consider_using_in(node)
+        self._check_chained_comparison(node)
+        self._check_simplifiable_condition(node)

-    def _check_use_list_literal(self, node: nodes.Call) ->None:
+    @staticmethod
+    def _is_simple_assignment(node: nodes.NodeNG | None) -> bool:
+        return (
+            isinstance(node, nodes.Assign)
+            and len(node.targets) == 1
+            and isinstance(node.targets[0], nodes.AssignName)
+            and isinstance(node.value, nodes.Name)
+        )
+
+    def _check_swap_variables(self, node: nodes.Return | nodes.Assign) -> None:
+        if not node.next_sibling() or not node.next_sibling().next_sibling():
+            return
+        assignments = [node, node.next_sibling(), node.next_sibling().next_sibling()]
+        if not all(self._is_simple_assignment(node) for node in assignments):
+            return
+        if any(node in self._reported_swap_nodes for node in assignments):
+            return
+        left = [node.targets[0].name for node in assignments]
+        right = [node.value.name for node in assignments]
+        if left[0] == right[-1] and left[1:] == right[:-1]:
+            self._reported_swap_nodes.update(assignments)
+            message = "consider-swap-variables"
+            self.add_message(message, node=node)
+
+    @utils.only_required_for_messages(
+        "simplify-boolean-expression",
+        "consider-using-ternary",
+        "consider-swap-variables",
+        "consider-using-with",
+    )
+    def visit_assign(self, node: nodes.Assign) -> None:
+        self._append_context_managers_to_stack(node)
+        self.visit_return(node)  # remaining checks are identical as for return nodes
+
+    @utils.only_required_for_messages(
+        "simplify-boolean-expression",
+        "consider-using-ternary",
+        "consider-swap-variables",
+    )
+    def visit_return(self, node: nodes.Return | nodes.Assign) -> None:
+        self._check_swap_variables(node)
+        if self._is_and_or_ternary(node.value):
+            cond, truth_value, false_value = self._and_or_ternary_arguments(node.value)
+        else:
+            return
+
+        if all(
+            isinstance(value, nodes.Compare) for value in (truth_value, false_value)
+        ):
+            return
+
+        inferred_truth_value = utils.safe_infer(truth_value, compare_constants=True)
+        if inferred_truth_value is None or isinstance(
+            inferred_truth_value, UninferableBase
+        ):
+            return
+        truth_boolean_value = inferred_truth_value.bool_value()
+
+        if truth_boolean_value is False:
+            message = "simplify-boolean-expression"
+            suggestion = false_value.as_string()
+        else:
+            message = "consider-using-ternary"
+            suggestion = f"{truth_value.as_string()} if {cond.as_string()} else {false_value.as_string()}"
+        self.add_message(message, node=node, args=(suggestion,), confidence=INFERENCE)
+
+    def _append_context_managers_to_stack(self, node: nodes.Assign) -> None:
+        if _is_inside_context_manager(node):
+            # if we are inside a context manager itself, we assume that it will handle
+            # the resource management itself.
+            return
+        if isinstance(node.targets[0], (nodes.Tuple, nodes.List, nodes.Set)):
+            assignees = node.targets[0].elts
+            value = utils.safe_infer(node.value)
+            if value is None or not hasattr(value, "elts"):
+                # We cannot deduce what values are assigned, so we have to skip this
+                return
+            values = value.elts
+        else:
+            assignees = [node.targets[0]]
+            values = [node.value]
+        if any(isinstance(n, UninferableBase) for n in (assignees, values)):
+            return
+        for assignee, value in zip(assignees, values):
+            if not isinstance(value, nodes.Call):
+                continue
+            inferred = utils.safe_infer(value.func)
+            if (
+                not inferred
+                or inferred.qname() not in CALLS_RETURNING_CONTEXT_MANAGERS
+                or not isinstance(assignee, (nodes.AssignName, nodes.AssignAttr))
+            ):
+                continue
+            stack = self._consider_using_with_stack.get_stack_for_frame(node.frame())
+            varname = (
+                assignee.name
+                if isinstance(assignee, nodes.AssignName)
+                else assignee.attrname
+            )
+            if varname in stack:
+                existing_node = stack[varname]
+                if astroid.are_exclusive(node, existing_node):
+                    # only one of the two assignments can be executed at runtime, thus it is fine
+                    stack[varname] = value
+                    continue
+                # variable was redefined before it was used in a ``with`` block
+                self.add_message(
+                    "consider-using-with",
+                    node=existing_node,
+                )
+            stack[varname] = value
+
+    def _check_consider_using_with(self, node: nodes.Call) -> None:
+        if _is_inside_context_manager(node) or _is_a_return_statement(node):
+            # If we are inside a context manager itself, we assume that it will handle the
+            # resource management itself.
+            # If the node is a child of a return, we assume that the caller knows they are
+            # getting a context manager that they should use properly (i.e. in a ``with``).
+            return
+        if (
+            node
+            in self._consider_using_with_stack.get_stack_for_frame(
+                node.frame()
+            ).values()
+        ):
+            # the result of this call was already assigned to a variable and will be
+            # checked when leaving the scope.
+            return
+        inferred = utils.safe_infer(node.func)
+        if not inferred or not isinstance(
+            inferred, (nodes.FunctionDef, nodes.ClassDef, bases.UnboundMethod)
+        ):
+            return
+        could_be_used_in_with = (
+            # things like ``lock.acquire()``
+            inferred.qname() in CALLS_THAT_COULD_BE_REPLACED_BY_WITH
+            or (
+                # things like ``open("foo")`` which are not already inside a ``with`` statement
+                inferred.qname() in CALLS_RETURNING_CONTEXT_MANAGERS
+                and not _is_part_of_with_items(node)
+            )
+        )
+        if could_be_used_in_with and not _will_be_released_automatically(node):
+            self.add_message("consider-using-with", node=node)
+
+    def _check_use_list_literal(self, node: nodes.Call) -> None:
         """Check if empty list is created by using the literal []."""
-        pass
+        if node.as_string() == "list()":
+            inferred = utils.safe_infer(node.func)
+            if isinstance(inferred, nodes.ClassDef) and not node.args:
+                if inferred.qname() == "builtins.list":
+                    self.add_message("use-list-literal", node=node)

-    def _check_use_dict_literal(self, node: nodes.Call) ->None:
+    def _check_use_dict_literal(self, node: nodes.Call) -> None:
         """Check if dict is created by using the literal {}."""
-        pass
+        if not isinstance(node.func, astroid.Name) or node.func.name != "dict":
+            return
+        inferred = utils.safe_infer(node.func)
+        if (
+            isinstance(inferred, nodes.ClassDef)
+            and inferred.qname() == "builtins.dict"
+            and not node.args
+        ):
+            self.add_message(
+                "use-dict-literal",
+                args=(self._dict_literal_suggestion(node),),
+                node=node,
+                confidence=INFERENCE,
+            )

     @staticmethod
-    def _dict_literal_suggestion(node: nodes.Call) ->str:
+    def _dict_literal_suggestion(node: nodes.Call) -> str:
         """Return a suggestion of reasonable length."""
-        pass
-
-    def _name_to_concatenate(self, node: nodes.NodeNG) ->(str | None):
+        elements: list[str] = []
+        for keyword in node.keywords:
+            if len(", ".join(elements)) >= 64:
+                break
+            if keyword not in node.kwargs:
+                elements.append(f'"{keyword.arg}": {keyword.value.as_string()}')
+        for keyword in node.kwargs:
+            if len(", ".join(elements)) >= 64:
+                break
+            elements.append(f"**{keyword.value.as_string()}")
+        suggestion = ", ".join(elements)
+        return f"{{{suggestion}{', ... '  if len(suggestion) > 64 else ''}}}"
+
+    def _name_to_concatenate(self, node: nodes.NodeNG) -> str | None:
         """Try to extract the name used in a concatenation loop."""
-        pass
-
-    def _check_consider_using_join(self, aug_assign: nodes.AugAssign) ->None:
+        if isinstance(node, nodes.Name):
+            return cast("str | None", node.name)
+        if not isinstance(node, nodes.JoinedStr):
+            return None
+
+        values = [
+            value for value in node.values if isinstance(value, nodes.FormattedValue)
+        ]
+        if len(values) != 1 or not isinstance(values[0].value, nodes.Name):
+            return None
+        # If there are more values in the joined string than formatted values,
+        # they are probably separators.
+        # Allow them only if the option `suggest-join-with-non-empty-separator` is set
+        with_separators = len(node.values) > len(values)
+        if with_separators and not self._suggest_join_with_non_empty_separator:
+            return None
+        return cast("str | None", values[0].value.name)
+
+    def _check_consider_using_join(self, aug_assign: nodes.AugAssign) -> None:
         """We start with the augmented assignment and work our way upwards.

         Names of variables for nodes if match successful:
@@ -379,17 +1775,156 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         for number in ['1', '2', '3']  # for_loop
             result += number  # aug_assign
         """
-        pass
+        for_loop = aug_assign.parent
+        if not isinstance(for_loop, nodes.For) or len(for_loop.body) > 1:
+            return
+        assign = for_loop.previous_sibling()
+        if not isinstance(assign, nodes.Assign):
+            return
+        result_assign_names = {
+            target.name
+            for target in assign.targets
+            if isinstance(target, nodes.AssignName)
+        }
+
+        is_concat_loop = (
+            aug_assign.op == "+="
+            and isinstance(aug_assign.target, nodes.AssignName)
+            and len(for_loop.body) == 1
+            and aug_assign.target.name in result_assign_names
+            and isinstance(assign.value, nodes.Const)
+            and isinstance(assign.value.value, str)
+            and self._name_to_concatenate(aug_assign.value) == for_loop.target.name
+        )
+        if is_concat_loop:
+            self.add_message("consider-using-join", node=aug_assign)
+
+    @utils.only_required_for_messages("consider-using-join")
+    def visit_augassign(self, node: nodes.AugAssign) -> None:
+        self._check_consider_using_join(node)
+
+    @utils.only_required_for_messages(
+        "unnecessary-comprehension",
+        "unnecessary-dict-index-lookup",
+        "unnecessary-list-index-lookup",
+    )
+    def visit_comprehension(self, node: nodes.Comprehension) -> None:
+        self._check_unnecessary_comprehension(node)
+        self._check_unnecessary_dict_index_lookup(node)
+        self._check_unnecessary_list_index_lookup(node)
+
+    def _check_unnecessary_comprehension(self, node: nodes.Comprehension) -> None:
+        if (
+            isinstance(node.parent, nodes.GeneratorExp)
+            or len(node.ifs) != 0
+            or len(node.parent.generators) != 1
+            or node.is_async
+        ):
+            return
+
+        if (
+            isinstance(node.parent, nodes.DictComp)
+            and isinstance(node.parent.key, nodes.Name)
+            and isinstance(node.parent.value, nodes.Name)
+            and isinstance(node.target, nodes.Tuple)
+            and all(isinstance(elt, nodes.AssignName) for elt in node.target.elts)
+        ):
+            expr_list = [node.parent.key.name, node.parent.value.name]
+            target_list = [elt.name for elt in node.target.elts]
+
+        elif isinstance(node.parent, (nodes.ListComp, nodes.SetComp)):
+            expr = node.parent.elt
+            if isinstance(expr, nodes.Name):
+                expr_list = expr.name
+            elif isinstance(expr, nodes.Tuple):
+                if any(not isinstance(elt, nodes.Name) for elt in expr.elts):
+                    return
+                expr_list = [elt.name for elt in expr.elts]
+            else:
+                expr_list = []
+            target = node.parent.generators[0].target
+            target_list = (
+                target.name
+                if isinstance(target, nodes.AssignName)
+                else (
+                    [
+                        elt.name
+                        for elt in target.elts
+                        if isinstance(elt, nodes.AssignName)
+                    ]
+                    if isinstance(target, nodes.Tuple)
+                    else []
+                )
+            )
+        else:
+            return
+        if expr_list == target_list and expr_list:
+            args: tuple[str] | None = None
+            inferred = utils.safe_infer(node.iter)
+            if isinstance(node.parent, nodes.DictComp) and isinstance(
+                inferred, astroid.objects.DictItems
+            ):
+                args = (f"dict({node.iter.func.expr.as_string()})",)
+            elif isinstance(node.parent, nodes.ListComp) and isinstance(
+                inferred, nodes.List
+            ):
+                args = (f"list({node.iter.as_string()})",)
+            elif isinstance(node.parent, nodes.SetComp) and isinstance(
+                inferred, nodes.Set
+            ):
+                args = (f"set({node.iter.as_string()})",)
+            if args:
+                self.add_message(
+                    "unnecessary-comprehension", node=node.parent, args=args
+                )
+                return
+
+            if isinstance(node.parent, nodes.DictComp):
+                func = "dict"
+            elif isinstance(node.parent, nodes.ListComp):
+                func = "list"
+            elif isinstance(node.parent, nodes.SetComp):
+                func = "set"
+            else:
+                return
+
+            self.add_message(
+                "unnecessary-comprehension",
+                node=node.parent,
+                args=(f"{func}({node.iter.as_string()})",),
+            )

     @staticmethod
-    def _is_and_or_ternary(node: (nodes.NodeNG | None)) ->bool:
+    def _is_and_or_ternary(node: nodes.NodeNG | None) -> bool:
         """Returns true if node is 'condition and true_value or false_value' form.

         All of: condition, true_value and false_value should not be a complex boolean expression
         """
-        pass
+        return (
+            isinstance(node, nodes.BoolOp)
+            and node.op == "or"
+            and len(node.values) == 2
+            and isinstance(node.values[0], nodes.BoolOp)
+            and not isinstance(node.values[1], nodes.BoolOp)
+            and node.values[0].op == "and"
+            and not isinstance(node.values[0].values[1], nodes.BoolOp)
+            and len(node.values[0].values) == 2
+        )

-    def _check_consistent_returns(self, node: nodes.FunctionDef) ->None:
+    @staticmethod
+    def _and_or_ternary_arguments(
+        node: nodes.BoolOp,
+    ) -> tuple[nodes.NodeNG, nodes.NodeNG, nodes.NodeNG]:
+        false_value = node.values[1]
+        condition, true_value = node.values[0].values
+        return condition, true_value, false_value
+
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
+        self._return_nodes[node.name] = list(
+            node.nodes_of_class(nodes.Return, skip_klass=nodes.FunctionDef)
+        )
+
+    def _check_consistent_returns(self, node: nodes.FunctionDef) -> None:
         """Check that all return statements inside a function are consistent.

         Return statements are consistent if:
@@ -399,9 +1934,19 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         Args:
             node (nodes.FunctionDef): the function holding the return statements.
         """
-        pass
-
-    def _is_if_node_return_ended(self, node: nodes.If) ->bool:
+        # explicit return statements are those with a not None value
+        explicit_returns = [
+            _node for _node in self._return_nodes[node.name] if _node.value is not None
+        ]
+        if not explicit_returns:
+            return
+        if len(explicit_returns) == len(
+            self._return_nodes[node.name]
+        ) and self._is_node_return_ended(node):
+            return
+        self.add_message("inconsistent-return-statements", node=node)
+
+    def _is_if_node_return_ended(self, node: nodes.If) -> bool:
         """Check if the If node ends with an explicit return statement.

         Args:
@@ -410,9 +1955,28 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         Returns:
             bool: True if the node ends with an explicit statement, False otherwise.
         """
-        pass
-
-    def _is_raise_node_return_ended(self, node: nodes.Raise) ->bool:
+        # Do not check if inner function definition are return ended.
+        is_if_returning = any(
+            self._is_node_return_ended(_ifn)
+            for _ifn in node.body
+            if not isinstance(_ifn, nodes.FunctionDef)
+        )
+        if not node.orelse:
+            # If there is no orelse part then the if statement is returning if:
+            # - there is at least one return statement in its siblings;
+            # - the if body is itself returning.
+            if not self._has_return_in_siblings(node):
+                return False
+            return is_if_returning
+        # If there is an orelse part then both if body and orelse part should return.
+        is_orelse_returning = any(
+            self._is_node_return_ended(_ore)
+            for _ore in node.orelse
+            if not isinstance(_ore, nodes.FunctionDef)
+        )
+        return is_if_returning and is_orelse_returning
+
+    def _is_raise_node_return_ended(self, node: nodes.Raise) -> bool:
         """Check if the Raise node ends with an explicit return statement.

         Args:
@@ -421,9 +1985,35 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         Returns:
             bool: True if the node ends with an explicit statement, False otherwise.
         """
-        pass
-
-    def _is_node_return_ended(self, node: nodes.NodeNG) ->bool:
+        # a Raise statement doesn't need to end with a return statement
+        # but if the exception raised is handled, then the handler has to
+        # end with a return statement
+        if not node.exc:
+            # Ignore bare raises
+            return True
+        if not utils.is_node_inside_try_except(node):
+            # If the raise statement is not inside a try/except statement
+            # then the exception is raised and cannot be caught. No need
+            # to infer it.
+            return True
+        exc = utils.safe_infer(node.exc)
+        if (
+            exc is None
+            or isinstance(exc, UninferableBase)
+            or not hasattr(exc, "pytype")
+        ):
+            return False
+        exc_name = exc.pytype().split(".")[-1]
+        handlers = utils.get_exception_handlers(node, exc_name)
+        handlers = list(handlers) if handlers is not None else []
+        if handlers:
+            # among all the handlers handling the exception at least one
+            # must end with a return statement
+            return any(self._is_node_return_ended(_handler) for _handler in handlers)
+        # if no handlers handle the exception then it's ok
+        return True
+
+    def _is_node_return_ended(self, node: nodes.NodeNG) -> bool:
         """Check if the node ends with an explicit return statement.

         Args:
@@ -432,15 +2022,59 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         Returns:
             bool: True if the node ends with an explicit statement, False otherwise.
         """
-        pass
+        # Recursion base case
+        if isinstance(node, nodes.Return):
+            return True
+        if isinstance(node, nodes.Call):
+            try:
+                funcdef_node = node.func.inferred()[0]
+                if self._is_function_def_never_returning(funcdef_node):
+                    return True
+            except astroid.InferenceError:
+                pass
+        if isinstance(node, nodes.While):
+            # A while-loop is considered return-ended if it has a
+            # truthy test and no break statements
+            return (node.test.bool_value() and not _loop_exits_early(node)) or any(
+                self._is_node_return_ended(child) for child in node.orelse
+            )
+        if isinstance(node, nodes.Raise):
+            return self._is_raise_node_return_ended(node)
+        if isinstance(node, nodes.If):
+            return self._is_if_node_return_ended(node)
+        if isinstance(node, nodes.Try):
+            handlers = {
+                _child
+                for _child in node.get_children()
+                if isinstance(_child, nodes.ExceptHandler)
+            }
+            all_but_handler = set(node.get_children()) - handlers
+            return any(
+                self._is_node_return_ended(_child) for _child in all_but_handler
+            ) and all(self._is_node_return_ended(_child) for _child in handlers)
+        if (
+            isinstance(node, nodes.Assert)
+            and isinstance(node.test, nodes.Const)
+            and not node.test.value
+        ):
+            # consider assert False as a return node
+            return True
+        # recurses on the children of the node
+        return any(self._is_node_return_ended(_child) for _child in node.get_children())

     @staticmethod
-    def _has_return_in_siblings(node: nodes.NodeNG) ->bool:
+    def _has_return_in_siblings(node: nodes.NodeNG) -> bool:
         """Returns True if there is at least one return in the node's siblings."""
-        pass
-
-    def _is_function_def_never_returning(self, node: (nodes.FunctionDef |
-        astroid.BoundMethod)) ->bool:
+        next_sibling = node.next_sibling()
+        while next_sibling:
+            if isinstance(next_sibling, nodes.Return):
+                return True
+            next_sibling = next_sibling.next_sibling()
+        return False
+
+    def _is_function_def_never_returning(
+        self, node: nodes.FunctionDef | astroid.BoundMethod
+    ) -> bool:
         """Return True if the function never returns, False otherwise.

         Args:
@@ -449,9 +2083,19 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         Returns:
             bool: True if the function never returns, False otherwise.
         """
-        pass
-
-    def _check_return_at_the_end(self, node: nodes.FunctionDef) ->None:
+        if isinstance(node, (nodes.FunctionDef, astroid.BoundMethod)) and node.returns:
+            return (
+                isinstance(node.returns, nodes.Attribute)
+                and node.returns.attrname == "NoReturn"
+                or isinstance(node.returns, nodes.Name)
+                and node.returns.name == "NoReturn"
+            )
+        try:
+            return node.qname() in self._never_returning_functions
+        except (TypeError, AttributeError):
+            return False
+
+    def _check_return_at_the_end(self, node: nodes.FunctionDef) -> None:
         """Check for presence of a *single* return statement at the end of a
         function.

@@ -463,15 +2107,311 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         Per its implementation and PEP8 we can have a "return None" at the end
         of the function body if there are other return statements before that!
         """
-        pass
-
-    def _check_unnecessary_dict_index_lookup(self, node: (nodes.For | nodes
-        .Comprehension)) ->None:
+        if len(self._return_nodes[node.name]) != 1:
+            return
+        if not node.body:
+            return
+
+        last = node.body[-1]
+        if isinstance(last, nodes.Return) and len(node.body) == 1:
+            return
+
+        while isinstance(last, (nodes.If, nodes.Try, nodes.ExceptHandler)):
+            last = last.last_child()
+
+        if isinstance(last, nodes.Return):
+            # e.g. "return"
+            if last.value is None:
+                self.add_message("useless-return", node=node)
+            # e.g. "return None"
+            elif isinstance(last.value, nodes.Const) and (last.value.value is None):
+                self.add_message("useless-return", node=node)
+
+    def _check_unnecessary_dict_index_lookup(
+        self, node: nodes.For | nodes.Comprehension
+    ) -> None:
         """Add message when accessing dict values by index lookup."""
-        pass
-
-    def _enumerate_with_start(self, node: (nodes.For | nodes.Comprehension)
-        ) ->tuple[bool, Confidence]:
+        # Verify that we have an .items() call and
+        # that the object which is iterated is used as a subscript in the
+        # body of the for.
+        # Is it a proper items call?
+        if (
+            isinstance(node.iter, nodes.Call)
+            and isinstance(node.iter.func, nodes.Attribute)
+            and node.iter.func.attrname == "items"
+        ):
+            inferred = utils.safe_infer(node.iter.func)
+            if not isinstance(inferred, astroid.BoundMethod):
+                return
+            iterating_object_name = node.iter.func.expr.as_string()
+
+            # Store potential violations. These will only be reported if we don't
+            # discover any writes to the collection during the loop.
+            messages = []
+
+            # Verify that the body of the for loop uses a subscript
+            # with the object that was iterated. This uses some heuristics
+            # in order to make sure that the same object is used in the
+            # for body.
+
+            children = (
+                node.body
+                if isinstance(node, nodes.For)
+                else list(node.parent.get_children())
+            )
+
+            # Check if there are any for / while loops within the loop in question;
+            # If so, we will be more conservative about reporting errors as we
+            # can't yet do proper control flow analysis to be sure when
+            # reassignment will affect us
+            nested_loops = itertools.chain.from_iterable(
+                child.nodes_of_class((nodes.For, nodes.While)) for child in children
+            )
+            has_nested_loops = next(nested_loops, None) is not None
+
+            for child in children:
+                for subscript in child.nodes_of_class(nodes.Subscript):
+                    if not isinstance(subscript.value, (nodes.Name, nodes.Attribute)):
+                        continue
+
+                    value = subscript.slice
+
+                    if isinstance(node, nodes.For) and _is_part_of_assignment_target(
+                        subscript
+                    ):
+                        # Ignore this subscript if it is the target of an assignment
+                        # Early termination; after reassignment dict index lookup will be necessary
+                        return
+
+                    if isinstance(subscript.parent, nodes.Delete):
+                        # Ignore this subscript if it's used with the delete keyword
+                        return
+
+                    # Case where .items is assigned to k,v (i.e., for k, v in d.items())
+                    if isinstance(value, nodes.Name):
+                        if (
+                            not isinstance(node.target, nodes.Tuple)
+                            # Ignore 1-tuples: for k, in d.items()
+                            or len(node.target.elts) < 2
+                            or value.name != node.target.elts[0].name
+                            or iterating_object_name != subscript.value.as_string()
+                        ):
+                            continue
+
+                        if (
+                            isinstance(node, nodes.For)
+                            and value.lookup(value.name)[1][-1].lineno > node.lineno
+                        ):
+                            # Ignore this subscript if it has been redefined after
+                            # the for loop. This checks for the line number using .lookup()
+                            # to get the line number where the iterating object was last
+                            # defined and compare that to the for loop's line number
+                            continue
+
+                        if has_nested_loops:
+                            messages.append(
+                                {
+                                    "node": subscript,
+                                    "variable": node.target.elts[1].as_string(),
+                                }
+                            )
+                        else:
+                            self.add_message(
+                                "unnecessary-dict-index-lookup",
+                                node=subscript,
+                                args=(node.target.elts[1].as_string(),),
+                            )
+
+                    # Case where .items is assigned to single var (i.e., for item in d.items())
+                    elif isinstance(value, nodes.Subscript):
+                        if (
+                            not isinstance(node.target, nodes.AssignName)
+                            or not isinstance(value.value, nodes.Name)
+                            or node.target.name != value.value.name
+                            or iterating_object_name != subscript.value.as_string()
+                        ):
+                            continue
+
+                        if (
+                            isinstance(node, nodes.For)
+                            and value.value.lookup(value.value.name)[1][-1].lineno
+                            > node.lineno
+                        ):
+                            # Ignore this subscript if it has been redefined after
+                            # the for loop. This checks for the line number using .lookup()
+                            # to get the line number where the iterating object was last
+                            # defined and compare that to the for loop's line number
+                            continue
+
+                        # check if subscripted by 0 (key)
+                        inferred = utils.safe_infer(value.slice)
+                        if not isinstance(inferred, nodes.Const) or inferred.value != 0:
+                            continue
+
+                        if has_nested_loops:
+                            messages.append(
+                                {
+                                    "node": subscript,
+                                    "variable": "1".join(
+                                        value.as_string().rsplit("0", maxsplit=1)
+                                    ),
+                                }
+                            )
+                        else:
+                            self.add_message(
+                                "unnecessary-dict-index-lookup",
+                                node=subscript,
+                                args=(
+                                    "1".join(value.as_string().rsplit("0", maxsplit=1)),
+                                ),
+                            )
+
+            for message in messages:
+                self.add_message(
+                    "unnecessary-dict-index-lookup",
+                    node=message["node"],
+                    args=(message["variable"],),
+                )
+
+    def _check_unnecessary_list_index_lookup(
+        self, node: nodes.For | nodes.Comprehension
+    ) -> None:
+        if (
+            not isinstance(node.iter, nodes.Call)
+            or not isinstance(node.iter.func, nodes.Name)
+            or not node.iter.func.name == "enumerate"
+        ):
+            return
+
+        preliminary_confidence = HIGH
+        try:
+            iterable_arg = utils.get_argument_from_call(
+                node.iter, position=0, keyword="iterable"
+            )
+        except utils.NoSuchArgumentError:
+            iterable_arg = utils.infer_kwarg_from_call(node.iter, keyword="iterable")
+            preliminary_confidence = INFERENCE
+
+        if not isinstance(iterable_arg, nodes.Name):
+            return
+
+        if not isinstance(node.target, nodes.Tuple) or len(node.target.elts) < 2:
+            # enumerate() result is being assigned without destructuring
+            return
+
+        if not isinstance(node.target.elts[1], nodes.AssignName):
+            # The value is not being assigned to a single variable, e.g. being
+            # destructured, so we can't necessarily use it.
+            return
+
+        has_start_arg, confidence = self._enumerate_with_start(node)
+        if has_start_arg:
+            # enumerate is being called with start arg/kwarg so resulting index lookup
+            # is not redundant, hence we should not report an error.
+            return
+
+        # Preserve preliminary_confidence if it was INFERENCE
+        confidence = (
+            preliminary_confidence
+            if preliminary_confidence == INFERENCE
+            else confidence
+        )
+
+        iterating_object_name = iterable_arg.name
+        value_variable = node.target.elts[1]
+
+        # Store potential violations. These will only be reported if we don't
+        # discover any writes to the collection during the loop.
+        bad_nodes = []
+
+        children = (
+            node.body
+            if isinstance(node, nodes.For)
+            else list(node.parent.get_children())
+        )
+
+        # Check if there are any for / while loops within the loop in question;
+        # If so, we will be more conservative about reporting errors as we
+        # can't yet do proper control flow analysis to be sure when
+        # reassignment will affect us
+        nested_loops = itertools.chain.from_iterable(
+            child.nodes_of_class((nodes.For, nodes.While)) for child in children
+        )
+        has_nested_loops = next(nested_loops, None) is not None
+
+        # Check if there are any if statements within the loop in question;
+        # If so, we will be more conservative about reporting errors as we
+        # can't yet do proper control flow analysis to be sure when
+        # reassignment will affect us
+        if_statements = itertools.chain.from_iterable(
+            child.nodes_of_class(nodes.If) for child in children
+        )
+        has_if_statements = next(if_statements, None) is not None
+
+        for child in children:
+            for subscript in child.nodes_of_class(nodes.Subscript):
+                if isinstance(node, nodes.For) and _is_part_of_assignment_target(
+                    subscript
+                ):
+                    # Ignore this subscript if it is the target of an assignment
+                    # Early termination; after reassignment index lookup will be necessary
+                    return
+
+                if isinstance(subscript.parent, nodes.Delete):
+                    # Ignore this subscript if it's used with the delete keyword
+                    return
+
+                index = subscript.slice
+                if isinstance(index, nodes.Name):
+                    if (
+                        index.name != node.target.elts[0].name
+                        or iterating_object_name != subscript.value.as_string()
+                    ):
+                        continue
+
+                    if (
+                        isinstance(node, nodes.For)
+                        and index.lookup(index.name)[1][-1].lineno > node.lineno
+                    ):
+                        # Ignore this subscript if it has been redefined after
+                        # the for loop.
+                        continue
+
+                    if (
+                        isinstance(node, nodes.For)
+                        and index.lookup(value_variable.name)[1][-1].lineno
+                        > node.lineno
+                    ):
+                        # The variable holding the value from iteration has been
+                        # reassigned on a later line, so it can't be used.
+                        continue
+
+                    if has_nested_loops:
+                        # Have found a likely issue, but since there are nested
+                        # loops we don't want to report this unless we get to the
+                        # end of the loop without updating the collection
+                        bad_nodes.append(subscript)
+                    elif has_if_statements:
+                        continue
+                    else:
+                        self.add_message(
+                            "unnecessary-list-index-lookup",
+                            node=subscript,
+                            args=(node.target.elts[1].name,),
+                            confidence=confidence,
+                        )
+
+        for subscript in bad_nodes:
+            self.add_message(
+                "unnecessary-list-index-lookup",
+                node=subscript,
+                args=(node.target.elts[1].name,),
+                confidence=confidence,
+            )
+
+    def _enumerate_with_start(
+        self, node: nodes.For | nodes.Comprehension
+    ) -> tuple[bool, Confidence]:
         """Check presence of `start` kwarg or second argument to enumerate.

         For example:
@@ -482,4 +2422,40 @@ class RefactoringChecker(checkers.BaseTokenChecker):
         If `start` is assigned to `0`, the default value, this is equivalent to
         not calling `enumerate` with start.
         """
-        pass
+        confidence = HIGH
+
+        if len(node.iter.args) > 1:
+            # We assume the second argument to `enumerate` is the `start` int arg.
+            # It's a reasonable assumption for now as it's the only possible argument:
+            # https://docs.python.org/3/library/functions.html#enumerate
+            start_arg = node.iter.args[1]
+            start_val, confidence = self._get_start_value(start_arg)
+            if start_val is None:
+                return False, confidence
+            return not start_val == 0, confidence
+
+        for keyword in node.iter.keywords:
+            if keyword.arg == "start":
+                start_val, confidence = self._get_start_value(keyword.value)
+                if start_val is None:
+                    return False, confidence
+                return not start_val == 0, confidence
+
+        return False, confidence
+
+    def _get_start_value(self, node: nodes.NodeNG) -> tuple[int | None, Confidence]:
+        if (
+            isinstance(node, (nodes.Name, nodes.Call, nodes.Attribute))
+            or isinstance(node, nodes.UnaryOp)
+            and isinstance(node.operand, (nodes.Attribute, nodes.Name))
+        ):
+            inferred = utils.safe_infer(node)
+            # inferred can be an astroid.base.Instance as in 'enumerate(x, int(y))' or
+            # not correctly inferred (None)
+            start_val = inferred.value if isinstance(inferred, nodes.Const) else None
+            return start_val, INFERENCE
+        if isinstance(node, nodes.UnaryOp):
+            return node.operand.value, HIGH
+        if isinstance(node, nodes.Const):
+            return node.value, HIGH
+        return None, HIGH
diff --git a/pylint/checkers/similar.py b/pylint/checkers/similar.py
index b85648de9..ee1b60843 100644
--- a/pylint/checkers/similar.py
+++ b/pylint/checkers/similar.py
@@ -1,3 +1,7 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """A similarities / code duplication command line tool and pylint checker.

 The algorithm is based on comparing the hash value of n successive lines of a file.
@@ -22,7 +26,9 @@ are common.
 Once post-processed the values of association table are the result looked for, i.e.
 start and end lines numbers of common lines in both files.
 """
+
 from __future__ import annotations
+
 import argparse
 import copy
 import functools
@@ -36,28 +42,54 @@ from collections.abc import Callable, Generator, Iterable, Sequence
 from getopt import GetoptError, getopt
 from io import BufferedIOBase, BufferedReader, BytesIO
 from itertools import chain
-from typing import TYPE_CHECKING, Dict, List, NamedTuple, NewType, NoReturn, TextIO, Tuple, Union
+from typing import (
+    TYPE_CHECKING,
+    Dict,
+    List,
+    NamedTuple,
+    NewType,
+    NoReturn,
+    TextIO,
+    Tuple,
+    Union,
+)
+
 import astroid
 from astroid import nodes
+
 from pylint.checkers import BaseChecker, BaseRawFileChecker, table_lines_from_stats
 from pylint.reporters.ureports.nodes import Section, Table
 from pylint.typing import MessageDefinitionTuple, Options
 from pylint.utils import LinterStats, decoding_stream
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
+
 DEFAULT_MIN_SIMILARITY_LINE = 4
-REGEX_FOR_LINES_WITH_CONTENT = re.compile('.*\\w+')
-Index = NewType('Index', int)
-LineNumber = NewType('LineNumber', int)

+REGEX_FOR_LINES_WITH_CONTENT = re.compile(r".*\w+")

+# Index defines a location in a LineSet stripped lines collection
+Index = NewType("Index", int)
+
+# LineNumber defines a location in a LinesSet real lines collection (the whole file lines)
+LineNumber = NewType("LineNumber", int)
+
+
+# LineSpecifs holds characteristics of a line in a file
 class LineSpecifs(NamedTuple):
     line_number: LineNumber
     text: str


-HashToIndex_T = Dict['LinesChunk', List[Index]]
-IndexToLines_T = Dict[Index, 'SuccessiveLinesLimits']
+# Links LinesChunk object to the starting indices (in lineset's stripped lines)
+# of the different chunk of lines that are used to compute the hash
+HashToIndex_T = Dict["LinesChunk", List[Index]]
+
+# Links index in the lineset's stripped lines to the real lines in the file
+IndexToLines_T = Dict[Index, "SuccessiveLinesLimits"]
+
+# The types the streams read by pylint can take. Originating from astroid.nodes.Module.stream() and open()
 STREAM_TYPES = Union[TextIO, BufferedReader, BytesIO]


@@ -65,51 +97,62 @@ class CplSuccessiveLinesLimits:
     """Holds a SuccessiveLinesLimits object for each checked file and counts the number
     of common lines between both stripped lines collections extracted from both files.
     """
-    __slots__ = 'first_file', 'second_file', 'effective_cmn_lines_nb'

-    def __init__(self, first_file: SuccessiveLinesLimits, second_file:
-        SuccessiveLinesLimits, effective_cmn_lines_nb: int) ->None:
+    __slots__ = ("first_file", "second_file", "effective_cmn_lines_nb")
+
+    def __init__(
+        self,
+        first_file: SuccessiveLinesLimits,
+        second_file: SuccessiveLinesLimits,
+        effective_cmn_lines_nb: int,
+    ) -> None:
         self.first_file = first_file
         self.second_file = second_file
         self.effective_cmn_lines_nb = effective_cmn_lines_nb


-CplIndexToCplLines_T = Dict['LineSetStartCouple', CplSuccessiveLinesLimits]
+# Links the indices to the starting line in both lineset's stripped lines to
+# the start and end lines in both files
+CplIndexToCplLines_T = Dict["LineSetStartCouple", CplSuccessiveLinesLimits]


 class LinesChunk:
     """The LinesChunk object computes and stores the hash of some consecutive stripped
     lines of a lineset.
     """
-    __slots__ = '_fileid', '_index', '_hash'

-    def __init__(self, fileid: str, num_line: int, *lines: Iterable[str]
-        ) ->None:
+    __slots__ = ("_fileid", "_index", "_hash")
+
+    def __init__(self, fileid: str, num_line: int, *lines: Iterable[str]) -> None:
         self._fileid: str = fileid
         """The name of the file from which the LinesChunk object is generated."""
+
         self._index: Index = Index(num_line)
         """The index in the stripped lines that is the starting of consecutive
         lines.
         """
+
         self._hash: int = sum(hash(lin) for lin in lines)
         """The hash of some consecutive lines."""

-    def __eq__(self, o: object) ->bool:
+    def __eq__(self, o: object) -> bool:
         if not isinstance(o, LinesChunk):
             return NotImplemented
         return self._hash == o._hash

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return self._hash

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return (
-            f'<LinesChunk object for file {self._fileid} ({self._index}, {self._hash})>'
-            )
+            f"<LinesChunk object for file {self._fileid} ({self._index}, {self._hash})>"
+        )

-    def __str__(self) ->str:
-        return f"""LinesChunk object for file {self._fileid}, starting at line {self._index} 
-Hash is {self._hash}"""
+    def __str__(self) -> str:
+        return (
+            f"LinesChunk object for file {self._fileid}, starting at line {self._index} \n"
+            f"Hash is {self._hash}"
+        )


 class SuccessiveLinesLimits:
@@ -117,41 +160,64 @@ class SuccessiveLinesLimits:

     :note: Only the end line number can be updated.
     """
-    __slots__ = '_start', '_end'

-    def __init__(self, start: LineNumber, end: LineNumber) ->None:
+    __slots__ = ("_start", "_end")
+
+    def __init__(self, start: LineNumber, end: LineNumber) -> None:
         self._start: LineNumber = start
         self._end: LineNumber = end

-    def __repr__(self) ->str:
-        return f'<SuccessiveLinesLimits <{self._start};{self._end}>>'
+    @property
+    def start(self) -> LineNumber:
+        return self._start
+
+    @property
+    def end(self) -> LineNumber:
+        return self._end
+
+    @end.setter
+    def end(self, value: LineNumber) -> None:
+        self._end = value
+
+    def __repr__(self) -> str:
+        return f"<SuccessiveLinesLimits <{self._start};{self._end}>>"


 class LineSetStartCouple(NamedTuple):
     """Indices in both linesets that mark the beginning of successive lines."""
+
     fst_lineset_index: Index
     snd_lineset_index: Index

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return (
-            f'<LineSetStartCouple <{self.fst_lineset_index};{self.snd_lineset_index}>>'
-            )
+            f"<LineSetStartCouple <{self.fst_lineset_index};{self.snd_lineset_index}>>"
+        )

-    def __eq__(self, other: object) ->bool:
+    def __eq__(self, other: object) -> bool:
         if not isinstance(other, LineSetStartCouple):
             return NotImplemented
-        return (self.fst_lineset_index == other.fst_lineset_index and self.
-            snd_lineset_index == other.snd_lineset_index)
+        return (
+            self.fst_lineset_index == other.fst_lineset_index
+            and self.snd_lineset_index == other.snd_lineset_index
+        )

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return hash(self.fst_lineset_index) + hash(self.snd_lineset_index)

+    def increment(self, value: Index) -> LineSetStartCouple:
+        return LineSetStartCouple(
+            Index(self.fst_lineset_index + value),
+            Index(self.snd_lineset_index + value),
+        )
+

-LinesChunkLimits_T = Tuple['LineSet', LineNumber, LineNumber]
+LinesChunkLimits_T = Tuple["LineSet", LineNumber, LineNumber]


-def hash_lineset(lineset: LineSet, min_common_lines: int=
-    DEFAULT_MIN_SIMILARITY_LINE) ->tuple[HashToIndex_T, IndexToLines_T]:
+def hash_lineset(
+    lineset: LineSet, min_common_lines: int = DEFAULT_MIN_SIMILARITY_LINE
+) -> tuple[HashToIndex_T, IndexToLines_T]:
     """Return two dicts.

     The first associates the hash of successive stripped lines of a lineset
@@ -164,10 +230,33 @@ def hash_lineset(lineset: LineSet, min_common_lines: int=
     :return: a dict linking hashes to corresponding start index and a dict that links this
              index to the start and end lines in the file
     """
-    pass
+    hash2index = defaultdict(list)
+    index2lines = {}
+    # Comments, docstrings and other specific patterns may be excluded -> call to stripped_lines
+    # to get only what is desired
+    lines = tuple(x.text for x in lineset.stripped_lines)
+    # Need different iterators on same lines but each one is shifted 1 from the precedent
+    shifted_lines = [iter(lines[i:]) for i in range(min_common_lines)]
+
+    for i, *succ_lines in enumerate(zip(*shifted_lines)):
+        start_linenumber = LineNumber(lineset.stripped_lines[i].line_number)
+        try:
+            end_linenumber = lineset.stripped_lines[i + min_common_lines].line_number
+        except IndexError:
+            end_linenumber = LineNumber(lineset.stripped_lines[-1].line_number + 1)

+        index = Index(i)
+        index2lines[index] = SuccessiveLinesLimits(
+            start=start_linenumber, end=end_linenumber
+        )

-def remove_successive(all_couples: CplIndexToCplLines_T) ->None:
+        l_c = LinesChunk(lineset.name, index, *succ_lines)
+        hash2index[l_c].append(index)
+
+    return hash2index, index2lines
+
+
+def remove_successive(all_couples: CplIndexToCplLines_T) -> None:
     """Removes all successive entries in the dictionary in argument.

     :param all_couples: collection that has to be cleaned up from successive entries.
@@ -192,11 +281,31 @@ def remove_successive(all_couples: CplIndexToCplLines_T) ->None:
     {(11, 34): ([5, 10], [27, 32]),
      (23, 79): ([15, 19], [45, 49])}
     """
-    pass
-
-
-def filter_noncode_lines(ls_1: LineSet, stindex_1: Index, ls_2: LineSet,
-    stindex_2: Index, common_lines_nb: int) ->int:
+    couple: LineSetStartCouple
+    for couple in tuple(all_couples.keys()):
+        to_remove = []
+        test = couple.increment(Index(1))
+        while test in all_couples:
+            all_couples[couple].first_file.end = all_couples[test].first_file.end
+            all_couples[couple].second_file.end = all_couples[test].second_file.end
+            all_couples[couple].effective_cmn_lines_nb += 1
+            to_remove.append(test)
+            test = test.increment(Index(1))
+
+        for target in to_remove:
+            try:
+                all_couples.pop(target)
+            except KeyError:
+                pass
+
+
+def filter_noncode_lines(
+    ls_1: LineSet,
+    stindex_1: Index,
+    ls_2: LineSet,
+    stindex_2: Index,
+    common_lines_nb: int,
+) -> int:
     """Return the effective number of common lines between lineset1
     and lineset2 filtered from non code lines.

@@ -211,7 +320,17 @@ def filter_noncode_lines(ls_1: LineSet, stindex_1: Index, ls_2: LineSet,
     :param common_lines_nb: number of common successive stripped lines before being filtered from non code lines
     :return: the number of common successive stripped lines that contain code
     """
-    pass
+    stripped_l1 = [
+        lspecif.text
+        for lspecif in ls_1.stripped_lines[stindex_1 : stindex_1 + common_lines_nb]
+        if REGEX_FOR_LINES_WITH_CONTENT.match(lspecif.text)
+    ]
+    stripped_l2 = [
+        lspecif.text
+        for lspecif in ls_2.stripped_lines[stindex_2 : stindex_2 + common_lines_nb]
+        if REGEX_FOR_LINES_WITH_CONTENT.match(lspecif.text)
+    ]
+    return sum(sline_1 == sline_2 for sline_1, sline_2 in zip(stripped_l1, stripped_l2))


 class Commonality(NamedTuple):
@@ -227,13 +346,20 @@ class Commonality(NamedTuple):
 class Similar:
     """Finds copy-pasted lines of code in a project."""

-    def __init__(self, min_lines: int=DEFAULT_MIN_SIMILARITY_LINE,
-        ignore_comments: bool=False, ignore_docstrings: bool=False,
-        ignore_imports: bool=False, ignore_signatures: bool=False) ->None:
+    def __init__(
+        self,
+        min_lines: int = DEFAULT_MIN_SIMILARITY_LINE,
+        ignore_comments: bool = False,
+        ignore_docstrings: bool = False,
+        ignore_imports: bool = False,
+        ignore_signatures: bool = False,
+    ) -> None:
+        # If we run in pylint mode we link the namespace objects
         if isinstance(self, BaseChecker):
             self.namespace = self.linter.config
         else:
             self.namespace = argparse.Namespace()
+
         self.namespace.min_similarity_lines = min_lines
         self.namespace.ignore_comments = ignore_comments
         self.namespace.ignore_docstrings = ignore_docstrings
@@ -241,31 +367,119 @@ class Similar:
         self.namespace.ignore_signatures = ignore_signatures
         self.linesets: list[LineSet] = []

-    def append_stream(self, streamid: str, stream: STREAM_TYPES, encoding:
-        (str | None)=None) ->None:
+    def append_stream(
+        self, streamid: str, stream: STREAM_TYPES, encoding: str | None = None
+    ) -> None:
         """Append a file to search for similarities."""
-        pass
+        if isinstance(stream, BufferedIOBase):
+            if encoding is None:
+                raise ValueError
+            readlines = decoding_stream(stream, encoding).readlines
+        else:
+            # hint parameter is incorrectly typed as non-optional
+            readlines = stream.readlines  # type: ignore[assignment]
+
+        try:
+            lines = readlines()
+        except UnicodeDecodeError:
+            lines = []
+
+        self.linesets.append(
+            LineSet(
+                streamid,
+                lines,
+                self.namespace.ignore_comments,
+                self.namespace.ignore_docstrings,
+                self.namespace.ignore_imports,
+                self.namespace.ignore_signatures,
+                line_enabled_callback=(
+                    self.linter._is_one_message_enabled
+                    if hasattr(self, "linter")
+                    else None
+                ),
+            )
+        )

-    def run(self) ->None:
+    def run(self) -> None:
         """Start looking for similarities and display results on stdout."""
-        pass
+        if self.namespace.min_similarity_lines == 0:
+            return
+        self._display_sims(self._compute_sims())

-    def _compute_sims(self) ->list[tuple[int, set[LinesChunkLimits_T]]]:
+    def _compute_sims(self) -> list[tuple[int, set[LinesChunkLimits_T]]]:
         """Compute similarities in appended files."""
-        pass
-
-    def _display_sims(self, similarities: list[tuple[int, set[
-        LinesChunkLimits_T]]]) ->None:
+        no_duplicates: dict[int, list[set[LinesChunkLimits_T]]] = defaultdict(list)
+
+        for commonality in self._iter_sims():
+            num = commonality.cmn_lines_nb
+            lineset1 = commonality.fst_lset
+            start_line_1 = commonality.fst_file_start
+            end_line_1 = commonality.fst_file_end
+            lineset2 = commonality.snd_lset
+            start_line_2 = commonality.snd_file_start
+            end_line_2 = commonality.snd_file_end
+
+            duplicate = no_duplicates[num]
+            couples: set[LinesChunkLimits_T]
+            for couples in duplicate:
+                if (lineset1, start_line_1, end_line_1) in couples or (
+                    lineset2,
+                    start_line_2,
+                    end_line_2,
+                ) in couples:
+                    break
+            else:
+                duplicate.append(
+                    {
+                        (lineset1, start_line_1, end_line_1),
+                        (lineset2, start_line_2, end_line_2),
+                    }
+                )
+        sims: list[tuple[int, set[LinesChunkLimits_T]]] = []
+        ensembles: list[set[LinesChunkLimits_T]]
+        for num, ensembles in no_duplicates.items():
+            cpls: set[LinesChunkLimits_T]
+            for cpls in ensembles:
+                sims.append((num, cpls))
+        sims.sort()
+        sims.reverse()
+        return sims
+
+    def _display_sims(
+        self, similarities: list[tuple[int, set[LinesChunkLimits_T]]]
+    ) -> None:
         """Display computed similarities on stdout."""
-        pass
+        report = self._get_similarity_report(similarities)
+        print(report)

-    def _get_similarity_report(self, similarities: list[tuple[int, set[
-        LinesChunkLimits_T]]]) ->str:
+    def _get_similarity_report(
+        self, similarities: list[tuple[int, set[LinesChunkLimits_T]]]
+    ) -> str:
         """Create a report from similarities."""
-        pass
-
-    def _find_common(self, lineset1: LineSet, lineset2: LineSet) ->Generator[
-        Commonality, None, None]:
+        report: str = ""
+        duplicated_line_number: int = 0
+        for number, couples in similarities:
+            report += f"\n{number} similar lines in {len(couples)} files\n"
+            couples_l = sorted(couples)
+            line_set = start_line = end_line = None
+            for line_set, start_line, end_line in couples_l:
+                report += f"=={line_set.name}:[{start_line}:{end_line}]\n"
+            if line_set:
+                for line in line_set._real_lines[start_line:end_line]:
+                    report += f"   {line.rstrip()}\n" if line.rstrip() else "\n"
+            duplicated_line_number += number * (len(couples_l) - 1)
+        total_line_number: int = sum(len(lineset) for lineset in self.linesets)
+        report += (
+            f"TOTAL lines={total_line_number} "
+            f"duplicates={duplicated_line_number} "
+            f"percent={duplicated_line_number * 100.0 / total_line_number:.2f}\n"
+        )
+        return report
+
+    # pylint: disable = too-many-locals
+    def _find_common(
+        self, lineset1: LineSet, lineset2: LineSet
+    ) -> Generator[Commonality, None, None]:
         """Find similarities in the two given linesets.

         This the core of the algorithm. The idea is to compute the hashes of a
@@ -278,35 +492,98 @@ class Similar:
         account common chunk of lines that have more than the minimal number of
         successive lines required.
         """
-        pass
+        hash_to_index_1: HashToIndex_T
+        hash_to_index_2: HashToIndex_T
+        index_to_lines_1: IndexToLines_T
+        index_to_lines_2: IndexToLines_T
+        hash_to_index_1, index_to_lines_1 = hash_lineset(
+            lineset1, self.namespace.min_similarity_lines
+        )
+        hash_to_index_2, index_to_lines_2 = hash_lineset(
+            lineset2, self.namespace.min_similarity_lines
+        )
+
+        hash_1: frozenset[LinesChunk] = frozenset(hash_to_index_1.keys())
+        hash_2: frozenset[LinesChunk] = frozenset(hash_to_index_2.keys())
+
+        common_hashes: Iterable[LinesChunk] = sorted(
+            hash_1 & hash_2, key=lambda m: hash_to_index_1[m][0]
+        )
+
+        # all_couples is a dict that links the couple of indices in both linesets that mark the beginning of
+        # successive common lines, to the corresponding starting and ending number lines in both files
+        all_couples: CplIndexToCplLines_T = {}
+
+        for c_hash in sorted(common_hashes, key=operator.attrgetter("_index")):
+            for indices_in_linesets in itertools.product(
+                hash_to_index_1[c_hash], hash_to_index_2[c_hash]
+            ):
+                index_1 = indices_in_linesets[0]
+                index_2 = indices_in_linesets[1]
+                all_couples[LineSetStartCouple(index_1, index_2)] = (
+                    CplSuccessiveLinesLimits(
+                        copy.copy(index_to_lines_1[index_1]),
+                        copy.copy(index_to_lines_2[index_2]),
+                        effective_cmn_lines_nb=self.namespace.min_similarity_lines,
+                    )
+                )
+
+        remove_successive(all_couples)
+
+        for cml_stripped_l, cmn_l in all_couples.items():
+            start_index_1 = cml_stripped_l.fst_lineset_index
+            start_index_2 = cml_stripped_l.snd_lineset_index
+            nb_common_lines = cmn_l.effective_cmn_lines_nb
+
+            com = Commonality(
+                cmn_lines_nb=nb_common_lines,
+                fst_lset=lineset1,
+                fst_file_start=cmn_l.first_file.start,
+                fst_file_end=cmn_l.first_file.end,
+                snd_lset=lineset2,
+                snd_file_start=cmn_l.second_file.start,
+                snd_file_end=cmn_l.second_file.end,
+            )
+
+            eff_cmn_nb = filter_noncode_lines(
+                lineset1, start_index_1, lineset2, start_index_2, nb_common_lines
+            )
+
+            if eff_cmn_nb > self.namespace.min_similarity_lines:
+                yield com

-    def _iter_sims(self) ->Generator[Commonality, None, None]:
+    def _iter_sims(self) -> Generator[Commonality, None, None]:
         """Iterate on similarities among all files, by making a Cartesian
         product.
         """
-        pass
+        for idx, lineset in enumerate(self.linesets[:-1]):
+            for lineset2 in self.linesets[idx + 1 :]:
+                yield from self._find_common(lineset, lineset2)

-    def get_map_data(self) ->list[LineSet]:
+    def get_map_data(self) -> list[LineSet]:
         """Returns the data we can use for a map/reduce process.

         In this case we are returning this instance's Linesets, that is all file
         information that will later be used for vectorisation.
         """
-        pass
+        return self.linesets

-    def combine_mapreduce_data(self, linesets_collection: list[list[LineSet]]
-        ) ->None:
+    def combine_mapreduce_data(self, linesets_collection: list[list[LineSet]]) -> None:
         """Reduces and recombines data into a format that we can report on.

         The partner function of get_map_data()
         """
-        pass
+        self.linesets = [line for lineset in linesets_collection for line in lineset]


-def stripped_lines(lines: Iterable[str], ignore_comments: bool,
-    ignore_docstrings: bool, ignore_imports: bool, ignore_signatures: bool,
-    line_enabled_callback: (Callable[[str, int], bool] | None)=None) ->list[
-    LineSpecifs]:
+def stripped_lines(
+    lines: Iterable[str],
+    ignore_comments: bool,
+    ignore_docstrings: bool,
+    ignore_imports: bool,
+    ignore_signatures: bool,
+    line_enabled_callback: Callable[[str, int], bool] | None = None,
+) -> list[LineSpecifs]:
     """Return tuples of line/line number/line type with leading/trailing white-space and
     any ignored code features removed.

@@ -319,7 +596,79 @@ def stripped_lines(lines: Iterable[str], ignore_comments: bool,
            the line
     :return: the collection of line/line number/line type tuples
     """
-    pass
+    if ignore_imports or ignore_signatures:
+        tree = astroid.parse("".join(lines))
+    if ignore_imports:
+        import_lines = {}
+        for node in tree.nodes_of_class((nodes.Import, nodes.ImportFrom)):
+            for lineno in range(node.lineno, (node.end_lineno or node.lineno) + 1):
+                import_lines[lineno] = True
+    if ignore_signatures:
+
+        def _get_functions(
+            functions: list[nodes.NodeNG], tree: nodes.NodeNG
+        ) -> list[nodes.NodeNG]:
+            """Recursively get all functions including nested in the classes from the
+            tree.
+            """
+            for node in tree.body:
+                if isinstance(node, (nodes.FunctionDef, nodes.AsyncFunctionDef)):
+                    functions.append(node)
+
+                if isinstance(
+                    node,
+                    (nodes.ClassDef, nodes.FunctionDef, nodes.AsyncFunctionDef),
+                ):
+                    _get_functions(functions, node)
+
+            return functions
+
+        functions = _get_functions([], tree)
+        signature_lines = set(
+            chain(
+                *(
+                    range(
+                        func.lineno,
+                        func.body[0].lineno if func.body else func.tolineno + 1,
+                    )
+                    for func in functions
+                )
+            )
+        )
+
+    strippedlines = []
+    docstring = None
+    for lineno, line in enumerate(lines, start=1):
+        if line_enabled_callback is not None and not line_enabled_callback(
+            "R0801", lineno
+        ):
+            continue
+        line = line.strip()
+        if ignore_docstrings:
+            if not docstring:
+                if line.startswith(('"""', "'''")):
+                    docstring = line[:3]
+                    line = line[3:]
+                elif line.startswith(('r"""', "r'''")):
+                    docstring = line[1:4]
+                    line = line[4:]
+            if docstring:
+                if line.endswith(docstring):
+                    docstring = None
+                line = ""
+        if ignore_imports:
+            current_line_is_import = import_lines.get(lineno, False)
+            if current_line_is_import:
+                line = ""
+        if ignore_comments:
+            line = line.split("#", 1)[0].strip()
+        if ignore_signatures and lineno in signature_lines:
+            line = ""
+        if line:
+            strippedlines.append(
+                LineSpecifs(text=line, line_number=LineNumber(lineno - 1))
+            )
+    return strippedlines


 @functools.total_ordering
@@ -330,125 +679,281 @@ class LineSet:
     are the real ones from which undesired patterns have been removed.
     """

-    def __init__(self, name: str, lines: list[str], ignore_comments: bool=
-        False, ignore_docstrings: bool=False, ignore_imports: bool=False,
-        ignore_signatures: bool=False, line_enabled_callback: (Callable[[
-        str, int], bool] | None)=None) ->None:
+    def __init__(
+        self,
+        name: str,
+        lines: list[str],
+        ignore_comments: bool = False,
+        ignore_docstrings: bool = False,
+        ignore_imports: bool = False,
+        ignore_signatures: bool = False,
+        line_enabled_callback: Callable[[str, int], bool] | None = None,
+    ) -> None:
         self.name = name
         self._real_lines = lines
-        self._stripped_lines = stripped_lines(lines, ignore_comments,
-            ignore_docstrings, ignore_imports, ignore_signatures,
-            line_enabled_callback=line_enabled_callback)
-
-    def __str__(self) ->str:
-        return f'<Lineset for {self.name}>'
-
-    def __len__(self) ->int:
+        self._stripped_lines = stripped_lines(
+            lines,
+            ignore_comments,
+            ignore_docstrings,
+            ignore_imports,
+            ignore_signatures,
+            line_enabled_callback=line_enabled_callback,
+        )
+
+    def __str__(self) -> str:
+        return f"<Lineset for {self.name}>"
+
+    def __len__(self) -> int:
         return len(self._real_lines)

-    def __getitem__(self, index: int) ->LineSpecifs:
+    def __getitem__(self, index: int) -> LineSpecifs:
         return self._stripped_lines[index]

-    def __lt__(self, other: LineSet) ->bool:
+    def __lt__(self, other: LineSet) -> bool:
         return self.name < other.name

-    def __hash__(self) ->int:
+    def __hash__(self) -> int:
         return id(self)

-    def __eq__(self, other: object) ->bool:
+    def __eq__(self, other: object) -> bool:
         if not isinstance(other, LineSet):
             return False
         return self.__dict__ == other.__dict__

+    @property
+    def stripped_lines(self) -> list[LineSpecifs]:
+        return self._stripped_lines

-MSGS: dict[str, MessageDefinitionTuple] = {'R0801': (
-    """Similar lines in %s files
-%s""", 'duplicate-code',
-    'Indicates that a set of similar lines has been detected among multiple file. This usually means that the code should be refactored to avoid this duplication.'
-    )}
+    @property
+    def real_lines(self) -> list[str]:
+        return self._real_lines


-def report_similarities(sect: Section, stats: LinterStats, old_stats: (
-    LinterStats | None)) ->None:
+MSGS: dict[str, MessageDefinitionTuple] = {
+    "R0801": (
+        "Similar lines in %s files\n%s",
+        "duplicate-code",
+        "Indicates that a set of similar lines has been detected "
+        "among multiple file. This usually means that the code should "
+        "be refactored to avoid this duplication.",
+    )
+}
+
+
+def report_similarities(
+    sect: Section,
+    stats: LinterStats,
+    old_stats: LinterStats | None,
+) -> None:
     """Make a layout with some stats about duplication."""
-    pass
+    lines = ["", "now", "previous", "difference"]
+    lines += table_lines_from_stats(stats, old_stats, "duplicated_lines")
+    sect.append(Table(children=lines, cols=4, rheaders=1, cheaders=1))


+# wrapper to get a pylint checker from the similar class
 class SimilarChecker(BaseRawFileChecker, Similar):
     """Checks for similarities and duplicated code.

     This computation may be memory / CPU intensive, so you
     should disable it if you experience some problems.
     """
-    name = 'similarities'
+
+    # configuration section name
+    name = "similarities"
+    # messages
     msgs = MSGS
-    options: Options = (('min-similarity-lines', {'default':
-        DEFAULT_MIN_SIMILARITY_LINE, 'type': 'int', 'metavar': '<int>',
-        'help': 'Minimum lines number of a similarity.'}), (
-        'ignore-comments', {'default': True, 'type': 'yn', 'metavar':
-        '<y or n>', 'help':
-        'Comments are removed from the similarity computation'}), (
-        'ignore-docstrings', {'default': True, 'type': 'yn', 'metavar':
-        '<y or n>', 'help':
-        'Docstrings are removed from the similarity computation'}), (
-        'ignore-imports', {'default': True, 'type': 'yn', 'metavar':
-        '<y or n>', 'help':
-        'Imports are removed from the similarity computation'}), (
-        'ignore-signatures', {'default': True, 'type': 'yn', 'metavar':
-        '<y or n>', 'help':
-        'Signatures are removed from the similarity computation'}))
-    reports = ('RP0801', 'Duplication', report_similarities),
-
-    def __init__(self, linter: PyLinter) ->None:
+    # configuration options
+    # for available dict keys/values see the optik parser 'add_option' method
+    options: Options = (
+        (
+            "min-similarity-lines",
+            {
+                "default": DEFAULT_MIN_SIMILARITY_LINE,
+                "type": "int",
+                "metavar": "<int>",
+                "help": "Minimum lines number of a similarity.",
+            },
+        ),
+        (
+            "ignore-comments",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Comments are removed from the similarity computation",
+            },
+        ),
+        (
+            "ignore-docstrings",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Docstrings are removed from the similarity computation",
+            },
+        ),
+        (
+            "ignore-imports",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Imports are removed from the similarity computation",
+            },
+        ),
+        (
+            "ignore-signatures",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Signatures are removed from the similarity computation",
+            },
+        ),
+    )
+    # reports
+    reports = (("RP0801", "Duplication", report_similarities),)
+
+    def __init__(self, linter: PyLinter) -> None:
         BaseRawFileChecker.__init__(self, linter)
-        Similar.__init__(self, min_lines=self.linter.config.
-            min_similarity_lines, ignore_comments=self.linter.config.
-            ignore_comments, ignore_docstrings=self.linter.config.
-            ignore_docstrings, ignore_imports=self.linter.config.
-            ignore_imports, ignore_signatures=self.linter.config.
-            ignore_signatures)
-
-    def open(self) ->None:
+        Similar.__init__(
+            self,
+            min_lines=self.linter.config.min_similarity_lines,
+            ignore_comments=self.linter.config.ignore_comments,
+            ignore_docstrings=self.linter.config.ignore_docstrings,
+            ignore_imports=self.linter.config.ignore_imports,
+            ignore_signatures=self.linter.config.ignore_signatures,
+        )
+
+    def open(self) -> None:
         """Init the checkers: reset linesets and statistics information."""
-        pass
+        self.linesets = []
+        self.linter.stats.reset_duplicated_lines()

-    def process_module(self, node: nodes.Module) ->None:
+    def process_module(self, node: nodes.Module) -> None:
         """Process a module.

         the module's content is accessible via the stream object

         stream must implement the readlines method
         """
-        pass
+        if self.linter.current_name is None:
+            # TODO: 4.0 Fix current_name
+            warnings.warn(
+                (
+                    "In pylint 3.0 the current_name attribute of the linter object should be a string. "
+                    "If unknown it should be initialized as an empty string."
+                ),
+                DeprecationWarning,
+                stacklevel=2,
+            )
+        with node.stream() as stream:
+            self.append_stream(self.linter.current_name, stream, node.file_encoding)

-    def close(self) ->None:
+    def close(self) -> None:
         """Compute and display similarities on closing (i.e. end of parsing)."""
-        pass
-
-    def get_map_data(self) ->list[LineSet]:
+        total = sum(len(lineset) for lineset in self.linesets)
+        duplicated = 0
+        stats = self.linter.stats
+        for num, couples in self._compute_sims():
+            msg = []
+            lineset = start_line = end_line = None
+            for lineset, start_line, end_line in couples:
+                msg.append(f"=={lineset.name}:[{start_line}:{end_line}]")
+            msg.sort()
+
+            if lineset:
+                for line in lineset.real_lines[start_line:end_line]:
+                    msg.append(line.rstrip())
+
+            self.add_message("R0801", args=(len(couples), "\n".join(msg)))
+            duplicated += num * (len(couples) - 1)
+        stats.nb_duplicated_lines += int(duplicated)
+        stats.percent_duplicated_lines += float(total and duplicated * 100.0 / total)
+
+    def get_map_data(self) -> list[LineSet]:
         """Passthru override."""
-        pass
+        return Similar.get_map_data(self)

-    def reduce_map_data(self, linter: PyLinter, data: list[list[LineSet]]
-        ) ->None:
+    def reduce_map_data(self, linter: PyLinter, data: list[list[LineSet]]) -> None:
         """Reduces and recombines data into a format that we can report on.

         The partner function of get_map_data()

         Calls self.close() to actually calculate and report duplicate code.
         """
-        pass
+        Similar.combine_mapreduce_data(self, linesets_collection=data)
+        self.close()


-def usage(status: int=0) ->NoReturn:
-    """Display command line usage information."""
-    pass
+def register(linter: PyLinter) -> None:
+    linter.register_checker(SimilarChecker(linter))


-def Run(argv: (Sequence[str] | None)=None) ->NoReturn:
-    """Standalone command line access point."""
-    pass
+def usage(status: int = 0) -> NoReturn:
+    """Display command line usage information."""
+    print("finds copy pasted blocks in a set of files")
+    print()
+    print(
+        "Usage: symilar [-d|--duplicates min_duplicated_lines] \
+[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] [--ignore-signatures] file1..."
+    )
+    sys.exit(status)


-if __name__ == '__main__':
+def Run(argv: Sequence[str] | None = None) -> NoReturn:
+    """Standalone command line access point."""
+    if argv is None:
+        argv = sys.argv[1:]
+
+    s_opts = "hd:i:"
+    l_opts = [
+        "help",
+        "duplicates=",
+        "ignore-comments",
+        "ignore-imports",
+        "ignore-docstrings",
+        "ignore-signatures",
+    ]
+    min_lines = DEFAULT_MIN_SIMILARITY_LINE
+    ignore_comments = False
+    ignore_docstrings = False
+    ignore_imports = False
+    ignore_signatures = False
+    try:
+        opts, args = getopt(list(argv), s_opts, l_opts)
+    except GetoptError as e:
+        print(e)
+        usage(2)
+    for opt, val in opts:
+        if opt in {"-d", "--duplicates"}:
+            try:
+                min_lines = int(val)
+            except ValueError as e:
+                print(e)
+                usage(2)
+        elif opt in {"-h", "--help"}:
+            usage()
+        elif opt in {"-i", "--ignore-comments"}:
+            ignore_comments = True
+        elif opt in {"--ignore-docstrings"}:
+            ignore_docstrings = True
+        elif opt in {"--ignore-imports"}:
+            ignore_imports = True
+        elif opt in {"--ignore-signatures"}:
+            ignore_signatures = True
+    if not args:
+        usage(1)
+    sim = Similar(
+        min_lines, ignore_comments, ignore_docstrings, ignore_imports, ignore_signatures
+    )
+    for filename in args:
+        with open(filename, encoding="utf-8") as stream:
+            sim.append_stream(filename, stream)
+    sim.run()
+    sys.exit(0)
+
+
+if __name__ == "__main__":
     Run()
diff --git a/pylint/checkers/spelling.py b/pylint/checkers/spelling.py
index 110818045..27d1c7ce0 100644
--- a/pylint/checkers/spelling.py
+++ b/pylint/checkers/spelling.py
@@ -1,146 +1,474 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Checker for spelling errors in comments and docstrings."""
+
 from __future__ import annotations
+
 import re
 import tokenize
 from re import Pattern
 from typing import TYPE_CHECKING, Any, Literal
+
 from astroid import nodes
+
 from pylint.checkers import BaseTokenChecker
 from pylint.checkers.utils import only_required_for_messages
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
+
 try:
     import enchant
-    from enchant.tokenize import Chunker, EmailFilter, Filter, URLFilter, WikiWordFilter, get_tokenizer
+    from enchant.tokenize import (
+        Chunker,
+        EmailFilter,
+        Filter,
+        URLFilter,
+        WikiWordFilter,
+        get_tokenizer,
+    )
+
     PYENCHANT_AVAILABLE = True
-except ImportError:
+except ImportError:  # pragma: no cover
     enchant = None
     PYENCHANT_AVAILABLE = False

-
-    class EmailFilter:
+    class EmailFilter:  # type: ignore[no-redef]
         ...

-
-    class URLFilter:
+    class URLFilter:  # type: ignore[no-redef]
         ...

-
-    class WikiWordFilter:
+    class WikiWordFilter:  # type: ignore[no-redef]
         ...

+    class Filter:  # type: ignore[no-redef]
+        def _skip(self, word: str) -> bool:
+            raise NotImplementedError

-    class Filter:
+    class Chunker:  # type: ignore[no-redef]
         pass

+    def get_tokenizer(
+        tag: str | None = None,  # pylint: disable=unused-argument
+        chunkers: list[Chunker] | None = None,  # pylint: disable=unused-argument
+        filters: list[Filter] | None = None,  # pylint: disable=unused-argument
+    ) -> Filter:
+        return Filter()
+
+
+def _get_enchant_dicts() -> list[tuple[Any, enchant.ProviderDesc]]:
+    # Broker().list_dicts() is not typed in enchant, but it does return tuples
+    return enchant.Broker().list_dicts() if PYENCHANT_AVAILABLE else []  # type: ignore[no-any-return]
+
+
+def _get_enchant_dict_choices(
+    inner_enchant_dicts: list[tuple[Any, enchant.ProviderDesc]]
+) -> list[str]:
+    return [""] + [d[0] for d in inner_enchant_dicts]
+
+
+def _get_enchant_dict_help(
+    inner_enchant_dicts: list[tuple[Any, enchant.ProviderDesc]],
+    pyenchant_available: bool,
+) -> str:
+    if inner_enchant_dicts:
+        dict_as_str = [f"{d[0]} ({d[1].name})" for d in inner_enchant_dicts]
+        enchant_help = f"Available dictionaries: {', '.join(dict_as_str)}"
+    else:
+        enchant_help = "No available dictionaries : You need to install "
+        if not pyenchant_available:
+            enchant_help += "both the python package and "
+        enchant_help += "the system dependency for enchant to work"
+    return f"Spelling dictionary name. {enchant_help}."
+

-    class Chunker:
-        pass
 enchant_dicts = _get_enchant_dicts()


-class WordsWithDigitsFilter(Filter):
+class WordsWithDigitsFilter(Filter):  # type: ignore[misc]
     """Skips words with digits."""

+    def _skip(self, word: str) -> bool:
+        return any(char.isdigit() for char in word)
+

-class WordsWithUnderscores(Filter):
+class WordsWithUnderscores(Filter):  # type: ignore[misc]
     """Skips words with underscores.

     They are probably function parameter names.
     """

+    def _skip(self, word: str) -> bool:
+        return "_" in word

-class RegExFilter(Filter):
+
+class RegExFilter(Filter):  # type: ignore[misc]
     """Parent class for filters using regular expressions.

     This filter skips any words the match the expression
     assigned to the class attribute ``_pattern``.
     """
+
     _pattern: Pattern[str]

+    def _skip(self, word: str) -> bool:
+        return bool(self._pattern.match(word))
+

 class CamelCasedWord(RegExFilter):
-    """Filter skipping over camelCasedWords.
+    r"""Filter skipping over camelCasedWords.
     This filter skips any words matching the following regular expression:

-           ^([a-z]\\w+[A-Z]+\\w+)
+           ^([a-z]\w+[A-Z]+\w+)

     That is, any words that are camelCasedWords.
     """
-    _pattern = re.compile('^([a-z]+(\\d|[A-Z])(?:\\w+)?)')
+
+    _pattern = re.compile(r"^([a-z]+(\d|[A-Z])(?:\w+)?)")


 class SphinxDirectives(RegExFilter):
-    """Filter skipping over Sphinx Directives.
+    r"""Filter skipping over Sphinx Directives.
     This filter skips any words matching the following regular expression:

            ^(:([a-z]+)){1,2}:`([^`]+)(`)?

     That is, for example, :class:`BaseQuery`
     """
-    _pattern = re.compile('^(:([a-z]+)){1,2}:`([^`]+)(`)?')
+
+    # The final ` in the pattern is optional because enchant strips it out
+    _pattern = re.compile(r"^(:([a-z]+)){1,2}:`([^`]+)(`)?")


-class ForwardSlashChunker(Chunker):
+class ForwardSlashChunker(Chunker):  # type: ignore[misc]
     """This chunker allows splitting words like 'before/after' into 'before' and
     'after'.
     """
-    _text: str
-
-
-CODE_FLANKED_IN_BACKTICK_REGEX = re.compile(
-    '(\\s|^)(`{1,2})([^`]+)(\\2)([^`]|$)')

+    _text: str

-def _strip_code_flanked_in_backticks(line: str) ->str:
+    def next(self) -> tuple[str, int]:
+        while True:
+            if not self._text:
+                raise StopIteration()
+            if "/" not in self._text:
+                text = self._text
+                self._offset = 0
+                self._text = ""
+                return text, 0
+            pre_text, post_text = self._text.split("/", 1)
+            self._text = post_text
+            self._offset = 0
+            if (
+                not pre_text
+                or not post_text
+                or not pre_text[-1].isalpha()
+                or not post_text[0].isalpha()
+            ):
+                self._text = ""
+                self._offset = 0
+                return f"{pre_text}/{post_text}", 0
+            return pre_text, 0
+
+    def _next(self) -> tuple[str, Literal[0]]:
+        while True:
+            if "/" not in self._text:
+                return self._text, 0
+            pre_text, post_text = self._text.split("/", 1)
+            if not pre_text or not post_text:
+                break
+            if not pre_text[-1].isalpha() or not post_text[0].isalpha():
+                raise StopIteration()
+            self._text = pre_text + " " + post_text
+        raise StopIteration()
+
+
+CODE_FLANKED_IN_BACKTICK_REGEX = re.compile(r"(\s|^)(`{1,2})([^`]+)(\2)([^`]|$)")
+
+
+def _strip_code_flanked_in_backticks(line: str) -> str:
     """Alter line so code flanked in back-ticks is ignored.

     Pyenchant automatically strips back-ticks when parsing tokens,
     so this cannot be done at the individual filter level.
     """
-    pass
+
+    def replace_code_but_leave_surrounding_characters(match_obj: re.Match[str]) -> str:
+        return match_obj.group(1) + match_obj.group(5)
+
+    return CODE_FLANKED_IN_BACKTICK_REGEX.sub(
+        replace_code_but_leave_surrounding_characters, line
+    )


 class SpellingChecker(BaseTokenChecker):
     """Check spelling in comments and docstrings."""
-    name = 'spelling'
-    msgs = {'C0401': (
-        "Wrong spelling of a word '%s' in a comment:\n%s\n%s\nDid you mean: '%s'?"
-        , 'wrong-spelling-in-comment',
-        'Used when a word in comment is not spelled correctly.'), 'C0402':
+
+    name = "spelling"
+    msgs = {
+        "C0401": (
+            "Wrong spelling of a word '%s' in a comment:\n%s\n"
+            "%s\nDid you mean: '%s'?",
+            "wrong-spelling-in-comment",
+            "Used when a word in comment is not spelled correctly.",
+        ),
+        "C0402": (
+            "Wrong spelling of a word '%s' in a docstring:\n%s\n"
+            "%s\nDid you mean: '%s'?",
+            "wrong-spelling-in-docstring",
+            "Used when a word in docstring is not spelled correctly.",
+        ),
+        "C0403": (
+            "Invalid characters %r in a docstring",
+            "invalid-characters-in-docstring",
+            "Used when a word in docstring cannot be checked by enchant.",
+        ),
+    }
+    options = (
+        (
+            "spelling-dict",
+            {
+                "default": "",
+                "type": "choice",
+                "metavar": "<dict name>",
+                "choices": _get_enchant_dict_choices(enchant_dicts),
+                "help": _get_enchant_dict_help(enchant_dicts, PYENCHANT_AVAILABLE),
+            },
+        ),
+        (
+            "spelling-ignore-words",
+            {
+                "default": "",
+                "type": "string",
+                "metavar": "<comma separated words>",
+                "help": "List of comma separated words that should not be checked.",
+            },
+        ),
+        (
+            "spelling-private-dict-file",
+            {
+                "default": "",
+                "type": "path",
+                "metavar": "<path to file>",
+                "help": "A path to a file that contains the private "
+                "dictionary; one word per line.",
+            },
+        ),
         (
-        """Wrong spelling of a word '%s' in a docstring:
-%s
-%s
-Did you mean: '%s'?"""
-        , 'wrong-spelling-in-docstring',
-        'Used when a word in docstring is not spelled correctly.'), 'C0403':
-        ('Invalid characters %r in a docstring',
-        'invalid-characters-in-docstring',
-        'Used when a word in docstring cannot be checked by enchant.')}
-    options = ('spelling-dict', {'default': '', 'type': 'choice', 'metavar':
-        '<dict name>', 'choices': _get_enchant_dict_choices(enchant_dicts),
-        'help': _get_enchant_dict_help(enchant_dicts, PYENCHANT_AVAILABLE)}), (
-        'spelling-ignore-words', {'default': '', 'type': 'string',
-        'metavar': '<comma separated words>', 'help':
-        'List of comma separated words that should not be checked.'}), (
-        'spelling-private-dict-file', {'default': '', 'type': 'path',
-        'metavar': '<path to file>', 'help':
-        'A path to a file that contains the private dictionary; one word per line.'
-        }), ('spelling-store-unknown-words', {'default': 'n', 'type': 'yn',
-        'metavar': '<y or n>', 'help':
-        'Tells whether to store unknown words to the private dictionary (see the --spelling-private-dict-file option) instead of raising a message.'
-        }), ('max-spelling-suggestions', {'default': 4, 'type': 'int',
-        'metavar': 'N', 'help':
-        'Limits count of emitted suggestions for spelling mistakes.'}), (
-        'spelling-ignore-comment-directives', {'default':
-        'fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:', 'type':
-        'string', 'metavar': '<comma separated words>', 'help':
-        'List of comma separated words that should be considered directives if they appear at the beginning of a comment and should not be checked.'
-        })
+            "spelling-store-unknown-words",
+            {
+                "default": "n",
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Tells whether to store unknown words to the "
+                "private dictionary (see the "
+                "--spelling-private-dict-file option) instead of "
+                "raising a message.",
+            },
+        ),
+        (
+            "max-spelling-suggestions",
+            {
+                "default": 4,
+                "type": "int",
+                "metavar": "N",
+                "help": "Limits count of emitted suggestions for spelling mistakes.",
+            },
+        ),
+        (
+            "spelling-ignore-comment-directives",
+            {
+                "default": "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:",
+                "type": "string",
+                "metavar": "<comma separated words>",
+                "help": "List of comma separated words that should be considered "
+                "directives if they appear at the beginning of a comment "
+                "and should not be checked.",
+            },
+        ),
+    )
+
+    def open(self) -> None:
+        self.initialized = False
+        if not PYENCHANT_AVAILABLE:
+            return
+        dict_name = self.linter.config.spelling_dict
+        if not dict_name:
+            return
+
+        self.ignore_list = [
+            w.strip() for w in self.linter.config.spelling_ignore_words.split(",")
+        ]
+        # "param" appears in docstring in param description and
+        # "pylint" appears in comments in pylint pragmas.
+        self.ignore_list.extend(["param", "pylint"])
+
+        self.ignore_comment_directive_list = [
+            w.strip()
+            for w in self.linter.config.spelling_ignore_comment_directives.split(",")
+        ]
+
+        if self.linter.config.spelling_private_dict_file:
+            self.spelling_dict = enchant.DictWithPWL(
+                dict_name, self.linter.config.spelling_private_dict_file
+            )
+        else:
+            self.spelling_dict = enchant.Dict(dict_name)
+
+        if self.linter.config.spelling_store_unknown_words:
+            self.unknown_words: set[str] = set()
+
+        self.tokenizer = get_tokenizer(
+            dict_name,
+            chunkers=[ForwardSlashChunker],
+            filters=[
+                EmailFilter,
+                URLFilter,
+                WikiWordFilter,
+                WordsWithDigitsFilter,
+                WordsWithUnderscores,
+                CamelCasedWord,
+                SphinxDirectives,
+            ],
+        )
+        self.initialized = True
+
+    # pylint: disable = too-many-statements
+    def _check_spelling(self, msgid: str, line: str, line_num: int) -> None:
+        original_line = line
+        try:
+            # The mypy warning is caught by the except statement
+            initial_space = re.search(r"^\s+", line).regs[0][1]  # type: ignore[union-attr]
+        except (IndexError, AttributeError):
+            initial_space = 0
+        if line.strip().startswith("#") and "docstring" not in msgid:
+            line = line.strip()[1:]
+            # A ``Filter`` cannot determine if the directive is at the beginning of a line,
+            #   nor determine if a colon is present or not (``pyenchant`` strips trailing colons).
+            #   So implementing this here.
+            for iter_directive in self.ignore_comment_directive_list:
+                if line.startswith(" " + iter_directive):
+                    line = line[(len(iter_directive) + 1) :]
+                    break
+            starts_with_comment = True
+        else:
+            starts_with_comment = False
+
+        line = _strip_code_flanked_in_backticks(line)
+
+        for word, word_start_at in self.tokenizer(line.strip()):
+            word_start_at += initial_space
+            lower_cased_word = word.casefold()
+
+            # Skip words from ignore list.
+            if word in self.ignore_list or lower_cased_word in self.ignore_list:
+                continue
+
+            # Strip starting u' from unicode literals and r' from raw strings.
+            if word.startswith(("u'", 'u"', "r'", 'r"')) and len(word) > 2:
+                word = word[2:]
+                lower_cased_word = lower_cased_word[2:]
+
+            # If it is a known word, then continue.
+            try:
+                if self.spelling_dict.check(lower_cased_word):
+                    # The lower cased version of word passed spell checking
+                    continue
+
+                # If we reached this far, it means there was a spelling mistake.
+                # Let's retry with the original word because 'unicode' is a
+                # spelling mistake but 'Unicode' is not
+                if self.spelling_dict.check(word):
+                    continue
+            except enchant.errors.Error:
+                self.add_message(
+                    "invalid-characters-in-docstring", line=line_num, args=(word,)
+                )
+                continue
+
+            # Store word to private dict or raise a message.
+            if self.linter.config.spelling_store_unknown_words:
+                if lower_cased_word not in self.unknown_words:
+                    with open(
+                        self.linter.config.spelling_private_dict_file,
+                        "a",
+                        encoding="utf-8",
+                    ) as f:
+                        f.write(f"{lower_cased_word}\n")
+                    self.unknown_words.add(lower_cased_word)
+            else:
+                # Present up to N suggestions.
+                suggestions = self.spelling_dict.suggest(word)
+                del suggestions[self.linter.config.max_spelling_suggestions :]
+                line_segment = line[word_start_at:]
+                match = re.search(rf"(\W|^)({word})(\W|$)", line_segment)
+                if match:
+                    # Start position of second group in regex.
+                    col = match.regs[2][0]
+                else:
+                    col = line_segment.index(word)
+                col += word_start_at
+                if starts_with_comment:
+                    col += 1
+                indicator = (" " * col) + ("^" * len(word))
+                all_suggestion = "' or '".join(suggestions)
+                args = (word, original_line, indicator, f"'{all_suggestion}'")
+                self.add_message(msgid, line=line_num, args=args)
+
+    def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
+        if not self.initialized:
+            return
+
+        # Process tokens and look for comments.
+        for tok_type, token, (start_row, _), _, _ in tokens:
+            if tok_type == tokenize.COMMENT:
+                if start_row == 1 and token.startswith("#!/"):
+                    # Skip shebang lines
+                    continue
+                if token.startswith("# pylint:"):
+                    # Skip pylint enable/disable comments
+                    continue
+                if token.startswith("# type: "):
+                    # Skip python 2 type comments and mypy type ignore comments
+                    # mypy does not support additional text in type comments
+                    continue
+                self._check_spelling("wrong-spelling-in-comment", token, start_row)
+
+    @only_required_for_messages("wrong-spelling-in-docstring")
+    def visit_module(self, node: nodes.Module) -> None:
+        self._check_docstring(node)
+
+    @only_required_for_messages("wrong-spelling-in-docstring")
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
+        self._check_docstring(node)
+
+    @only_required_for_messages("wrong-spelling-in-docstring")
+    def visit_functiondef(
+        self, node: nodes.FunctionDef | nodes.AsyncFunctionDef
+    ) -> None:
+        self._check_docstring(node)
+
     visit_asyncfunctiondef = visit_functiondef

-    def _check_docstring(self, node: (nodes.FunctionDef | nodes.
-        AsyncFunctionDef | nodes.ClassDef | nodes.Module)) ->None:
+    def _check_docstring(
+        self,
+        node: (
+            nodes.FunctionDef | nodes.AsyncFunctionDef | nodes.ClassDef | nodes.Module
+        ),
+    ) -> None:
         """Check if the node has any spelling errors."""
-        pass
+        if not self.initialized:
+            return
+        if not node.doc_node:
+            return
+        start_line = node.lineno + 1
+        # Go through lines of docstring
+        for idx, line in enumerate(node.doc_node.value.splitlines()):
+            self._check_spelling("wrong-spelling-in-docstring", line, start_line + idx)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(SpellingChecker(linter))
diff --git a/pylint/checkers/stdlib.py b/pylint/checkers/stdlib.py
index 0baac8efa..10c1d54bf 100644
--- a/pylint/checkers/stdlib.py
+++ b/pylint/checkers/stdlib.py
@@ -1,229 +1,548 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Checkers for various standard library functions."""
+
 from __future__ import annotations
+
 import sys
 from collections.abc import Iterable
 from typing import TYPE_CHECKING, Any, Dict, Set, Tuple
+
 import astroid
 from astroid import nodes, util
 from astroid.typing import InferenceResult
+
 from pylint import interfaces
 from pylint.checkers import BaseChecker, DeprecatedMixin, utils
 from pylint.interfaces import HIGH, INFERENCE
 from pylint.typing import MessageDefinitionTuple
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
+
 DeprecationDict = Dict[Tuple[int, int, int], Set[str]]
-OPEN_FILES_MODE = 'open', 'file'
-OPEN_FILES_FUNCS = *OPEN_FILES_MODE, 'read_text', 'write_text'
-UNITTEST_CASE = 'unittest.case'
-THREADING_THREAD = 'threading.Thread'
-COPY_COPY = 'copy.copy'
-OS_ENVIRON = 'os._Environ'
-ENV_GETTERS = 'os.getenv',
-SUBPROCESS_POPEN = 'subprocess.Popen'
-SUBPROCESS_RUN = 'subprocess.run'
-OPEN_MODULE = {'_io', 'pathlib'}
-DEBUG_BREAKPOINTS = ('builtins.breakpoint', 'sys.breakpointhook',
-    'pdb.set_trace')
-LRU_CACHE = {'functools.lru_cache', 'functools._lru_cache_wrapper.wrapper',
-    'functools.lru_cache.decorating_function'}
-NON_INSTANCE_METHODS = {'builtins.staticmethod', 'builtins.classmethod'}
-DEPRECATED_ARGUMENTS: dict[tuple[int, int, int], dict[str, tuple[tuple[int |
-    None, str], ...]]] = {(0, 0, 0): {'int': ((None, 'x'),), 'bool': ((None,
-    'x'),), 'float': ((None, 'x'),)}, (3, 8, 0): {'asyncio.tasks.sleep': ((
-    None, 'loop'),), 'asyncio.tasks.gather': ((None, 'loop'),),
-    'asyncio.tasks.shield': ((None, 'loop'),), 'asyncio.tasks.wait_for': ((
-    None, 'loop'),), 'asyncio.tasks.wait': ((None, 'loop'),),
-    'asyncio.tasks.as_completed': ((None, 'loop'),),
-    'asyncio.subprocess.create_subprocess_exec': ((None, 'loop'),),
-    'asyncio.subprocess.create_subprocess_shell': ((4, 'loop'),),
-    'gettext.translation': ((5, 'codeset'),), 'gettext.install': ((2,
-    'codeset'),), 'functools.partialmethod': ((None, 'func'),),
-    'weakref.finalize': ((None, 'func'), (None, 'obj')),
-    'profile.Profile.runcall': ((None, 'func'),),
-    'cProfile.Profile.runcall': ((None, 'func'),), 'bdb.Bdb.runcall': ((
-    None, 'func'),), 'trace.Trace.runfunc': ((None, 'func'),),
-    'curses.wrapper': ((None, 'func'),),
-    'unittest.case.TestCase.addCleanup': ((None, 'function'),),
-    'concurrent.futures.thread.ThreadPoolExecutor.submit': ((None, 'fn'),),
-    'concurrent.futures.process.ProcessPoolExecutor.submit': ((None, 'fn'),
-    ), 'contextlib._BaseExitStack.callback': ((None, 'callback'),),
-    'contextlib.AsyncExitStack.push_async_callback': ((None, 'callback'),),
-    'multiprocessing.managers.Server.create': ((None, 'c'), (None, 'typeid'
-    )), 'multiprocessing.managers.SharedMemoryServer.create': ((None, 'c'),
-    (None, 'typeid'))}, (3, 9, 0): {'random.Random.shuffle': ((1, 'random')
-    ,)}, (3, 12, 0): {'argparse.BooleanOptionalAction': ((3, 'type'), (4,
-    'choices'), (7, 'metavar')), 'coroutine.throw': ((1, 'value'), (2,
-    'traceback')), 'email.utils.localtime': ((1, 'isdst'),),
-    'shutil.rmtree': ((2, 'onerror'),)}}
-DEPRECATED_DECORATORS: DeprecationDict = {(3, 8, 0): {'asyncio.coroutine'},
-    (3, 3, 0): {'abc.abstractclassmethod', 'abc.abstractstaticmethod',
-    'abc.abstractproperty'}, (3, 4, 0): {'importlib.util.module_for_loader'}}
-DEPRECATED_METHODS: dict[int, DeprecationDict] = {(0): {(0, 0, 0): {
-    'cgi.parse_qs', 'cgi.parse_qsl', 'ctypes.c_buffer',
-    'distutils.command.register.register.check_metadata',
-    'distutils.command.sdist.sdist.check_metadata',
-    'tkinter.Misc.tk_menuBar', 'tkinter.Menu.tk_bindForTraversal'}}, (2): {
-    (2, 6, 0): {'commands.getstatus', 'os.popen2', 'os.popen3', 'os.popen4',
-    'macostools.touched'}, (2, 7, 0): {
-    'unittest.case.TestCase.assertEquals',
-    'unittest.case.TestCase.assertNotEquals',
-    'unittest.case.TestCase.assertAlmostEquals',
-    'unittest.case.TestCase.assertNotAlmostEquals',
-    'unittest.case.TestCase.assert_',
-    'xml.etree.ElementTree.Element.getchildren',
-    'xml.etree.ElementTree.Element.getiterator',
-    'xml.etree.ElementTree.XMLParser.getiterator',
-    'xml.etree.ElementTree.XMLParser.doctype'}}, (3): {(3, 0, 0): {
-    'inspect.getargspec', 'failUnlessEqual', 'assertEquals', 'failIfEqual',
-    'assertNotEquals', 'failUnlessAlmostEqual', 'assertAlmostEquals',
-    'failIfAlmostEqual', 'assertNotAlmostEquals', 'failUnless', 'assert_',
-    'failUnlessRaises', 'failIf', 'assertRaisesRegexp',
-    'assertRegexpMatches', 'assertNotRegexpMatches'}, (3, 1, 0): {
-    'base64.encodestring', 'base64.decodestring', 'ntpath.splitunc',
-    'os.path.splitunc', 'os.stat_float_times',
-    'turtle.RawTurtle.settiltangle'}, (3, 2, 0): {'cgi.escape',
-    'configparser.RawConfigParser.readfp',
-    'xml.etree.ElementTree.Element.getchildren',
-    'xml.etree.ElementTree.Element.getiterator',
-    'xml.etree.ElementTree.XMLParser.getiterator',
-    'xml.etree.ElementTree.XMLParser.doctype'}, (3, 3, 0): {
-    'inspect.getmoduleinfo', 'logging.warn', 'logging.Logger.warn',
-    'logging.LoggerAdapter.warn', 'nntplib._NNTPBase.xpath',
-    'platform.popen', 'sqlite3.OptimizedUnicode', 'time.clock'}, (3, 4, 0):
-    {'importlib.find_loader', 'importlib.abc.Loader.load_module',
-    'importlib.abc.Loader.module_repr',
-    'importlib.abc.PathEntryFinder.find_loader',
-    'importlib.abc.PathEntryFinder.find_module', 'plistlib.readPlist',
-    'plistlib.writePlist', 'plistlib.readPlistFromBytes',
-    'plistlib.writePlistToBytes'}, (3, 4, 4): {'asyncio.tasks.async'}, (3, 
-    5, 0): {'fractions.gcd', 'inspect.formatargspec', 'inspect.getcallargs',
-    'platform.linux_distribution', 'platform.dist'}, (3, 6, 0): {
-    'importlib._bootstrap_external.FileLoader.load_module',
-    '_ssl.RAND_pseudo_bytes'}, (3, 7, 0): {'sys.set_coroutine_wrapper',
-    'sys.get_coroutine_wrapper', 'aifc.openfp', 'threading.Thread.isAlive',
-    'asyncio.Task.current_task', 'asyncio.Task.all_task', 'locale.format',
-    'ssl.wrap_socket', 'ssl.match_hostname', 'sunau.openfp', 'wave.openfp'},
-    (3, 8, 0): {'gettext.lgettext', 'gettext.ldgettext',
-    'gettext.lngettext', 'gettext.ldngettext',
-    'gettext.bind_textdomain_codeset',
-    'gettext.NullTranslations.output_charset',
-    'gettext.NullTranslations.set_output_charset',
-    'threading.Thread.isAlive'}, (3, 9, 0): {'binascii.b2a_hqx',
-    'binascii.a2b_hqx', 'binascii.rlecode_hqx', 'binascii.rledecode_hqx',
-    'importlib.resources.contents', 'importlib.resources.is_resource',
-    'importlib.resources.open_binary', 'importlib.resources.open_text',
-    'importlib.resources.path', 'importlib.resources.read_binary',
-    'importlib.resources.read_text'}, (3, 10, 0): {
-    '_sqlite3.enable_shared_cache', 'importlib.abc.Finder.find_module',
-    'pathlib.Path.link_to', 'zipimport.zipimporter.load_module',
-    'zipimport.zipimporter.find_module',
-    'zipimport.zipimporter.find_loader', 'threading.currentThread',
-    'threading.activeCount', 'threading.Condition.notifyAll',
-    'threading.Event.isSet', 'threading.Thread.setName',
-    'threading.Thread.getName', 'threading.Thread.isDaemon',
-    'threading.Thread.setDaemon', 'cgi.log'}, (3, 11, 0): {
-    'locale.getdefaultlocale', 'locale.resetlocale', 're.template',
-    'unittest.findTestCases', 'unittest.makeSuite',
-    'unittest.getTestCaseNames', 'unittest.TestLoader.loadTestsFromModule',
-    'unittest.TestLoader.loadTestsFromTestCase',
-    'unittest.TestLoader.getTestCaseNames',
-    'unittest.TestProgram.usageExit'}, (3, 12, 0): {
-    'builtins.bool.__invert__', 'datetime.datetime.utcfromtimestamp',
-    'datetime.datetime.utcnow', 'pkgutil.find_loader', 'pkgutil.get_loader',
-    'pty.master_open', 'pty.slave_open',
-    'xml.etree.ElementTree.Element.__bool__'}}}
-DEPRECATED_CLASSES: dict[tuple[int, int, int], dict[str, set[str]]] = {(3, 
-    2, 0): {'configparser': {'LegacyInterpolation', 'SafeConfigParser'}}, (
-    3, 3, 0): {'importlib.abc': {'Finder'}, 'pkgutil': {'ImpImporter',
-    'ImpLoader'}, 'collections': {'Awaitable', 'Coroutine', 'AsyncIterable',
-    'AsyncIterator', 'AsyncGenerator', 'Hashable', 'Iterable', 'Iterator',
-    'Generator', 'Reversible', 'Sized', 'Container', 'Callable',
-    'Collection', 'Set', 'MutableSet', 'Mapping', 'MutableMapping',
-    'MappingView', 'KeysView', 'ItemsView', 'ValuesView', 'Sequence',
-    'MutableSequence', 'ByteString'}}, (3, 9, 0): {'smtpd': {'MailmanProxy'
-    }}, (3, 11, 0): {'typing': {'Text'}, 'webbrowser': {'MacOSX'}}, (3, 12,
-    0): {'ast': {'Bytes', 'Ellipsis', 'NameConstant', 'Num', 'Str'},
-    'asyncio': {'AbstractChildWatcher', 'MultiLoopChildWatcher',
-    'FastChildWatcher', 'SafeChildWatcher'}, 'collections.abc': {
-    'ByteString'}, 'importlib.abc': {'ResourceReader', 'Traversable',
-    'TraversableResources'}, 'typing': {'ByteString', 'Hashable', 'Sized'}}}
-DEPRECATED_ATTRIBUTES: DeprecationDict = {(3, 2, 0): {
-    'configparser.ParsingError.filename'}, (3, 12, 0): {'calendar.January',
-    'calendar.February', 'sys.last_traceback', 'sys.last_type',
-    'sys.last_value'}}
+
+OPEN_FILES_MODE = ("open", "file")
+OPEN_FILES_FUNCS = (*OPEN_FILES_MODE, "read_text", "write_text")
+UNITTEST_CASE = "unittest.case"
+THREADING_THREAD = "threading.Thread"
+COPY_COPY = "copy.copy"
+OS_ENVIRON = "os._Environ"
+ENV_GETTERS = ("os.getenv",)
+SUBPROCESS_POPEN = "subprocess.Popen"
+SUBPROCESS_RUN = "subprocess.run"
+OPEN_MODULE = {"_io", "pathlib"}
+DEBUG_BREAKPOINTS = ("builtins.breakpoint", "sys.breakpointhook", "pdb.set_trace")
+LRU_CACHE = {
+    "functools.lru_cache",  # Inferred for @lru_cache
+    "functools._lru_cache_wrapper.wrapper",  # Inferred for @lru_cache() on >= Python 3.8
+    "functools.lru_cache.decorating_function",  # Inferred for @lru_cache() on <= Python 3.7
+}
+NON_INSTANCE_METHODS = {"builtins.staticmethod", "builtins.classmethod"}
+
+
+# For modules, see ImportsChecker
+
+DEPRECATED_ARGUMENTS: dict[
+    tuple[int, int, int], dict[str, tuple[tuple[int | None, str], ...]]
+] = {
+    (0, 0, 0): {
+        "int": ((None, "x"),),
+        "bool": ((None, "x"),),
+        "float": ((None, "x"),),
+    },
+    (3, 8, 0): {
+        "asyncio.tasks.sleep": ((None, "loop"),),
+        "asyncio.tasks.gather": ((None, "loop"),),
+        "asyncio.tasks.shield": ((None, "loop"),),
+        "asyncio.tasks.wait_for": ((None, "loop"),),
+        "asyncio.tasks.wait": ((None, "loop"),),
+        "asyncio.tasks.as_completed": ((None, "loop"),),
+        "asyncio.subprocess.create_subprocess_exec": ((None, "loop"),),
+        "asyncio.subprocess.create_subprocess_shell": ((4, "loop"),),
+        "gettext.translation": ((5, "codeset"),),
+        "gettext.install": ((2, "codeset"),),
+        "functools.partialmethod": ((None, "func"),),
+        "weakref.finalize": ((None, "func"), (None, "obj")),
+        "profile.Profile.runcall": ((None, "func"),),
+        "cProfile.Profile.runcall": ((None, "func"),),
+        "bdb.Bdb.runcall": ((None, "func"),),
+        "trace.Trace.runfunc": ((None, "func"),),
+        "curses.wrapper": ((None, "func"),),
+        "unittest.case.TestCase.addCleanup": ((None, "function"),),
+        "concurrent.futures.thread.ThreadPoolExecutor.submit": ((None, "fn"),),
+        "concurrent.futures.process.ProcessPoolExecutor.submit": ((None, "fn"),),
+        "contextlib._BaseExitStack.callback": ((None, "callback"),),
+        "contextlib.AsyncExitStack.push_async_callback": ((None, "callback"),),
+        "multiprocessing.managers.Server.create": ((None, "c"), (None, "typeid")),
+        "multiprocessing.managers.SharedMemoryServer.create": (
+            (None, "c"),
+            (None, "typeid"),
+        ),
+    },
+    (3, 9, 0): {"random.Random.shuffle": ((1, "random"),)},
+    (3, 12, 0): {
+        "argparse.BooleanOptionalAction": ((3, "type"), (4, "choices"), (7, "metavar")),
+        "coroutine.throw": ((1, "value"), (2, "traceback")),
+        "email.utils.localtime": ((1, "isdst"),),
+        "shutil.rmtree": ((2, "onerror"),),
+    },
+}
+
+DEPRECATED_DECORATORS: DeprecationDict = {
+    (3, 8, 0): {"asyncio.coroutine"},
+    (3, 3, 0): {
+        "abc.abstractclassmethod",
+        "abc.abstractstaticmethod",
+        "abc.abstractproperty",
+    },
+    (3, 4, 0): {"importlib.util.module_for_loader"},
+}
+
+
+DEPRECATED_METHODS: dict[int, DeprecationDict] = {
+    0: {
+        (0, 0, 0): {
+            "cgi.parse_qs",
+            "cgi.parse_qsl",
+            "ctypes.c_buffer",
+            "distutils.command.register.register.check_metadata",
+            "distutils.command.sdist.sdist.check_metadata",
+            "tkinter.Misc.tk_menuBar",
+            "tkinter.Menu.tk_bindForTraversal",
+        }
+    },
+    2: {
+        (2, 6, 0): {
+            "commands.getstatus",
+            "os.popen2",
+            "os.popen3",
+            "os.popen4",
+            "macostools.touched",
+        },
+        (2, 7, 0): {
+            "unittest.case.TestCase.assertEquals",
+            "unittest.case.TestCase.assertNotEquals",
+            "unittest.case.TestCase.assertAlmostEquals",
+            "unittest.case.TestCase.assertNotAlmostEquals",
+            "unittest.case.TestCase.assert_",
+            "xml.etree.ElementTree.Element.getchildren",
+            "xml.etree.ElementTree.Element.getiterator",
+            "xml.etree.ElementTree.XMLParser.getiterator",
+            "xml.etree.ElementTree.XMLParser.doctype",
+        },
+    },
+    3: {
+        (3, 0, 0): {
+            "inspect.getargspec",
+            "failUnlessEqual",
+            "assertEquals",
+            "failIfEqual",
+            "assertNotEquals",
+            "failUnlessAlmostEqual",
+            "assertAlmostEquals",
+            "failIfAlmostEqual",
+            "assertNotAlmostEquals",
+            "failUnless",
+            "assert_",
+            "failUnlessRaises",
+            "failIf",
+            "assertRaisesRegexp",
+            "assertRegexpMatches",
+            "assertNotRegexpMatches",
+        },
+        (3, 1, 0): {
+            "base64.encodestring",
+            "base64.decodestring",
+            "ntpath.splitunc",
+            "os.path.splitunc",
+            "os.stat_float_times",
+            "turtle.RawTurtle.settiltangle",
+        },
+        (3, 2, 0): {
+            "cgi.escape",
+            "configparser.RawConfigParser.readfp",
+            "xml.etree.ElementTree.Element.getchildren",
+            "xml.etree.ElementTree.Element.getiterator",
+            "xml.etree.ElementTree.XMLParser.getiterator",
+            "xml.etree.ElementTree.XMLParser.doctype",
+        },
+        (3, 3, 0): {
+            "inspect.getmoduleinfo",
+            "logging.warn",
+            "logging.Logger.warn",
+            "logging.LoggerAdapter.warn",
+            "nntplib._NNTPBase.xpath",
+            "platform.popen",
+            "sqlite3.OptimizedUnicode",
+            "time.clock",
+        },
+        (3, 4, 0): {
+            "importlib.find_loader",
+            "importlib.abc.Loader.load_module",
+            "importlib.abc.Loader.module_repr",
+            "importlib.abc.PathEntryFinder.find_loader",
+            "importlib.abc.PathEntryFinder.find_module",
+            "plistlib.readPlist",
+            "plistlib.writePlist",
+            "plistlib.readPlistFromBytes",
+            "plistlib.writePlistToBytes",
+        },
+        (3, 4, 4): {"asyncio.tasks.async"},
+        (3, 5, 0): {
+            "fractions.gcd",
+            "inspect.formatargspec",
+            "inspect.getcallargs",
+            "platform.linux_distribution",
+            "platform.dist",
+        },
+        (3, 6, 0): {
+            "importlib._bootstrap_external.FileLoader.load_module",
+            "_ssl.RAND_pseudo_bytes",
+        },
+        (3, 7, 0): {
+            "sys.set_coroutine_wrapper",
+            "sys.get_coroutine_wrapper",
+            "aifc.openfp",
+            "threading.Thread.isAlive",
+            "asyncio.Task.current_task",
+            "asyncio.Task.all_task",
+            "locale.format",
+            "ssl.wrap_socket",
+            "ssl.match_hostname",
+            "sunau.openfp",
+            "wave.openfp",
+        },
+        (3, 8, 0): {
+            "gettext.lgettext",
+            "gettext.ldgettext",
+            "gettext.lngettext",
+            "gettext.ldngettext",
+            "gettext.bind_textdomain_codeset",
+            "gettext.NullTranslations.output_charset",
+            "gettext.NullTranslations.set_output_charset",
+            "threading.Thread.isAlive",
+        },
+        (3, 9, 0): {
+            "binascii.b2a_hqx",
+            "binascii.a2b_hqx",
+            "binascii.rlecode_hqx",
+            "binascii.rledecode_hqx",
+            "importlib.resources.contents",
+            "importlib.resources.is_resource",
+            "importlib.resources.open_binary",
+            "importlib.resources.open_text",
+            "importlib.resources.path",
+            "importlib.resources.read_binary",
+            "importlib.resources.read_text",
+        },
+        (3, 10, 0): {
+            "_sqlite3.enable_shared_cache",
+            "importlib.abc.Finder.find_module",
+            "pathlib.Path.link_to",
+            "zipimport.zipimporter.load_module",
+            "zipimport.zipimporter.find_module",
+            "zipimport.zipimporter.find_loader",
+            "threading.currentThread",
+            "threading.activeCount",
+            "threading.Condition.notifyAll",
+            "threading.Event.isSet",
+            "threading.Thread.setName",
+            "threading.Thread.getName",
+            "threading.Thread.isDaemon",
+            "threading.Thread.setDaemon",
+            "cgi.log",
+        },
+        (3, 11, 0): {
+            "locale.getdefaultlocale",
+            "locale.resetlocale",
+            "re.template",
+            "unittest.findTestCases",
+            "unittest.makeSuite",
+            "unittest.getTestCaseNames",
+            "unittest.TestLoader.loadTestsFromModule",
+            "unittest.TestLoader.loadTestsFromTestCase",
+            "unittest.TestLoader.getTestCaseNames",
+            "unittest.TestProgram.usageExit",
+        },
+        (3, 12, 0): {
+            "builtins.bool.__invert__",
+            "datetime.datetime.utcfromtimestamp",
+            "datetime.datetime.utcnow",
+            "pkgutil.find_loader",
+            "pkgutil.get_loader",
+            "pty.master_open",
+            "pty.slave_open",
+            "xml.etree.ElementTree.Element.__bool__",
+        },
+    },
+}
+
+
# Classes removed or deprecated in the stdlib, keyed by the Python version
# that deprecated them, then by the module that defines each class.
DEPRECATED_CLASSES: dict[tuple[int, int, int], dict[str, set[str]]] = {
    (3, 2, 0): {
        "configparser": {
            "LegacyInterpolation",
            "SafeConfigParser",
        },
    },
    (3, 3, 0): {
        "importlib.abc": {
            "Finder",
        },
        "pkgutil": {
            "ImpImporter",
            "ImpLoader",
        },
        # The collections ABCs moved to collections.abc in 3.3; the aliases in
        # the collections top-level namespace were deprecated.
        "collections": {
            "Awaitable",
            "Coroutine",
            "AsyncIterable",
            "AsyncIterator",
            "AsyncGenerator",
            "Hashable",
            "Iterable",
            "Iterator",
            "Generator",
            "Reversible",
            "Sized",
            "Container",
            "Callable",
            "Collection",
            "Set",
            "MutableSet",
            "Mapping",
            "MutableMapping",
            "MappingView",
            "KeysView",
            "ItemsView",
            "ValuesView",
            "Sequence",
            "MutableSequence",
            "ByteString",
        },
    },
    (3, 9, 0): {
        "smtpd": {
            "MailmanProxy",
        }
    },
    (3, 11, 0): {
        "typing": {
            "Text",
        },
        "webbrowser": {
            "MacOSX",
        },
    },
    (3, 12, 0): {
        "ast": {
            "Bytes",
            "Ellipsis",
            "NameConstant",
            "Num",
            "Str",
        },
        "asyncio": {
            "AbstractChildWatcher",
            "MultiLoopChildWatcher",
            "FastChildWatcher",
            "SafeChildWatcher",
        },
        "collections.abc": {
            "ByteString",
        },
        "importlib.abc": {
            "ResourceReader",
            "Traversable",
            "TraversableResources",
        },
        "typing": {
            "ByteString",
            "Hashable",
            "Sized",
        },
    },
}
+
+
# Module/class attributes deprecated in the stdlib, keyed by the Python
# version that deprecated them (same shape as the other deprecation tables).
DEPRECATED_ATTRIBUTES: DeprecationDict = {
    (3, 2, 0): {
        "configparser.ParsingError.filename",
    },
    (3, 12, 0): {
        "calendar.January",
        "calendar.February",
        "sys.last_traceback",
        "sys.last_type",
        "sys.last_value",
    },
}
+
+
+def _check_mode_str(mode: Any) -> bool:
+    # check type
+    if not isinstance(mode, str):
+        return False
+    # check syntax
+    modes = set(mode)
+    _mode = "rwatb+Ux"
+    creating = "x" in modes
+    if modes - set(_mode) or len(mode) > len(modes):
+        return False
+    # check logic
+    reading = "r" in modes
+    writing = "w" in modes
+    appending = "a" in modes
+    text = "t" in modes
+    binary = "b" in modes
+    if "U" in modes:
+        if writing or appending or creating:
+            return False
+        reading = True
+    if text and binary:
+        return False
+    total = reading + writing + appending + creating
+    if total > 1:
+        return False
+    if not (reading or writing or appending or creating):
+        return False
+    return True


 class StdlibChecker(DeprecatedMixin, BaseChecker):
-    name = 'stdlib'
-    msgs: dict[str, MessageDefinitionTuple] = {**DeprecatedMixin.
-        DEPRECATED_METHOD_MESSAGE, **DeprecatedMixin.
-        DEPRECATED_ARGUMENT_MESSAGE, **DeprecatedMixin.
-        DEPRECATED_CLASS_MESSAGE, **DeprecatedMixin.
-        DEPRECATED_DECORATOR_MESSAGE, **DeprecatedMixin.
-        DEPRECATED_ATTRIBUTE_MESSAGE, 'W1501': (
-        '"%s" is not a valid mode for open.', 'bad-open-mode',
-        'Python supports: r, w, a[, x] modes with b, +, and U (only with r) options. See https://docs.python.org/3/library/functions.html#open'
-        ), 'W1502': ('Using datetime.time in a boolean context.',
-        'boolean-datetime',
-        'Using datetime.time in a boolean context can hide subtle bugs when the time they represent matches midnight UTC. This behaviour was fixed in Python 3.5. See https://bugs.python.org/issue13936 for reference.'
-        , {'maxversion': (3, 5)}), 'W1503': (
-        'Redundant use of %s with constant value %r',
-        'redundant-unittest-assert',
-        'The first argument of assertTrue and assertFalse is a condition. If a constant is passed as parameter, that condition will be always true. In this case a warning should be emitted.'
-        ), 'W1506': ('threading.Thread needs the target function',
-        'bad-thread-instantiation',
-        'The warning is emitted when a threading.Thread class is instantiated without the target function being passed as a kwarg or as a second argument. By default, the first parameter is the group param, not the target param.'
-        ), 'W1507': (
-        'Using copy.copy(os.environ). Use os.environ.copy() instead.',
-        'shallow-copy-environ',
-        'os.environ is not a dict object but proxy object, so shallow copy has still effects on original object. See https://bugs.python.org/issue15373 for reference.'
-        ), 'E1507': ('%s does not support %s type argument',
-        'invalid-envvar-value',
-        'Env manipulation functions support only string type arguments. See https://docs.python.org/3/library/os.html#os.getenv.'
-        ), 'E1519': (
-        'singledispatch decorator should not be used with methods, use singledispatchmethod instead.'
-        , 'singledispatch-method',
-        'singledispatch should decorate functions and not class/instance methods. Use singledispatchmethod for those cases.'
-        ), 'E1520': (
-        'singledispatchmethod decorator should not be used with functions, use singledispatch instead.'
-        , 'singledispatchmethod-function',
-        'singledispatchmethod should decorate class/instance methods and not functions. Use singledispatch for those cases.'
-        ), 'W1508': ('%s default type is %s. Expected str or None.',
-        'invalid-envvar-default',
-        'Env manipulation functions return None or str values. Supplying anything different as a default may cause bugs. See https://docs.python.org/3/library/os.html#os.getenv.'
-        ), 'W1509': (
-        'Using preexec_fn keyword which may be unsafe in the presence of threads'
-        , 'subprocess-popen-preexec-fn',
-        'The preexec_fn parameter is not safe to use in the presence of threads in your application. The child process could deadlock before exec is called. If you must use it, keep it trivial! Minimize the number of libraries you call into. See https://docs.python.org/3/library/subprocess.html#popen-constructor'
-        ), 'W1510': (
-        "'subprocess.run' used without explicitly defining the value for 'check'."
-        , 'subprocess-run-check',
-        "The ``check`` keyword  is set to False by default. It means the process launched by ``subprocess.run`` can exit with a non-zero exit code and fail silently. It's better to set it explicitly to make clear what the error-handling behavior is."
-        ), 'W1514': ('Using open without explicitly specifying an encoding',
-        'unspecified-encoding',
-        'It is better to specify an encoding when opening documents. Using the system default implicitly can create problems on other operating systems. See https://peps.python.org/pep-0597/'
-        ), 'W1515': (
-        'Leaving functions creating breakpoints in production code is not recommended'
-        , 'forgotten-debug-statement',
-        'Calls to breakpoint(), sys.breakpointhook() and pdb.set_trace() should be removed from code that is not actively being debugged.'
-        ), 'W1518': (
-        "'lru_cache(maxsize=None)' or 'cache' will keep all method args alive indefinitely, including 'self'"
-        , 'method-cache-max-size-none',
-        "By decorating a method with lru_cache or cache the 'self' argument will be linked to the function and therefore never garbage collected. Unless your instance will never need to be garbage collected (singleton) it is recommended to refactor code to avoid this pattern or add a maxsize to the cache. The default value for maxsize is 128."
-        , {'old_names': [('W1516', 'lru-cache-decorating-method'), ('W1517',
-        'cache-max-size-none')]})}
-
-    def __init__(self, linter: PyLinter) ->None:
    name = "stdlib"

    # Message definitions: the deprecation templates come from DeprecatedMixin;
    # the W15xx/E15xx entries below are specific to this checker.
    msgs: dict[str, MessageDefinitionTuple] = {
        **DeprecatedMixin.DEPRECATED_METHOD_MESSAGE,
        **DeprecatedMixin.DEPRECATED_ARGUMENT_MESSAGE,
        **DeprecatedMixin.DEPRECATED_CLASS_MESSAGE,
        **DeprecatedMixin.DEPRECATED_DECORATOR_MESSAGE,
        **DeprecatedMixin.DEPRECATED_ATTRIBUTE_MESSAGE,
        "W1501": (
            '"%s" is not a valid mode for open.',
            "bad-open-mode",
            "Python supports: r, w, a[, x] modes with b, +, "
            "and U (only with r) options. "
            "See https://docs.python.org/3/library/functions.html#open",
        ),
        "W1502": (
            "Using datetime.time in a boolean context.",
            "boolean-datetime",
            "Using datetime.time in a boolean context can hide "
            "subtle bugs when the time they represent matches "
            "midnight UTC. This behaviour was fixed in Python 3.5. "
            "See https://bugs.python.org/issue13936 for reference.",
            {"maxversion": (3, 5)},
        ),
        "W1503": (
            "Redundant use of %s with constant value %r",
            "redundant-unittest-assert",
            "The first argument of assertTrue and assertFalse is "
            "a condition. If a constant is passed as parameter, that "
            "condition will be always true. In this case a warning "
            "should be emitted.",
        ),
        "W1506": (
            "threading.Thread needs the target function",
            "bad-thread-instantiation",
            "The warning is emitted when a threading.Thread class "
            "is instantiated without the target function being passed as a kwarg or as a second argument. "
            "By default, the first parameter is the group param, not the target param.",
        ),
        "W1507": (
            "Using copy.copy(os.environ). Use os.environ.copy() instead.",
            "shallow-copy-environ",
            "os.environ is not a dict object but proxy object, so "
            "shallow copy has still effects on original object. "
            "See https://bugs.python.org/issue15373 for reference.",
        ),
        "E1507": (
            "%s does not support %s type argument",
            "invalid-envvar-value",
            "Env manipulation functions support only string type arguments. "
            "See https://docs.python.org/3/library/os.html#os.getenv.",
        ),
        "E1519": (
            "singledispatch decorator should not be used with methods, "
            "use singledispatchmethod instead.",
            "singledispatch-method",
            "singledispatch should decorate functions and not class/instance methods. "
            "Use singledispatchmethod for those cases.",
        ),
        "E1520": (
            "singledispatchmethod decorator should not be used with functions, "
            "use singledispatch instead.",
            "singledispatchmethod-function",
            "singledispatchmethod should decorate class/instance methods and not functions. "
            "Use singledispatch for those cases.",
        ),
        "W1508": (
            "%s default type is %s. Expected str or None.",
            "invalid-envvar-default",
            "Env manipulation functions return None or str values. "
            "Supplying anything different as a default may cause bugs. "
            "See https://docs.python.org/3/library/os.html#os.getenv.",
        ),
        "W1509": (
            "Using preexec_fn keyword which may be unsafe in the presence "
            "of threads",
            "subprocess-popen-preexec-fn",
            "The preexec_fn parameter is not safe to use in the presence "
            "of threads in your application. The child process could "
            "deadlock before exec is called. If you must use it, keep it "
            "trivial! Minimize the number of libraries you call into. "
            "See https://docs.python.org/3/library/subprocess.html#popen-constructor",
        ),
        "W1510": (
            "'subprocess.run' used without explicitly defining the value for 'check'.",
            "subprocess-run-check",
            "The ``check`` keyword  is set to False by default. It means the process "
            "launched by ``subprocess.run`` can exit with a non-zero exit code and "
            "fail silently. It's better to set it explicitly to make clear what the "
            "error-handling behavior is.",
        ),
        "W1514": (
            "Using open without explicitly specifying an encoding",
            "unspecified-encoding",
            "It is better to specify an encoding when opening documents. "
            "Using the system default implicitly can create problems on other operating systems. "
            "See https://peps.python.org/pep-0597/",
        ),
        "W1515": (
            "Leaving functions creating breakpoints in production code is not recommended",
            "forgotten-debug-statement",
            "Calls to breakpoint(), sys.breakpointhook() and pdb.set_trace() should be removed "
            "from code that is not actively being debugged.",
        ),
        "W1518": (
            "'lru_cache(maxsize=None)' or 'cache' will keep all method args alive indefinitely, including 'self'",
            "method-cache-max-size-none",
            "By decorating a method with lru_cache or cache the 'self' argument will be linked to "
            "the function and therefore never garbage collected. Unless your instance "
            "will never need to be garbage collected (singleton) it is recommended to refactor "
            "code to avoid this pattern or add a maxsize to the cache. "
            "The default value for maxsize is 128.",
            {
                "old_names": [
                    ("W1516", "lru-cache-decorating-method"),
                    ("W1517", "cache-max-size-none"),
                ]
            },
        ),
    }
+
+    def __init__(self, linter: PyLinter) -> None:
         BaseChecker.__init__(self, linter)
         self._deprecated_methods: set[str] = set()
-        self._deprecated_arguments: dict[str, tuple[tuple[int | None, str],
-            ...]] = {}
+        self._deprecated_arguments: dict[str, tuple[tuple[int | None, str], ...]] = {}
         self._deprecated_classes: dict[str, set[str]] = {}
         self._deprecated_decorators: set[str] = set()
         self._deprecated_attributes: set[str] = set()
-        for since_vers, func_list in DEPRECATED_METHODS[sys.version_info[0]
-            ].items():
+
+        for since_vers, func_list in DEPRECATED_METHODS[sys.version_info[0]].items():
             if since_vers <= sys.version_info:
                 self._deprecated_methods.update(func_list)
         for since_vers, args_list in DEPRECATED_ARGUMENTS.items():
@@ -238,27 +557,374 @@ class StdlibChecker(DeprecatedMixin, BaseChecker):
         for since_vers, attribute_list in DEPRECATED_ATTRIBUTES.items():
             if since_vers <= sys.version_info:
                 self._deprecated_attributes.update(attribute_list)
+        # Modules are checked by the ImportsChecker, because the list is
+        # synced with the config argument deprecated-modules

-    @utils.only_required_for_messages('bad-open-mode',
-        'redundant-unittest-assert', 'deprecated-method',
-        'deprecated-argument', 'bad-thread-instantiation',
-        'shallow-copy-environ', 'invalid-envvar-value',
-        'invalid-envvar-default', 'subprocess-popen-preexec-fn',
-        'subprocess-run-check', 'deprecated-class', 'unspecified-encoding',
-        'forgotten-debug-statement')
-    def visit_call(self, node: nodes.Call) ->None:
+    def _check_bad_thread_instantiation(self, node: nodes.Call) -> None:
+        func_kwargs = {key.arg for key in node.keywords}
+        if "target" in func_kwargs:
+            return
+
+        if len(node.args) < 2 and (not node.kwargs or "target" not in func_kwargs):
+            self.add_message(
+                "bad-thread-instantiation", node=node, confidence=interfaces.HIGH
+            )
+
+    def _check_for_preexec_fn_in_popen(self, node: nodes.Call) -> None:
+        if node.keywords:
+            for keyword in node.keywords:
+                if keyword.arg == "preexec_fn":
+                    self.add_message("subprocess-popen-preexec-fn", node=node)
+
+    def _check_for_check_kw_in_run(self, node: nodes.Call) -> None:
+        kwargs = {keyword.arg for keyword in (node.keywords or ())}
+        if "check" not in kwargs:
+            self.add_message("subprocess-run-check", node=node, confidence=INFERENCE)
+
    def _check_shallow_copy_environ(self, node: nodes.Call) -> None:
        """Check for ``copy.copy(os.environ)``, which copies only the proxy.

        Emits shallow-copy-environ when the single argument of a copy.copy
        call is inferred to be os.environ.
        """
        confidence = HIGH
        try:
            # copy.copy's single parameter is named "x".
            arg = utils.get_argument_from_call(node, position=0, keyword="x")
        except utils.NoSuchArgumentError:
            # The argument may still be hidden behind **kwargs unpacking;
            # if inference can recover it, report with lower confidence.
            arg = utils.infer_kwarg_from_call(node, keyword="x")
            if not arg:
                return
            confidence = INFERENCE
        try:
            inferred_args = arg.inferred()
        except astroid.InferenceError:
            return
        for inferred in inferred_args:
            # Only flag when the argument actually is os.environ.
            if inferred.qname() == OS_ENVIRON:
                self.add_message(
                    "shallow-copy-environ", node=node, confidence=confidence
                )
                break
+
    @utils.only_required_for_messages(
        "bad-open-mode",
        "redundant-unittest-assert",
        "deprecated-method",
        "deprecated-argument",
        "bad-thread-instantiation",
        "shallow-copy-environ",
        "invalid-envvar-value",
        "invalid-envvar-default",
        "subprocess-popen-preexec-fn",
        "subprocess-run-check",
        "deprecated-class",
        "unspecified-encoding",
        "forgotten-debug-statement",
    )
    def visit_call(self, node: nodes.Call) -> None:
        """Visit a Call node.

        Dispatches each possible inference of the called object to the
        matching specialised check, and runs the deprecated-method check on
        every inference.
        """
        self.check_deprecated_class_in_call(node)
        for inferred in utils.infer_all(node.func):
            if isinstance(inferred, util.UninferableBase):
                continue
            if inferred.root().name in OPEN_MODULE:
                # open()/Path.open()-style calls: recover the callable's name
                # from the call expression itself.
                open_func_name: str | None = None
                if isinstance(node.func, nodes.Name):
                    open_func_name = node.func.name
                if isinstance(node.func, nodes.Attribute):
                    open_func_name = node.func.attrname
                if open_func_name in OPEN_FILES_FUNCS:
                    self._check_open_call(node, inferred.root().name, open_func_name)
            elif inferred.root().name == UNITTEST_CASE:
                self._check_redundant_assert(node, inferred)
            elif isinstance(inferred, nodes.ClassDef):
                # Class instantiations with dedicated checks.
                if inferred.qname() == THREADING_THREAD:
                    self._check_bad_thread_instantiation(node)
                elif inferred.qname() == SUBPROCESS_POPEN:
                    self._check_for_preexec_fn_in_popen(node)
            elif isinstance(inferred, nodes.FunctionDef):
                # Plain function calls with dedicated checks.
                name = inferred.qname()
                if name == COPY_COPY:
                    self._check_shallow_copy_environ(node)
                elif name in ENV_GETTERS:
                    self._check_env_function(node, inferred)
                elif name == SUBPROCESS_RUN:
                    self._check_for_check_kw_in_run(node)
                elif name in DEBUG_BREAKPOINTS:
                    self.add_message("forgotten-debug-statement", node=node)
            self.check_deprecated_method(node, inferred)
+
    @utils.only_required_for_messages("boolean-datetime")
    def visit_unaryop(self, node: nodes.UnaryOp) -> None:
        """Check the operand of a ``not`` for a datetime.time value."""
        if node.op == "not":
            self._check_datetime(node.operand)

    @utils.only_required_for_messages("boolean-datetime")
    def visit_if(self, node: nodes.If) -> None:
        """Check an ``if`` test for a datetime.time value."""
        self._check_datetime(node.test)

    @utils.only_required_for_messages("boolean-datetime")
    def visit_ifexp(self, node: nodes.IfExp) -> None:
        """Check a conditional-expression test for a datetime.time value."""
        self._check_datetime(node.test)

    @utils.only_required_for_messages("boolean-datetime")
    def visit_boolop(self, node: nodes.BoolOp) -> None:
        """Check every operand of ``and``/``or`` for a datetime.time value."""
        for value in node.values:
            self._check_datetime(value)
+
+    @utils.only_required_for_messages(
+        "method-cache-max-size-none",
+        "singledispatch-method",
+        "singledispatchmethod-function",
+    )
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
+        if node.decorators:
+            if isinstance(node.parent, nodes.ClassDef):
+                self._check_lru_cache_decorators(node)
+            self._check_dispatch_decorators(node)
+
    def _check_lru_cache_decorators(self, node: nodes.FunctionDef) -> None:
        """Check if instance methods are decorated with functools.lru_cache.

        Caching a method keeps ``self`` alive through the cache keys, so
        method-cache-max-size-none is emitted for unbounded caches
        (``lru_cache(maxsize=None)`` or ``functools.cache``).
        """
        if any(utils.is_enum(ancestor) for ancestor in node.parent.ancestors()):
            # method of class inheriting from Enum is exempt from this check.
            return

        lru_cache_nodes: list[nodes.NodeNG] = []
        for d_node in node.decorators.nodes:
            # pylint: disable = too-many-try-statements
            try:
                for infered_node in d_node.infer():
                    q_name = infered_node.qname()
                    # A decorator from NON_INSTANCE_METHODS means this is not
                    # an instance method, so the whole check is moot.
                    if q_name in NON_INSTANCE_METHODS:
                        return

                    # Check if there is a maxsize argument set to None in the call
                    if q_name in LRU_CACHE and isinstance(d_node, nodes.Call):
                        try:
                            arg = utils.get_argument_from_call(
                                d_node, position=0, keyword="maxsize"
                            )
                        except utils.NoSuchArgumentError:
                            arg = utils.infer_kwarg_from_call(d_node, "maxsize")

                        # A bounded cache (maxsize is a non-None constant, or
                        # not a constant at all) is acceptable.
                        if not isinstance(arg, nodes.Const) or arg.value is not None:
                            break

                        lru_cache_nodes.append(d_node)
                        break

                    if q_name == "functools.cache":
                        lru_cache_nodes.append(d_node)
                        break
            except astroid.InferenceError:
                pass
        for lru_cache_node in lru_cache_nodes:
            self.add_message(
                "method-cache-max-size-none",
                node=lru_cache_node,
                confidence=interfaces.INFERENCE,
            )

-    def _check_datetime(self, node: nodes.NodeNG) ->None:
    def _check_dispatch_decorators(self, node: nodes.FunctionDef) -> None:
        """Warn when a single-dispatch decorator is used on the wrong callable.

        ``singledispatch`` should decorate plain functions and
        ``singledispatchmethod`` should decorate methods; this collects the
        relevant decorators with a confidence level and reports mismatches.
        """
        decorators_map: dict[str, tuple[nodes.NodeNG, interfaces.Confidence]] = {}

        for decorator in node.decorators.nodes:
            if isinstance(decorator, nodes.Name) and decorator.name:
                # A bare decorator name is unambiguous: high confidence.
                decorators_map[decorator.name] = (decorator, interfaces.HIGH)
            elif utils.is_registered_in_singledispatch_function(node):
                decorators_map["singledispatch"] = (decorator, interfaces.INFERENCE)
            elif utils.is_registered_in_singledispatchmethod_function(node):
                decorators_map["singledispatchmethod"] = (
                    decorator,
                    interfaces.INFERENCE,
                )

        if node.is_method():
            if "singledispatch" in decorators_map:
                self.add_message(
                    "singledispatch-method",
                    node=decorators_map["singledispatch"][0],
                    confidence=decorators_map["singledispatch"][1],
                )
        elif "singledispatchmethod" in decorators_map:
            self.add_message(
                "singledispatchmethod-function",
                node=decorators_map["singledispatchmethod"][0],
                confidence=decorators_map["singledispatchmethod"][1],
            )
+
+    def _check_redundant_assert(self, node: nodes.Call, infer: InferenceResult) -> None:
+        if (
+            isinstance(infer, astroid.BoundMethod)
+            and node.args
+            and isinstance(node.args[0], nodes.Const)
+            and infer.name in {"assertTrue", "assertFalse"}
+        ):
+            self.add_message(
+                "redundant-unittest-assert",
+                args=(infer.name, node.args[0].value),
+                node=node,
+            )
+
+    def _check_datetime(self, node: nodes.NodeNG) -> None:
         """Check that a datetime was inferred, if so, emit boolean-datetime warning."""
-        pass
+        try:
+            inferred = next(node.infer())
+        except astroid.InferenceError:
+            return
+        if isinstance(inferred, astroid.Instance) and inferred.qname() in {
+            "_pydatetime.time",
+            "datetime.time",
+        }:
+            self.add_message("boolean-datetime", node=node)

-    def _check_open_call(self, node: nodes.Call, open_module: str,
-        func_name: str) ->None:
    def _check_open_call(
        self, node: nodes.Call, open_module: str, func_name: str
    ) -> None:
        """Various checks for an open call.

        Emits bad-open-mode for an invalid mode string and
        unspecified-encoding when a text-mode open lacks an encoding.
        """
        mode_arg = None
        confidence = HIGH
        try:
            if open_module == "_io":
                # builtins.open(file, mode=...): mode is the second argument.
                mode_arg = utils.get_argument_from_call(
                    node, position=1, keyword="mode"
                )
            elif open_module == "pathlib":
                # Path.open(mode=...): mode is the first argument.
                mode_arg = utils.get_argument_from_call(
                    node, position=0, keyword="mode"
                )
        except utils.NoSuchArgumentError:
            # Could be hidden behind **kwargs; recovered values are reported
            # with INFERENCE confidence only.
            mode_arg = utils.infer_kwarg_from_call(node, keyword="mode")
            if mode_arg:
                confidence = INFERENCE

        if mode_arg:
            mode_arg = utils.safe_infer(mode_arg)
            if (
                func_name in OPEN_FILES_MODE
                and isinstance(mode_arg, nodes.Const)
                and not _check_mode_str(mode_arg.value)
            ):
                self.add_message(
                    "bad-open-mode",
                    node=node,
                    args=mode_arg.value or str(mode_arg.value),
                    confidence=confidence,
                )

        # Encoding only matters in text mode: no mode given, an empty mode,
        # or a mode without "b".
        if (
            not mode_arg
            or isinstance(mode_arg, nodes.Const)
            and (not mode_arg.value or "b" not in str(mode_arg.value))
        ):
            confidence = HIGH
            try:
                if open_module == "pathlib":
                    # encoding sits at a different position per pathlib method.
                    if node.func.attrname == "read_text":
                        encoding_arg = utils.get_argument_from_call(
                            node, position=0, keyword="encoding"
                        )
                    elif node.func.attrname == "write_text":
                        encoding_arg = utils.get_argument_from_call(
                            node, position=1, keyword="encoding"
                        )
                    else:
                        encoding_arg = utils.get_argument_from_call(
                            node, position=2, keyword="encoding"
                        )
                else:
                    # builtins.open(file, mode, buffering, encoding, ...)
                    encoding_arg = utils.get_argument_from_call(
                        node, position=3, keyword="encoding"
                    )
            except utils.NoSuchArgumentError:
                encoding_arg = utils.infer_kwarg_from_call(node, keyword="encoding")
                if encoding_arg:
                    confidence = INFERENCE
                else:
                    self.add_message(
                        "unspecified-encoding", node=node, confidence=confidence
                    )

            if encoding_arg:
                encoding_arg = utils.safe_infer(encoding_arg)

                # encoding=None is equivalent to not passing an encoding.
                if isinstance(encoding_arg, nodes.Const) and encoding_arg.value is None:
                    self.add_message(
                        "unspecified-encoding", node=node, confidence=confidence
                    )
    def _check_env_function(self, node: nodes.Call, infer: nodes.FunctionDef) -> None:
        """Validate argument types of an os.getenv-style call.

        The environment-variable name must be a str, and the default value
        must be a str or None.
        """
        env_name_kwarg = "key"
        env_value_kwarg = "default"
        if node.keywords:
            kwargs = {keyword.arg: keyword.value for keyword in node.keywords}
        else:
            kwargs = None
        # The variable name: first positional or the "key" keyword.
        if node.args:
            env_name_arg = node.args[0]
        elif kwargs and env_name_kwarg in kwargs:
            env_name_arg = kwargs[env_name_kwarg]
        else:
            env_name_arg = None

        if env_name_arg:
            self._check_invalid_envvar_value(
                node=node,
                message="invalid-envvar-value",
                call_arg=utils.safe_infer(env_name_arg),
                infer=infer,
                allow_none=False,
            )

        # The default: second positional or the "default" keyword.
        if len(node.args) == 2:
            env_value_arg = node.args[1]
        elif kwargs and env_value_kwarg in kwargs:
            env_value_arg = kwargs[env_value_kwarg]
        else:
            env_value_arg = None

        if env_value_arg:
            self._check_invalid_envvar_value(
                node=node,
                infer=infer,
                message="invalid-envvar-default",
                call_arg=utils.safe_infer(env_value_arg),
                allow_none=True,
            )
+
+    def _check_invalid_envvar_value(
+        self,
+        node: nodes.Call,
+        infer: nodes.FunctionDef,
+        message: str,
+        call_arg: InferenceResult | None,
+        allow_none: bool,
+    ) -> None:
+        if call_arg is None or isinstance(call_arg, util.UninferableBase):
+            return
+
+        name = infer.qname()
+        if isinstance(call_arg, nodes.Const):
+            emit = False
+            if call_arg.value is None:
+                emit = not allow_none
+            elif not isinstance(call_arg.value, str):
+                emit = True
+            if emit:
+                self.add_message(message, node=node, args=(name, call_arg.pytype()))
+        else:
+            self.add_message(message, node=node, args=(name, call_arg.pytype()))
+
    def deprecated_methods(self) -> set[str]:
        """Return the qualified names of methods deprecated for this Python version."""
        return self._deprecated_methods

    def deprecated_arguments(self, method: str) -> tuple[tuple[int | None, str], ...]:
        """Return the (position, name) pairs of deprecated arguments of *method*."""
        return self._deprecated_arguments.get(method, ())

    def deprecated_classes(self, module: str) -> Iterable[str]:
        """Return the names of classes deprecated in *module*."""
        return self._deprecated_classes.get(module, ())

    def deprecated_decorators(self) -> Iterable[str]:
        """Return the qualified names of deprecated decorators."""
        return self._deprecated_decorators

    def deprecated_attributes(self) -> Iterable[str]:
        """Return the qualified names of deprecated attributes."""
        return self._deprecated_attributes
+
+
def register(linter: PyLinter) -> None:
    """Register this checker with the linter (pylint plugin entry point)."""
    linter.register_checker(StdlibChecker(linter))
diff --git a/pylint/checkers/strings.py b/pylint/checkers/strings.py
index bb0cd648f..90493fa00 100644
--- a/pylint/checkers/strings.py
+++ b/pylint/checkers/strings.py
@@ -1,5 +1,11 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Checker for string formatting operations."""
+
 from __future__ import annotations
+
 import collections
 import re
 import sys
@@ -7,164 +13,817 @@ import tokenize
 from collections import Counter
 from collections.abc import Iterable, Sequence
 from typing import TYPE_CHECKING, Literal
+
 import astroid
 from astroid import bases, nodes, util
 from astroid.typing import SuccessfulInferenceResult
+
 from pylint.checkers import BaseChecker, BaseRawFileChecker, BaseTokenChecker, utils
 from pylint.checkers.utils import only_required_for_messages
 from pylint.interfaces import HIGH
 from pylint.typing import MessageDefinitionTuple
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-_AST_NODE_STR_TYPES = '__builtin__.unicode', '__builtin__.str', 'builtins.str'
-_PREFIXES = {'r', 'u', 'R', 'U', 'f', 'F', 'fr', 'Fr', 'fR', 'FR', 'rf',
-    'rF', 'Rf', 'RF', 'b', 'B', 'br', 'Br', 'bR', 'BR', 'rb', 'rB', 'Rb', 'RB'}
-_PAREN_IGNORE_TOKEN_TYPES = tokenize.NEWLINE, tokenize.NL, tokenize.COMMENT
+
+
+_AST_NODE_STR_TYPES = ("__builtin__.unicode", "__builtin__.str", "builtins.str")
+# Prefixes for both strings and bytes literals per
+# https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
+_PREFIXES = {
+    "r",
+    "u",
+    "R",
+    "U",
+    "f",
+    "F",
+    "fr",
+    "Fr",
+    "fR",
+    "FR",
+    "rf",
+    "rF",
+    "Rf",
+    "RF",
+    "b",
+    "B",
+    "br",
+    "Br",
+    "bR",
+    "BR",
+    "rb",
+    "rB",
+    "Rb",
+    "RB",
+}
+_PAREN_IGNORE_TOKEN_TYPES = (
+    tokenize.NEWLINE,
+    tokenize.NL,
+    tokenize.COMMENT,
+)
 SINGLE_QUOTED_REGEX = re.compile(f"({'|'.join(_PREFIXES)})?'''")
-DOUBLE_QUOTED_REGEX = re.compile(f'({\'|\'.join(_PREFIXES)})?"""')
-QUOTE_DELIMITER_REGEX = re.compile(f'({\'|\'.join(_PREFIXES)})?("|\')', re.
-    DOTALL)
-MSGS: dict[str, MessageDefinitionTuple] = {'E1300': (
-    'Unsupported format character %r (%#02x) at index %d',
-    'bad-format-character',
-    'Used when an unsupported format character is used in a format string.'
-    ), 'E1301': ('Format string ends in middle of conversion specifier',
-    'truncated-format-string',
-    'Used when a format string terminates before the end of a conversion specifier.'
-    ), 'E1302': (
-    'Mixing named and unnamed conversion specifiers in format string',
-    'mixed-format-string',
-    "Used when a format string contains both named (e.g. '%(foo)d') and unnamed (e.g. '%d') conversion specifiers.  This is also used when a named conversion specifier contains * for the minimum field width and/or precision."
-    ), 'E1303': ('Expected mapping for format string, not %s',
-    'format-needs-mapping',
-    'Used when a format string that uses named conversion specifiers is used with an argument that is not a mapping.'
-    ), 'W1300': ('Format string dictionary key should be a string, not %s',
-    'bad-format-string-key',
-    'Used when a format string that uses named conversion specifiers is used with a dictionary whose keys are not all strings.'
-    ), 'W1301': ('Unused key %r in format string dictionary',
-    'unused-format-string-key',
-    'Used when a format string that uses named conversion specifiers is used with a dictionary that contains keys not required by the format string.'
-    ), 'E1304': ('Missing key %r in format string dictionary',
-    'missing-format-string-key',
-    "Used when a format string that uses named conversion specifiers is used with a dictionary that doesn't contain all the keys required by the format string."
-    ), 'E1305': ('Too many arguments for format string',
-    'too-many-format-args',
-    'Used when a format string that uses unnamed conversion specifiers is given too many arguments.'
-    ), 'E1306': ('Not enough arguments for format string',
-    'too-few-format-args',
-    'Used when a format string that uses unnamed conversion specifiers is given too few arguments'
-    ), 'E1307': ('Argument %r does not match format type %r',
-    'bad-string-format-type',
-    'Used when a type required by format string is not suitable for actual argument type'
-    ), 'E1310': ('Suspicious argument in %s.%s call', 'bad-str-strip-call',
-    'The argument to a str.{l,r,}strip call contains a duplicate character,'
-    ), 'W1302': ('Invalid format string', 'bad-format-string',
-    'Used when a PEP 3101 format string is invalid.'), 'W1303': (
-    'Missing keyword argument %r for format string',
-    'missing-format-argument-key',
-    "Used when a PEP 3101 format string that uses named fields doesn't receive one or more required keywords."
-    ), 'W1304': ('Unused format argument %r',
-    'unused-format-string-argument',
-    'Used when a PEP 3101 format string that uses named fields is used with an argument that is not required by the format string.'
-    ), 'W1305': (
-    'Format string contains both automatic field numbering and manual field specification'
-    , 'format-combined-specification',
-    "Used when a PEP 3101 format string contains both automatic field numbering (e.g. '{}') and manual field specification (e.g. '{0}')."
-    ), 'W1306': ('Missing format attribute %r in format specifier %r',
-    'missing-format-attribute',
-    "Used when a PEP 3101 format string uses an attribute specifier ({0.length}), but the argument passed for formatting doesn't have that attribute."
-    ), 'W1307': ('Using invalid lookup key %r in format specifier %r',
-    'invalid-format-index',
-    "Used when a PEP 3101 format string uses a lookup specifier ({a[1]}), but the argument passed for formatting doesn't contain or doesn't have that key as an attribute."
-    ), 'W1308': (
-    'Duplicate string formatting argument %r, consider passing as named argument'
-    , 'duplicate-string-formatting-argument',
-    'Used when we detect that a string formatting is repeating an argument instead of using named string arguments'
-    ), 'W1309': (
-    'Using an f-string that does not have any interpolated variables',
-    'f-string-without-interpolation',
-    'Used when we detect an f-string that does not use any interpolation variables, in which case it can be either a normal string or a bug in the code.'
-    ), 'W1310': (
-    'Using formatting for a string that does not have any interpolated variables'
-    , 'format-string-without-interpolation',
-    'Used when we detect a string that does not have any interpolation variables, in which case it can be either a normal string without formatting or a bug in the code.'
-    )}
-OTHER_NODES = (nodes.Const, nodes.List, nodes.Lambda, nodes.FunctionDef,
-    nodes.ListComp, nodes.SetComp, nodes.GeneratorExp)
-
-
-def get_access_path(key: (str | Literal[0]), parts: list[tuple[bool, str]]
-    ) ->str:
+DOUBLE_QUOTED_REGEX = re.compile(f"({'|'.join(_PREFIXES)})?\"\"\"")
+QUOTE_DELIMITER_REGEX = re.compile(f"({'|'.join(_PREFIXES)})?(\"|')", re.DOTALL)
+
+MSGS: dict[str, MessageDefinitionTuple] = (
+    {  # pylint: disable=consider-using-namedtuple-or-dataclass
+        "E1300": (
+            "Unsupported format character %r (%#02x) at index %d",
+            "bad-format-character",
+            "Used when an unsupported format character is used in a format string.",
+        ),
+        "E1301": (
+            "Format string ends in middle of conversion specifier",
+            "truncated-format-string",
+            "Used when a format string terminates before the end of a "
+            "conversion specifier.",
+        ),
+        "E1302": (
+            "Mixing named and unnamed conversion specifiers in format string",
+            "mixed-format-string",
+            "Used when a format string contains both named (e.g. '%(foo)d') "
+            "and unnamed (e.g. '%d') conversion specifiers.  This is also "
+            "used when a named conversion specifier contains * for the "
+            "minimum field width and/or precision.",
+        ),
+        "E1303": (
+            "Expected mapping for format string, not %s",
+            "format-needs-mapping",
+            "Used when a format string that uses named conversion specifiers "
+            "is used with an argument that is not a mapping.",
+        ),
+        "W1300": (
+            "Format string dictionary key should be a string, not %s",
+            "bad-format-string-key",
+            "Used when a format string that uses named conversion specifiers "
+            "is used with a dictionary whose keys are not all strings.",
+        ),
+        "W1301": (
+            "Unused key %r in format string dictionary",
+            "unused-format-string-key",
+            "Used when a format string that uses named conversion specifiers "
+            "is used with a dictionary that contains keys not required by the "
+            "format string.",
+        ),
+        "E1304": (
+            "Missing key %r in format string dictionary",
+            "missing-format-string-key",
+            "Used when a format string that uses named conversion specifiers "
+            "is used with a dictionary that doesn't contain all the keys "
+            "required by the format string.",
+        ),
+        "E1305": (
+            "Too many arguments for format string",
+            "too-many-format-args",
+            "Used when a format string that uses unnamed conversion "
+            "specifiers is given too many arguments.",
+        ),
+        "E1306": (
+            "Not enough arguments for format string",
+            "too-few-format-args",
+            "Used when a format string that uses unnamed conversion "
+            "specifiers is given too few arguments",
+        ),
+        "E1307": (
+            "Argument %r does not match format type %r",
+            "bad-string-format-type",
+            "Used when a type required by format string "
+            "is not suitable for actual argument type",
+        ),
+        "E1310": (
+            "Suspicious argument in %s.%s call",
+            "bad-str-strip-call",
+            "The argument to a str.{l,r,}strip call contains a duplicate character,",
+        ),
+        "W1302": (
+            "Invalid format string",
+            "bad-format-string",
+            "Used when a PEP 3101 format string is invalid.",
+        ),
+        "W1303": (
+            "Missing keyword argument %r for format string",
+            "missing-format-argument-key",
+            "Used when a PEP 3101 format string that uses named fields "
+            "doesn't receive one or more required keywords.",
+        ),
+        "W1304": (
+            "Unused format argument %r",
+            "unused-format-string-argument",
+            "Used when a PEP 3101 format string that uses named "
+            "fields is used with an argument that "
+            "is not required by the format string.",
+        ),
+        "W1305": (
+            "Format string contains both automatic field numbering "
+            "and manual field specification",
+            "format-combined-specification",
+            "Used when a PEP 3101 format string contains both automatic "
+            "field numbering (e.g. '{}') and manual field "
+            "specification (e.g. '{0}').",
+        ),
+        "W1306": (
+            "Missing format attribute %r in format specifier %r",
+            "missing-format-attribute",
+            "Used when a PEP 3101 format string uses an "
+            "attribute specifier ({0.length}), but the argument "
+            "passed for formatting doesn't have that attribute.",
+        ),
+        "W1307": (
+            "Using invalid lookup key %r in format specifier %r",
+            "invalid-format-index",
+            "Used when a PEP 3101 format string uses a lookup specifier "
+            "({a[1]}), but the argument passed for formatting "
+            "doesn't contain or doesn't have that key as an attribute.",
+        ),
+        "W1308": (
+            "Duplicate string formatting argument %r, consider passing as named argument",
+            "duplicate-string-formatting-argument",
+            "Used when we detect that a string formatting is "
+            "repeating an argument instead of using named string arguments",
+        ),
+        "W1309": (
+            "Using an f-string that does not have any interpolated variables",
+            "f-string-without-interpolation",
+            "Used when we detect an f-string that does not use any interpolation variables, "
+            "in which case it can be either a normal string or a bug in the code.",
+        ),
+        "W1310": (
+            "Using formatting for a string that does not have any interpolated variables",
+            "format-string-without-interpolation",
+            "Used when we detect a string that does not have any interpolation variables, "
+            "in which case it can be either a normal string without formatting or a bug in the code.",
+        ),
+    }
+)
+
+OTHER_NODES = (
+    nodes.Const,
+    nodes.List,
+    nodes.Lambda,
+    nodes.FunctionDef,
+    nodes.ListComp,
+    nodes.SetComp,
+    nodes.GeneratorExp,
+)
+
+
def get_access_path(key: str | Literal[0], parts: list[tuple[bool, str]]) -> str:
    """Given a list of format specifiers, returns
    the final access path (e.g. a.b.c[0][1]).
    """
    # Attribute accesses render as ".name"; index accesses as "[repr]".
    suffix = "".join(
        f".{specifier}" if is_attribute else f"[{specifier!r}]"
        for is_attribute, specifier in parts
    )
    return f"{key}{suffix}"
+
+
def arg_matches_format_type(
    arg_type: SuccessfulInferenceResult, format_type: str
) -> bool:
    """Return True if a value of *arg_type* is usable with *format_type*."""
    # %s and %r accept any object at all.
    if format_type in "sr":
        return True
    if not isinstance(arg_type, astroid.Instance):
        # Anything we cannot pin down to a concrete instance gets the
        # benefit of the doubt.
        return True
    pytype = arg_type.pytype()
    if pytype == "builtins.str":
        return format_type == "c"
    if pytype == "builtins.float":
        return format_type in "deEfFgGn%"
    # Integers are accepted by every conversion; other instance types by none.
    return pytype == "builtins.int"


 class StringFormatChecker(BaseChecker):
     """Checks string formatting operations to ensure that the format string
     is valid and the arguments match the format string.
     """
-    name = 'string'
+
+    name = "string"
     msgs = MSGS

-    def _check_new_format(self, node: nodes.Call, func: bases.BoundMethod
-        ) ->None:
    # pylint: disable = too-many-branches, too-many-locals, too-many-statements
    @only_required_for_messages(
        "bad-format-character",
        "truncated-format-string",
        "mixed-format-string",
        "bad-format-string-key",
        "missing-format-string-key",
        "unused-format-string-key",
        "bad-string-format-type",
        "format-needs-mapping",
        "too-many-format-args",
        "too-few-format-args",
        "format-string-without-interpolation",
    )
    def visit_binop(self, node: nodes.BinOp) -> None:
        """Check a ``%`` formatting operation with a string constant on
        the left: validate the format string itself, then (where the RHS
        can be inferred) the keys/count/types of the interpolated values.
        """
        if node.op != "%":
            return
        left = node.left
        args = node.right

        if not (isinstance(left, nodes.Const) and isinstance(left.value, str)):
            return
        format_string = left.value
        try:
            (
                required_keys,
                required_num_args,
                required_key_types,
                required_arg_types,
            ) = utils.parse_format_string(format_string)
        except utils.UnsupportedFormatCharacter as exc:
            formatted = format_string[exc.index]
            self.add_message(
                "bad-format-character",
                node=node,
                args=(formatted, ord(formatted), exc.index),
            )
            return
        except utils.IncompleteFormatString:
            self.add_message("truncated-format-string", node=node)
            return
        # No specifiers at all: "%" formatting is pointless here.
        if not required_keys and not required_num_args:
            self.add_message("format-string-without-interpolation", node=node)
            return
        if required_keys and required_num_args:
            # The format string uses both named and unnamed format
            # specifiers.
            self.add_message("mixed-format-string", node=node)
        elif required_keys:
            # The format string uses only named format specifiers.
            # Check that the RHS of the % operator is a mapping object
            # that contains precisely the set of keys required by the
            # format string.
            if isinstance(args, nodes.Dict):
                keys = set()
                unknown_keys = False
                for k, _ in args.items:
                    if isinstance(k, nodes.Const):
                        key = k.value
                        if isinstance(key, str):
                            keys.add(key)
                        else:
                            self.add_message(
                                "bad-format-string-key", node=node, args=key
                            )
                    else:
                        # One of the keys was something other than a
                        # constant.  Since we can't tell what it is,
                        # suppress checks for missing keys in the
                        # dictionary.
                        unknown_keys = True
                if not unknown_keys:
                    for key in required_keys:
                        if key not in keys:
                            self.add_message(
                                "missing-format-string-key", node=node, args=key
                            )
                for key in keys:
                    if key not in required_keys:
                        self.add_message(
                            "unused-format-string-key", node=node, args=key
                        )
                # Type-check each dict value against its conversion specifier.
                for key, arg in args.items:
                    if not isinstance(key, nodes.Const):
                        continue
                    format_type = required_key_types.get(key.value, None)
                    arg_type = utils.safe_infer(arg)
                    if (
                        format_type is not None
                        and arg_type
                        and not isinstance(arg_type, util.UninferableBase)
                        and not arg_matches_format_type(arg_type, format_type)
                    ):
                        self.add_message(
                            "bad-string-format-type",
                            node=node,
                            args=(arg_type.pytype(), format_type),
                        )
            elif isinstance(args, (OTHER_NODES, nodes.Tuple)):
                type_name = type(args).__name__
                self.add_message("format-needs-mapping", node=node, args=type_name)
            # else:
            # The RHS of the format specifier is a name or
            # expression.  It may be a mapping object, so
            # there's nothing we can check.
        else:
            # The format string uses only unnamed format specifiers.
            # Check that the number of arguments passed to the RHS of
            # the % operator matches the number required by the format
            # string.
            args_elts = []
            if isinstance(args, nodes.Tuple):
                rhs_tuple = utils.safe_infer(args)
                num_args = None
                if isinstance(rhs_tuple, nodes.BaseContainer):
                    args_elts = rhs_tuple.elts
                    num_args = len(args_elts)
            elif isinstance(args, (OTHER_NODES, (nodes.Dict, nodes.DictComp))):
                args_elts = [args]
                num_args = 1
            elif isinstance(args, nodes.Name):
                inferred = utils.safe_infer(args)
                if isinstance(inferred, nodes.Tuple):
                    # The variable is a tuple, so we need to get the elements
                    # from it for further inspection
                    args_elts = inferred.elts
                    num_args = len(args_elts)
                elif isinstance(inferred, nodes.Const):
                    args_elts = [inferred]
                    num_args = 1
                else:
                    num_args = None
            else:
                # The RHS of the format specifier is an expression.
                # It could be a tuple of unknown size, so
                # there's nothing we can check.
                num_args = None
            if num_args is not None:
                if num_args > required_num_args:
                    self.add_message("too-many-format-args", node=node)
                elif num_args < required_num_args:
                    self.add_message("too-few-format-args", node=node)
                for arg, format_type in zip(args_elts, required_arg_types):
                    if not arg:
                        continue
                    arg_type = utils.safe_infer(arg)
                    if (
                        arg_type
                        and not isinstance(arg_type, util.UninferableBase)
                        and not arg_matches_format_type(arg_type, format_type)
                    ):
                        self.add_message(
                            "bad-string-format-type",
                            node=node,
                            args=(arg_type.pytype(), format_type),
                        )
+
    @only_required_for_messages("f-string-without-interpolation")
    def visit_joinedstr(self, node: nodes.JoinedStr) -> None:
        """Visit an f-string and flag it when it interpolates nothing."""
        self._check_interpolation(node)
+
+    def _check_interpolation(self, node: nodes.JoinedStr) -> None:
+        if isinstance(node.parent, nodes.FormattedValue):
+            return
+        for value in node.values:
+            if isinstance(value, nodes.FormattedValue):
+                return
+        self.add_message("f-string-without-interpolation", node=node)
+
+    def visit_call(self, node: nodes.Call) -> None:
+        func = utils.safe_infer(node.func)
+        if (
+            isinstance(func, astroid.BoundMethod)
+            and isinstance(func.bound, astroid.Instance)
+            and func.bound.name in {"str", "unicode", "bytes"}
+        ):
+            if func.name in {"strip", "lstrip", "rstrip"} and node.args:
+                arg = utils.safe_infer(node.args[0])
+                if not isinstance(arg, nodes.Const) or not isinstance(arg.value, str):
+                    return
+                if len(arg.value) != len(set(arg.value)):
+                    self.add_message(
+                        "bad-str-strip-call",
+                        node=node,
+                        args=(func.bound.name, func.name),
+                    )
+            elif func.name == "format":
+                self._check_new_format(node, func)
+
+    def _detect_vacuous_formatting(
+        self, node: nodes.Call, positional_arguments: list[SuccessfulInferenceResult]
+    ) -> None:
+        counter = collections.Counter(
+            arg.name for arg in positional_arguments if isinstance(arg, nodes.Name)
+        )
+        for name, count in counter.items():
+            if count == 1:
+                continue
+            self.add_message(
+                "duplicate-string-formatting-argument", node=node, args=(name,)
+            )
+
    def _check_new_format(self, node: nodes.Call, func: bases.BoundMethod) -> None:
        """Check the new string formatting (``str.format`` calls).

        Validates the format string, warns about missing/unused named or
        positional arguments, then delegates duplicate-argument and
        attribute/index-specifier checks to the dedicated helpers.
        """
        # Skip format nodes which don't have an explicit string on the
        # left side of the format operation.
        # We do this because our inference engine can't properly handle
        # redefinition of the original string.
        # Note that there may not be any left side at all, if the format method
        # has been assigned to another variable. See issue 351. For example:
        #
        #    fmt = 'some string {}'.format
        #    fmt('arg')
        if isinstance(node.func, nodes.Attribute) and not isinstance(
            node.func.expr, nodes.Const
        ):
            return
        # *args / **kwargs make the argument count unknowable.
        if node.starargs or node.kwargs:
            return
        try:
            strnode = next(func.bound.infer())
        except astroid.InferenceError:
            return
        if not (isinstance(strnode, nodes.Const) and isinstance(strnode.value, str)):
            return
        try:
            call_site = astroid.arguments.CallSite.from_call(node)
        except astroid.InferenceError:
            return

        try:
            fields, num_args, manual_pos = utils.parse_format_method_string(
                strnode.value
            )
        except utils.IncompleteFormatString:
            self.add_message("bad-format-string", node=node)
            return

        positional_arguments = call_site.positional_arguments
        named_arguments = call_site.keyword_arguments
        named_fields = {field[0] for field in fields if isinstance(field[0], str)}
        if num_args and manual_pos:
            self.add_message("format-combined-specification", node=node)
            return

        check_args = False
        # Consider "{[0]} {[1]}" as num_args.
        num_args += sum(1 for field in named_fields if not field)
        if named_fields:
            for field in named_fields:
                if field and field not in named_arguments:
                    self.add_message(
                        "missing-format-argument-key", node=node, args=(field,)
                    )
            for field in named_arguments:
                if field not in named_fields:
                    self.add_message(
                        "unused-format-string-argument", node=node, args=(field,)
                    )
            # num_args can be 0 if manual_pos is not.
            num_args = num_args or manual_pos
            if positional_arguments or num_args:
                empty = not all(field for field in named_fields)
                if named_arguments or empty:
                    # Verify the required number of positional arguments
                    # only if the .format got at least one keyword argument.
                    # This means that the format strings accepts both
                    # positional and named fields and we should warn
                    # when one of them is missing or is extra.
                    check_args = True
        else:
            check_args = True
        if check_args:
            # num_args can be 0 if manual_pos is not.
            num_args = num_args or manual_pos
            if not num_args:
                self.add_message("format-string-without-interpolation", node=node)
                return
            if len(positional_arguments) > num_args:
                self.add_message("too-many-format-args", node=node)
            elif len(positional_arguments) < num_args:
                self.add_message("too-few-format-args", node=node)

        self._detect_vacuous_formatting(node, positional_arguments)
        self._check_new_format_specifiers(node, fields, named_arguments)
+
    # pylint: disable = too-many-statements
    def _check_new_format_specifiers(
        self,
        node: nodes.Call,
        fields: list[tuple[str, list[tuple[bool, str]]]],
        named: dict[str, SuccessfulInferenceResult],
    ) -> None:
        """Check attribute and index access in the format
        string ("{0.a}" and "{0[a]}").

        For each field, infer the corresponding call argument and walk its
        attribute/index specifiers, emitting ``missing-format-attribute`` or
        ``invalid-format-index`` when an access cannot succeed.
        """
        key: Literal[0] | str
        for key, specifiers in fields:
            # Obtain the argument. If it can't be obtained
            # or inferred, skip this check.
            if not key:
                # {[0]} will have an unnamed argument, defaulting
                # to 0. It will not be present in `named`, so use the value
                # 0 for it.
                key = 0
            if isinstance(key, int):
                try:
                    argname = utils.get_argument_from_call(node, key)
                except utils.NoSuchArgumentError:
                    continue
            else:
                if key not in named:
                    continue
                argname = named[key]
            if argname is None or isinstance(argname, util.UninferableBase):
                continue
            try:
                argument = utils.safe_infer(argname)
            except astroid.InferenceError:
                continue
            if not specifiers or not argument:
                # No need to check this key if it doesn't
                # use attribute / item access
                continue
            if argument.parent and isinstance(argument.parent, nodes.Arguments):
                # Ignore any object coming from an argument,
                # because we can't infer its value properly.
                continue
            previous = argument
            parsed: list[tuple[bool, str]] = []
            # Walk each ".attr" / "[item]" specifier in turn, inferring the
            # intermediate value after each step.
            for is_attribute, specifier in specifiers:
                if isinstance(previous, util.UninferableBase):
                    break
                parsed.append((is_attribute, specifier))
                if is_attribute:
                    try:
                        previous = previous.getattr(specifier)[0]
                    except astroid.NotFoundError:
                        if (
                            hasattr(previous, "has_dynamic_getattr")
                            and previous.has_dynamic_getattr()
                        ):
                            # Don't warn if the object has a custom __getattr__
                            break
                        path = get_access_path(key, parsed)
                        self.add_message(
                            "missing-format-attribute",
                            args=(specifier, path),
                            node=node,
                        )
                        break
                else:
                    warn_error = False
                    if hasattr(previous, "getitem"):
                        try:
                            previous = previous.getitem(nodes.Const(specifier))
                        except (
                            astroid.AstroidIndexError,
                            astroid.AstroidTypeError,
                            astroid.AttributeInferenceError,
                        ):
                            warn_error = True
                        except astroid.InferenceError:
                            break
                        if isinstance(previous, util.UninferableBase):
                            break
                    else:
                        try:
                            # Lookup __getitem__ in the current node,
                            # but skip further checks, because we can't
                            # retrieve the looked object
                            previous.getattr("__getitem__")
                            break
                        except astroid.NotFoundError:
                            warn_error = True
                    if warn_error:
                        path = get_access_path(key, parsed)
                        self.add_message(
                            "invalid-format-index", args=(specifier, path), node=node
                        )
                        break

                try:
                    previous = next(previous.infer())
                except astroid.InferenceError:
                    # can't check further if we can't infer it
                    break


 class StringConstantChecker(BaseTokenChecker, BaseRawFileChecker):
     """Check string literals."""
-    name = 'string'
-    msgs = {'W1401': (
-        "Anomalous backslash in string: '%s'. String constant might be missing an r prefix."
-        , 'anomalous-backslash-in-string',
-        'Used when a backslash is in a literal string but not as an escape.'
-        ), 'W1402': (
-        "Anomalous Unicode escape in byte string: '%s'. String constant might be missing an r or u prefix."
-        , 'anomalous-unicode-escape-in-string',
-        'Used when an escape like \\u is encountered in a byte string where it has no effect.'
-        ), 'W1404': ('Implicit string concatenation found in %s',
-        'implicit-str-concat',
-        'String literals are implicitly concatenated in a literal iterable definition : maybe a comma is missing ?'
-        , {'old_names': [('W1403', 'implicit-str-concat-in-sequence')]}),
-        'W1405': (
-        'Quote delimiter %s is inconsistent with the rest of the file',
-        'inconsistent-quotes',
-        'Quote delimiters are not used consistently throughout a module (with allowances made for avoiding unnecessary escaping).'
-        ), 'W1406': (
-        'The u prefix for strings is no longer necessary in Python >=3.0',
-        'redundant-u-string-prefix',
-        'Used when we detect a string with a u prefix. These prefixes were necessary in Python 2 to indicate a string was Unicode, but since Python 3.0 strings are Unicode by default.'
-        )}
-    options = ('check-str-concat-over-line-jumps', {'default': False,
-        'type': 'yn', 'metavar': '<y or n>', 'help':
-        'This flag controls whether the implicit-str-concat should generate a warning on implicit string concatenation in sequences defined over several lines.'
-        }), ('check-quote-consistency', {'default': False, 'type': 'yn',
-        'metavar': '<y or n>', 'help':
-        'This flag controls whether inconsistent-quotes generates a warning when the character used as a quote delimiter is used inconsistently within a module.'
-        })
-    ESCAPE_CHARACTERS = 'abfnrtvx\n\r\t\\\'"01234567'
-    UNICODE_ESCAPE_CHARACTERS = 'uUN'
-
-    def __init__(self, linter: PyLinter) ->None:
+
+    name = "string"
+    msgs = {
+        "W1401": (
+            "Anomalous backslash in string: '%s'. "
+            "String constant might be missing an r prefix.",
+            "anomalous-backslash-in-string",
+            "Used when a backslash is in a literal string but not as an escape.",
+        ),
+        "W1402": (
+            "Anomalous Unicode escape in byte string: '%s'. "
+            "String constant might be missing an r or u prefix.",
+            "anomalous-unicode-escape-in-string",
+            "Used when an escape like \\u is encountered in a byte "
+            "string where it has no effect.",
+        ),
+        "W1404": (
+            "Implicit string concatenation found in %s",
+            "implicit-str-concat",
+            "String literals are implicitly concatenated in a "
+            "literal iterable definition : "
+            "maybe a comma is missing ?",
+            {"old_names": [("W1403", "implicit-str-concat-in-sequence")]},
+        ),
+        "W1405": (
+            "Quote delimiter %s is inconsistent with the rest of the file",
+            "inconsistent-quotes",
+            "Quote delimiters are not used consistently throughout a module "
+            "(with allowances made for avoiding unnecessary escaping).",
+        ),
+        "W1406": (
+            "The u prefix for strings is no longer necessary in Python >=3.0",
+            "redundant-u-string-prefix",
+            "Used when we detect a string with a u prefix. These prefixes were necessary "
+            "in Python 2 to indicate a string was Unicode, but since Python 3.0 strings "
+            "are Unicode by default.",
+        ),
+    }
+    options = (
+        (
+            "check-str-concat-over-line-jumps",
+            {
+                "default": False,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "This flag controls whether the "
+                "implicit-str-concat should generate a warning "
+                "on implicit string concatenation in sequences defined over "
+                "several lines.",
+            },
+        ),
+        (
+            "check-quote-consistency",
+            {
+                "default": False,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "This flag controls whether inconsistent-quotes generates a "
+                "warning when the character used as a quote delimiter is used "
+                "inconsistently within a module.",
+            },
+        ),
+    )
+
+    # Characters that have a special meaning after a backslash in either
+    # Unicode or byte strings.
+    ESCAPE_CHARACTERS = "abfnrtvx\n\r\t\\'\"01234567"
+
+    # Characters that have a special meaning after a backslash but only in
+    # Unicode strings.
+    UNICODE_ESCAPE_CHARACTERS = "uUN"
+
    def __init__(self, linter: PyLinter) -> None:
        # Set up the per-module token caches consumed by the
        # implicit-str-concat checks.
        super().__init__(linter)
        self.string_tokens: dict[
            tuple[int, int], tuple[str, tokenize.TokenInfo | None]
        ] = {}
        """Token position -> (token value, next token)."""
        # Token position -> whether that (initial) string token sits directly
        # inside parentheses; filled in by process_tokens.
        self._parenthesized_string_tokens: dict[tuple[int, int], bool] = {}

-    def check_for_consistent_string_delimiters(self, tokens: Iterable[
-        tokenize.TokenInfo]) ->None:
    def process_module(self, node: nodes.Module) -> None:
        """Record whether the module lists ``unicode_literals`` in its future imports."""
        self._unicode_literals = "unicode_literals" in node.future_imports
+
    def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
        """Index every STRING token and run per-token string checks.

        For each STRING token this records (parsed value, next significant
        token) keyed by the token's start position, notes whether the token is
        parenthesized, and checks its escapes.  Finally runs the module-wide
        quote-consistency check when enabled.
        """
        encoding = "ascii"
        for i, (token_type, token, start, _, line) in enumerate(tokens):
            if token_type == tokenize.ENCODING:
                # this is always the first token processed
                encoding = token
            elif token_type == tokenize.STRING:
                # 'token' is the whole un-parsed token; we can look at the start
                # of it to see whether it's a raw or unicode string etc.
                self.process_string_token(token, start[0], start[1])
                # We figure the next token, ignoring comments & newlines:
                j = i + 1
                while j < len(tokens) and tokens[j].type in (
                    tokenize.NEWLINE,
                    tokenize.NL,
                    tokenize.COMMENT,
                ):
                    j += 1
                next_token = tokens[j] if j < len(tokens) else None
                if encoding != "ascii":
                    # We convert `tokenize` character count into a byte count,
                    # to match with astroid `.col_offset`
                    start = (start[0], len(line[: start[1]].encode(encoding)))
                self.string_tokens[start] = (str_eval(token), next_token)
                is_parenthesized = self._is_initial_string_token(
                    i, tokens
                ) and self._is_parenthesized(i, tokens)
                self._parenthesized_string_tokens[start] = is_parenthesized

        if self.linter.config.check_quote_consistency:
            self.check_for_consistent_string_delimiters(tokens)
+
+    def _is_initial_string_token(
+        self, index: int, tokens: Sequence[tokenize.TokenInfo]
+    ) -> bool:
+        # Must NOT be preceded by a string literal
+        prev_token = self._find_prev_token(index, tokens)
+        if prev_token and prev_token.type == tokenize.STRING:
+            return False
+        # Must be followed by a string literal token.
+        next_token = self._find_next_token(index, tokens)
+        return bool(next_token and next_token.type == tokenize.STRING)
+
+    def _is_parenthesized(self, index: int, tokens: list[tokenize.TokenInfo]) -> bool:
+        prev_token = self._find_prev_token(
+            index, tokens, ignore=(*_PAREN_IGNORE_TOKEN_TYPES, tokenize.STRING)
+        )
+        if not prev_token or prev_token.type != tokenize.OP or prev_token[1] != "(":
+            return False
+        next_token = self._find_next_token(
+            index, tokens, ignore=(*_PAREN_IGNORE_TOKEN_TYPES, tokenize.STRING)
+        )
+        return bool(
+            next_token and next_token.type == tokenize.OP and next_token[1] == ")"
+        )
+
+    def _find_prev_token(
+        self,
+        index: int,
+        tokens: Sequence[tokenize.TokenInfo],
+        *,
+        ignore: tuple[int, ...] = _PAREN_IGNORE_TOKEN_TYPES,
+    ) -> tokenize.TokenInfo | None:
+        i = index - 1
+        while i >= 0 and tokens[i].type in ignore:
+            i -= 1
+        return tokens[i] if i >= 0 else None
+
+    def _find_next_token(
+        self,
+        index: int,
+        tokens: Sequence[tokenize.TokenInfo],
+        *,
+        ignore: tuple[int, ...] = _PAREN_IGNORE_TOKEN_TYPES,
+    ) -> tokenize.TokenInfo | None:
+        i = index + 1
+        while i < len(tokens) and tokens[i].type in ignore:
+            i += 1
+        return tokens[i] if i < len(tokens) else None
+
+    @only_required_for_messages("implicit-str-concat")
+    def visit_call(self, node: nodes.Call) -> None:
+        self.check_for_concatenated_strings(node.args, "call")
+
+    @only_required_for_messages("implicit-str-concat")
+    def visit_list(self, node: nodes.List) -> None:
+        self.check_for_concatenated_strings(node.elts, "list")
+
+    @only_required_for_messages("implicit-str-concat")
+    def visit_set(self, node: nodes.Set) -> None:
+        self.check_for_concatenated_strings(node.elts, "set")
+
+    @only_required_for_messages("implicit-str-concat")
+    def visit_tuple(self, node: nodes.Tuple) -> None:
+        self.check_for_concatenated_strings(node.elts, "tuple")
+
    def visit_assign(self, node: nodes.Assign) -> None:
        """Check a plain string assignment for implicit concatenation."""
        if isinstance(node.value, nodes.Const) and isinstance(node.value.value, str):
            self.check_for_concatenated_strings([node.value], "assignment")
+
+    def check_for_consistent_string_delimiters(
+        self, tokens: Iterable[tokenize.TokenInfo]
+    ) -> None:
         """Adds a message for each string using inconsistent quote delimiters.

         Quote delimiters are used inconsistently if " and ' are mixed in a module's
@@ -174,10 +833,110 @@ class StringConstantChecker(BaseTokenChecker, BaseRawFileChecker):
         Args:
           tokens: The tokens to be checked against for consistent usage.
         """
-        pass
+        string_delimiters: Counter[str] = collections.Counter()
+
+        inside_fstring = False  # whether token is inside f-string (since 3.12)
+        target_py312 = self.linter.config.py_version >= (3, 12)
+
+        # First, figure out which quote character predominates in the module
+        for tok_type, token, _, _, _ in tokens:
+            if sys.version_info[:2] >= (3, 12):
+                # pylint: disable=no-member,useless-suppression
+                if tok_type == tokenize.FSTRING_START:
+                    inside_fstring = True
+                elif tok_type == tokenize.FSTRING_END:
+                    inside_fstring = False
+
+                if inside_fstring and not target_py312:
+                    # skip analysis of f-string contents
+                    continue
+
+            if tok_type == tokenize.STRING and _is_quote_delimiter_chosen_freely(token):
+                string_delimiters[_get_quote_delimiter(token)] += 1

-    def process_non_raw_string_token(self, prefix: str, string_body: str,
-        start_row: int, string_start_col: int) ->None:
+        if len(string_delimiters) > 1:
+            # Ties are broken arbitrarily
+            most_common_delimiter = string_delimiters.most_common(1)[0][0]
+            for tok_type, token, start, _, _ in tokens:
+                if tok_type != tokenize.STRING:
+                    continue
+                quote_delimiter = _get_quote_delimiter(token)
+                if (
+                    _is_quote_delimiter_chosen_freely(token)
+                    and quote_delimiter != most_common_delimiter
+                ):
+                    self.add_message(
+                        "inconsistent-quotes", line=start[0], args=(quote_delimiter,)
+                    )
+
    def check_for_concatenated_strings(
        self, elements: Sequence[nodes.NodeNG], iterable_type: str
    ) -> None:
        """Emit implicit-str-concat for constants assembled from adjacent literals.

        elements: child nodes of the container under inspection.
        iterable_type: label interpolated into the message ("list", "call", ...).
        """
        for elt in elements:
            if not (
                isinstance(elt, nodes.Const) and elt.pytype() in _AST_NODE_STR_TYPES
            ):
                continue
            if elt.col_offset < 0:
                # This can happen in case of escaped newlines
                continue
            token_index = (elt.lineno, elt.col_offset)
            if token_index not in self.string_tokens:
                # This may happen with Latin1 encoding
                # cf. https://github.com/pylint-dev/pylint/issues/2610
                continue
            matching_token, next_token = self.string_tokens[token_index]
            # We detect string concatenation: the AST Const is the
            # combination of 2 string tokens
            if (
                matching_token != elt.value
                and next_token is not None
                and next_token.type == tokenize.STRING
            ):
                if next_token.start[0] == elt.lineno or (
                    self.linter.config.check_str_concat_over_line_jumps
                    # Allow implicitly concatenated strings in parens.
                    # See https://github.com/pylint-dev/pylint/issues/8552.
                    and not self._parenthesized_string_tokens.get(
                        (elt.lineno, elt.col_offset)
                    )
                ):
                    self.add_message(
                        "implicit-str-concat",
                        line=elt.lineno,
                        args=(iterable_type,),
                        confidence=HIGH,
                    )
+
+    def process_string_token(self, token: str, start_row: int, start_col: int) -> None:
+        quote_char = None
+        for _index, char in enumerate(token):
+            if char in "'\"":
+                quote_char = char
+                break
+        if quote_char is None:
+            return
+        # pylint: disable=undefined-loop-variable
+        prefix = token[:_index].lower()  # markers like u, b, r.
+        after_prefix = token[_index:]
+        # pylint: enable=undefined-loop-variable
+        # Chop off quotes
+        quote_length = (
+            3 if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char else 1
+        )
+        string_body = after_prefix[quote_length:-quote_length]
+        # No special checks on raw strings at the moment.
+        if "r" not in prefix:
+            self.process_non_raw_string_token(
+                prefix,
+                string_body,
+                start_row,
+                start_col + len(prefix) + quote_length,
+            )
+
+    def process_non_raw_string_token(
+        self, prefix: str, string_body: str, start_row: int, string_start_col: int
+    ) -> None:
         """Check for bad escapes in a non-raw string.

         prefix: lowercase string of string prefix markers ('ur').
@@ -186,24 +945,96 @@ class StringConstantChecker(BaseTokenChecker, BaseRawFileChecker):
         start_row: line number in the source.
         string_start_col: col number of the string start in the source.
         """
-        pass
+        # Walk through the string; if we see a backslash then escape the next
+        # character, and skip over it.  If we see a non-escaped character,
+        # alert, and continue.
+        #
+        # Accept a backslash when it escapes a backslash, or a quote, or
+        # end-of-line, or one of the letters that introduce a special escape
+        # sequence <https://docs.python.org/reference/lexical_analysis.html>
+        #
+        index = 0
+        while True:
+            index = string_body.find("\\", index)
+            if index == -1:
+                break
+            # There must be a next character; having a backslash at the end
+            # of the string would be a SyntaxError.
+            next_char = string_body[index + 1]
+            match = string_body[index : index + 2]
+            # The column offset will vary depending on whether the string token
+            # is broken across lines. Calculate relative to the nearest line
+            # break or relative to the start of the token's line.
+            last_newline = string_body.rfind("\n", 0, index)
+            if last_newline == -1:
+                line = start_row
+                col_offset = index + string_start_col
+            else:
+                line = start_row + string_body.count("\n", 0, index)
+                col_offset = index - last_newline - 1
+            if next_char in self.UNICODE_ESCAPE_CHARACTERS:
+                if "u" in prefix:
+                    pass
+                elif "b" not in prefix:
+                    pass  # unicode by default
+                else:
+                    self.add_message(
+                        "anomalous-unicode-escape-in-string",
+                        line=line,
+                        args=(match,),
+                        col_offset=col_offset,
+                    )
+            elif next_char not in self.ESCAPE_CHARACTERS:
+                self.add_message(
+                    "anomalous-backslash-in-string",
+                    line=line,
+                    args=(match,),
+                    col_offset=col_offset,
+                )
+            # Whether it was a valid escape or not, backslash followed by
+            # another character can always be consumed whole: the second
+            # character can never be the start of a new backslash escape.
+            index += 2

-    def _detect_u_string_prefix(self, node: nodes.Const) ->None:
+    @only_required_for_messages("redundant-u-string-prefix")
+    def visit_const(self, node: nodes.Const) -> None:
+        if node.pytype() == "builtins.str" and not isinstance(
+            node.parent, nodes.JoinedStr
+        ):
+            self._detect_u_string_prefix(node)
+
+    def _detect_u_string_prefix(self, node: nodes.Const) -> None:
         """Check whether strings include a 'u' prefix like u'String'."""
-        pass
+        if node.kind == "u":
+            self.add_message(
+                "redundant-u-string-prefix",
+                line=node.lineno,
+                col_offset=node.col_offset,
+            )
+
+
def register(linter: PyLinter) -> None:
    """Register both string checkers with *linter* (pylint plugin entry point)."""
    linter.register_checker(StringFormatChecker(linter))
    linter.register_checker(StringConstantChecker(linter))


-def str_eval(token: str) ->str:
def str_eval(token: str) -> str:
    """Mostly replicate `ast.literal_eval(token)` manually to avoid any performance hit.

    This supports f-strings, contrary to `ast.literal_eval`.
    We have to support all string literal notations:
    https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
    """
    # Drop a one- or two-letter prefix (r/u/f, or the fr/rf combinations).
    prefix_length = 0
    if token[:2].lower() in {"fr", "rf"}:
        prefix_length = 2
    elif token[0].lower() in {"r", "u", "f"}:
        prefix_length = 1
    body = token[prefix_length:]
    # Drop the surrounding quotes: three for longstrings, one otherwise.
    quote_length = 3 if body[:3] in {'"""', "'''"} else 1
    return body[quote_length:-quote_length]


-def _is_long_string(string_token: str) ->bool:
def _is_long_string(string_token: str) -> bool:
    """Is this string token a "longstring" (is it triple-quoted)?

    Args:
        string_token: The raw token text of the string literal.

    Returns:
        True when either long-string regex matches the token.
    """
    if SINGLE_QUOTED_REGEX.match(string_token):
        return True
    return DOUBLE_QUOTED_REGEX.match(string_token) is not None


-def _get_quote_delimiter(string_token: str) ->str:
def _get_quote_delimiter(string_token: str) -> str:
    """Returns the quote character used to delimit this token string.

    This function checks whether the token is a well-formed string.

    Args:
        string_token: The raw token text of the string literal.

    Returns:
        The quote character delimiting the literal.

    Raises:
      ValueError: No quote delimiter characters are present.
    """
    matched = QUOTE_DELIMITER_REGEX.match(string_token)
    if matched is None:
        raise ValueError(f"string token {string_token} is not a well-formed string")
    return matched.group(2)


-def _is_quote_delimiter_chosen_freely(string_token: str) ->bool:
def _is_quote_delimiter_chosen_freely(string_token: str) -> bool:
    """Was there a non-awkward option for the quote delimiter?

    A delimiter counts as freely chosen when the literal is not triple-quoted
    and its parsed body does not contain the opposite quote character (i.e. no
    escaping concern forced the choice).  Triple-quoted strings are excepted
    under the assumption that their quote characters are set by policy.
    """
    delimiter = _get_quote_delimiter(string_token)
    alternative = "'" if delimiter == '"' else '"'
    if not delimiter or _is_long_string(string_token):
        return False
    return alternative not in str_eval(string_token)
diff --git a/pylint/checkers/threading_checker.py b/pylint/checkers/threading_checker.py
index 76dec0d98..b289d6707 100644
--- a/pylint/checkers/threading_checker.py
+++ b/pylint/checkers/threading_checker.py
@@ -1,8 +1,16 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.checkers.utils import only_required_for_messages, safe_infer
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter

@@ -12,11 +20,40 @@ class ThreadingChecker(BaseChecker):

     - useless with lock - locking used in wrong way that has no effect (with threading.Lock():)
     """
-    name = 'threading'
-    LOCKS = frozenset(('threading.Lock', 'threading.RLock',
-        'threading.Condition', 'threading.Semaphore',
-        'threading.BoundedSemaphore'))
-    msgs = {'W2101': ("'%s()' directly created in 'with' has no effect",
-        'useless-with-lock',
-        'Used when a new lock instance is created by using with statement which has no effect. Instead, an existing instance should be used to acquire lock.'
-        )}
+
+    name = "threading"
+
+    LOCKS = frozenset(
+        (
+            "threading.Lock",
+            "threading.RLock",
+            "threading.Condition",
+            "threading.Semaphore",
+            "threading.BoundedSemaphore",
+        )
+    )
+
+    msgs = {
+        "W2101": (
+            "'%s()' directly created in 'with' has no effect",
+            "useless-with-lock",
+            "Used when a new lock instance is created by using with statement "
+            "which has no effect. Instead, an existing instance should be used to acquire lock.",
+        ),
+    }
+
+    @only_required_for_messages("useless-with-lock")
+    def visit_with(self, node: nodes.With) -> None:
+        context_managers = (c for c, _ in node.items if isinstance(c, nodes.Call))
+        for context_manager in context_managers:
+            if isinstance(context_manager, nodes.Call):
+                infered_function = safe_infer(context_manager.func)
+                if infered_function is None:
+                    continue
+                qname = infered_function.qname()
+                if qname in self.LOCKS:
+                    self.add_message("useless-with-lock", node=node, args=qname)
+
+
def register(linter: PyLinter) -> None:
    """Register the threading checker with *linter* (pylint plugin entry point)."""
    linter.register_checker(ThreadingChecker(linter))
diff --git a/pylint/checkers/typecheck.py b/pylint/checkers/typecheck.py
index f7ac53ca5..9e6465531 100644
--- a/pylint/checkers/typecheck.py
+++ b/pylint/checkers/typecheck.py
@@ -1,5 +1,11 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Try to find more bugs in the code using astroid inference capabilities."""
+
 from __future__ import annotations
+
 import heapq
 import itertools
 import operator
@@ -10,31 +16,84 @@ from collections.abc import Callable, Iterable
 from functools import cached_property, singledispatch
 from re import Pattern
 from typing import TYPE_CHECKING, Any, Literal, Union
+
 import astroid
 import astroid.exceptions
 import astroid.helpers
 from astroid import arguments, bases, nodes, util
 from astroid.nodes import _base_nodes
 from astroid.typing import InferenceResult, SuccessfulInferenceResult
+
 from pylint.checkers import BaseChecker, utils
-from pylint.checkers.utils import decorated_with, decorated_with_property, has_known_bases, is_builtin_object, is_comprehension, is_hashable, is_inside_abstract_class, is_iterable, is_mapping, is_module_ignored, is_node_in_type_annotation_context, is_none, is_overload_stub, is_postponed_evaluation_enabled, is_super, node_ignores_exception, only_required_for_messages, safe_infer, supports_delitem, supports_getitem, supports_membership_test, supports_setitem
+from pylint.checkers.utils import (
+    decorated_with,
+    decorated_with_property,
+    has_known_bases,
+    is_builtin_object,
+    is_comprehension,
+    is_hashable,
+    is_inside_abstract_class,
+    is_iterable,
+    is_mapping,
+    is_module_ignored,
+    is_node_in_type_annotation_context,
+    is_none,
+    is_overload_stub,
+    is_postponed_evaluation_enabled,
+    is_super,
+    node_ignores_exception,
+    only_required_for_messages,
+    safe_infer,
+    supports_delitem,
+    supports_getitem,
+    supports_membership_test,
+    supports_setitem,
+)
 from pylint.constants import PY310_PLUS
 from pylint.interfaces import HIGH, INFERENCE
 from pylint.typing import MessageDefinitionTuple
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-CallableObjects = Union[bases.BoundMethod, bases.UnboundMethod, nodes.
-    FunctionDef, nodes.Lambda, nodes.ClassDef]
-STR_FORMAT = {'builtins.str.format'}
-ASYNCIO_COROUTINE = 'asyncio.coroutines.coroutine'
-BUILTIN_TUPLE = 'builtins.tuple'
-TYPE_ANNOTATION_NODES_TYPES = (nodes.AnnAssign, nodes.Arguments, nodes.
-    FunctionDef)
-BUILTINS_IMPLICIT_RETURN_NONE = {'builtins.dict': {'clear', 'update'},
-    'builtins.list': {'append', 'clear', 'extend', 'insert', 'remove',
-    'reverse', 'sort'}, 'builtins.set': {'add', 'clear',
-    'difference_update', 'discard', 'intersection_update', 'remove',
-    'symmetric_difference_update', 'update'}}
+
+CallableObjects = Union[
+    bases.BoundMethod,
+    bases.UnboundMethod,
+    nodes.FunctionDef,
+    nodes.Lambda,
+    nodes.ClassDef,
+]
+
+STR_FORMAT = {"builtins.str.format"}
+ASYNCIO_COROUTINE = "asyncio.coroutines.coroutine"
+BUILTIN_TUPLE = "builtins.tuple"
+TYPE_ANNOTATION_NODES_TYPES = (
+    nodes.AnnAssign,
+    nodes.Arguments,
+    nodes.FunctionDef,
+)
+BUILTINS_IMPLICIT_RETURN_NONE = {
+    "builtins.dict": {"clear", "update"},
+    "builtins.list": {
+        "append",
+        "clear",
+        "extend",
+        "insert",
+        "remove",
+        "reverse",
+        "sort",
+    },
+    "builtins.set": {
+        "add",
+        "clear",
+        "difference_update",
+        "discard",
+        "intersection_update",
+        "remove",
+        "symmetric_difference_update",
+        "update",
+    },
+}


 class VERSION_COMPATIBLE_OVERLOAD:
@@ -44,9 +103,12 @@ class VERSION_COMPATIBLE_OVERLOAD:
 VERSION_COMPATIBLE_OVERLOAD_SENTINEL = VERSION_COMPATIBLE_OVERLOAD()


-def _is_owner_ignored(owner: SuccessfulInferenceResult, attrname: (str |
-    None), ignored_classes: Iterable[str], ignored_modules: Iterable[str]
-    ) ->bool:
def _is_owner_ignored(
    owner: SuccessfulInferenceResult,
    attrname: str | None,
    ignored_classes: Iterable[str],
    ignored_modules: Iterable[str],
) -> bool:
    """Check if the given owner should be ignored.

    True when the owner's module qualified name matches *ignored_modules*, or
    when either *attrname* or the owner's qualified name appears in
    *ignored_classes*.
    """
    # A module-level ignore wins outright.
    if is_module_ignored(owner.root().qname(), ignored_modules):
        return True

    # Otherwise match the attribute name or the owner's qualified name
    # against the ignored-classes entries.
    owner_qname = owner.qname() if hasattr(owner, "qname") else ""
    return any(entry in (attrname, owner_qname) for entry in set(ignored_classes))
+
+
+@singledispatch
+def _node_names(node: SuccessfulInferenceResult) -> Iterable[str]:
+    if not hasattr(node, "locals"):
+        return []
+    return node.locals.keys()  # type: ignore[no-any-return]
+
+
+@_node_names.register(nodes.ClassDef)
+@_node_names.register(astroid.Instance)
+def _(node: nodes.ClassDef | bases.Instance) -> Iterable[str]:
+    values = itertools.chain(node.instance_attrs.keys(), node.locals.keys())

+    try:
+        mro = node.mro()[1:]
+    except (NotImplementedError, TypeError, astroid.MroError):
+        mro = node.ancestors()

-def _similar_names(owner: SuccessfulInferenceResult, attrname: (str | None),
-    distance_threshold: int, max_choices: int) ->list[str]:
+    other_values = [value for cls in mro for value in _node_names(cls)]
+    return itertools.chain(values, other_values)
+
+
+def _string_distance(seq1: str, seq2: str) -> int:
+    seq2_length = len(seq2)
+
+    row = [*list(range(1, seq2_length + 1)), 0]
+    for seq1_index, seq1_char in enumerate(seq1):
+        last_row = row
+        row = [0] * seq2_length + [seq1_index + 1]
+
+        for seq2_index, seq2_char in enumerate(seq2):
+            row[seq2_index] = min(
+                last_row[seq2_index] + 1,
+                row[seq2_index - 1] + 1,
+                last_row[seq2_index - 1] + (seq1_char != seq2_char),
+            )
+
+    return row[seq2_length - 1]
+
+
+def _similar_names(
+    owner: SuccessfulInferenceResult,
+    attrname: str | None,
+    distance_threshold: int,
+    max_choices: int,
+) -> list[str]:
     """Given an owner and a name, try to find similar names.

     The similar names are searched given a distance metric and only
     a given number of choices will be returned.
     """
-    pass
+    possible_names: list[tuple[str, int]] = []
+    names = _node_names(owner)
+
+    for name in names:
+        if name == attrname:
+            continue
+
+        distance = _string_distance(attrname or "", name)
+        if distance <= distance_threshold:
+            possible_names.append((name, distance))
+
+    # Now get back the values with a minimum, up to the given
+    # limit or choices.
+    picked = [
+        name
+        for (name, _) in heapq.nsmallest(
+            max_choices, possible_names, key=operator.itemgetter(1)
+        )
+    ]
+    return sorted(picked)
+
+
+def _missing_member_hint(
+    owner: SuccessfulInferenceResult,
+    attrname: str | None,
+    distance_threshold: int,
+    max_choices: int,
+) -> str:
+    names = _similar_names(owner, attrname, distance_threshold, max_choices)
+    if not names:
+        # No similar name.
+        return ""
+
+    names = [repr(name) for name in names]
+    if len(names) == 1:
+        names_hint = ", ".join(names)
+    else:
+        names_hint = f"one of {', '.join(names[:-1])} or {names[-1]}"

+    return f"; maybe {names_hint}?"

-MSGS: dict[str, MessageDefinitionTuple] = {'E1101': (
-    '%s %r has no %r member%s', 'no-member',
-    'Used when a variable is accessed for a nonexistent member.', {
-    'old_names': [('E1103', 'maybe-no-member')]}), 'I1101': (
-    '%s %r has no %r member%s, but source is unavailable. Consider adding this module to extension-pkg-allow-list if you want to perform analysis based on run-time introspection of living objects.'
-    , 'c-extension-no-member',
-    'Used when a variable is accessed for non-existent member of C extension. Due to unavailability of source static analysis is impossible, but it may be performed by introspecting living objects in run-time.'
-    ), 'E1102': ('%s is not callable', 'not-callable',
-    'Used when an object being called has been inferred to a non callable object.'
-    ), 'E1111': (
-    'Assigning result of a function call, where the function has no return',
-    'assignment-from-no-return',
-    "Used when an assignment is done on a function call but the inferred function doesn't return anything."
-    ), 'E1120': ('No value for argument %s in %s call',
-    'no-value-for-parameter',
-    'Used when a function call passes too few arguments.'), 'E1121': (
-    'Too many positional arguments for %s call', 'too-many-function-args',
-    'Used when a function call passes too many positional arguments.'),
-    'E1123': ('Unexpected keyword argument %r in %s call',
-    'unexpected-keyword-arg',
-    "Used when a function call passes a keyword argument that doesn't correspond to one of the function's parameter names."
-    ), 'E1124': ('Argument %r passed by position and keyword in %s call',
-    'redundant-keyword-arg',
-    'Used when a function call would result in assigning multiple values to a function parameter, one value from a positional argument and one from a keyword argument.'
-    ), 'E1125': ('Missing mandatory keyword argument %r in %s call',
-    'missing-kwoa',
-    'Used when a function call does not pass a mandatory keyword-only argument.'
-    ), 'E1126': (
-    'Sequence index is not an int, slice, or instance with __index__',
-    'invalid-sequence-index',
-    'Used when a sequence type is indexed with an invalid type. Valid types are ints, slices, and objects with an __index__ method.'
-    ), 'E1127': (
-    'Slice index is not an int, None, or instance with __index__',
-    'invalid-slice-index',
-    'Used when a slice index is not an integer, None, or an object with an __index__ method.'
-    ), 'E1128': (
-    'Assigning result of a function call, where the function returns None',
-    'assignment-from-none',
-    'Used when an assignment is done on a function call but the inferred function returns nothing but None.'
-    , {'old_names': [('W1111', 'old-assignment-from-none')]}), 'E1129': (
-    "Context manager '%s' doesn't implement __enter__ and __exit__.",
-    'not-context-manager',
-    "Used when an instance in a with statement doesn't implement the context manager protocol(__enter__/__exit__)."
-    ), 'E1130': ('%s', 'invalid-unary-operand-type',
-    'Emitted when a unary operand is used on an object which does not support this type of operation.'
-    ), 'E1131': ('%s', 'unsupported-binary-operation',
-    'Emitted when a binary arithmetic operation between two operands is not supported.'
-    ), 'E1132': (
-    'Got multiple values for keyword argument %r in function call',
-    'repeated-keyword',
-    'Emitted when a function call got multiple values for a keyword.'),
-    'E1135': ("Value '%s' doesn't support membership test",
-    'unsupported-membership-test',
-    "Emitted when an instance in membership test expression doesn't implement membership protocol (__contains__/__iter__/__getitem__)."
-    ), 'E1136': ("Value '%s' is unsubscriptable", 'unsubscriptable-object',
-    "Emitted when a subscripted value doesn't support subscription (i.e. doesn't define __getitem__ method or __class_getitem__ for a class)."
-    ), 'E1137': ('%r does not support item assignment',
-    'unsupported-assignment-operation',
-    "Emitted when an object does not support item assignment (i.e. doesn't define __setitem__ method)."
-    ), 'E1138': ('%r does not support item deletion',
-    'unsupported-delete-operation',
-    "Emitted when an object does not support item deletion (i.e. doesn't define __delitem__ method)."
-    ), 'E1139': ('Invalid metaclass %r used', 'invalid-metaclass',
-    'Emitted whenever we can detect that a class is using, as a metaclass, something which might be invalid for using as a metaclass.'
-    ), 'E1141': (
-    'Unpacking a dictionary in iteration without calling .items()',
-    'dict-iter-missing-items',
-    'Emitted when trying to iterate through a dict without calling .items()'
-    ), 'E1142': ("'await' should be used within an async function",
-    'await-outside-async',
-    'Emitted when await is used outside an async function.'), 'E1143': (
-    "'%s' is unhashable and can't be used as a %s in a %s",
-    'unhashable-member',
-    "Emitted when a dict key or set member is not hashable (i.e. doesn't define __hash__ method)."
-    , {'old_names': [('E1140', 'unhashable-dict-key')]}), 'E1144': (
-    'Slice step cannot be 0', 'invalid-slice-step',
-    "Used when a slice step is 0 and the object doesn't implement a custom __getitem__ method."
-    ), 'W1113': (
-    'Keyword argument before variable positional arguments list in the definition of %s function'
-    , 'keyword-arg-before-vararg',
-    'When defining a keyword argument before variable positional arguments, one can end up in having multiple values passed for the aforementioned parameter in case the method is called with keyword arguments.'
-    ), 'W1114': ('Positional arguments appear to be out of order',
-    'arguments-out-of-order',
-    "Emitted  when the caller's argument names fully match the parameter names in the function signature but do not have the same order."
-    ), 'W1115': ('Non-string value assigned to __name__',
-    'non-str-assignment-to-dunder-name',
-    'Emitted when a non-string value is assigned to __name__'), 'W1116': (
-    'Second argument of isinstance is not a type',
-    'isinstance-second-argument-not-valid-type',
-    'Emitted when the second argument of an isinstance call is not a type.'
-    ), 'W1117': (
-    '%r will be included in %r since a positional-only parameter with this name already exists'
-    , 'kwarg-superseded-by-positional-arg',
-    'Emitted when a function is called with a keyword argument that has the same name as a positional-only parameter and the function contains a keyword variadic parameter dict.'
-    )}
-SEQUENCE_TYPES = {'str', 'unicode', 'list', 'tuple', 'bytearray', 'xrange',
-    'range', 'bytes', 'memoryview'}
-
-
-def _emit_no_member(node: (nodes.Attribute | nodes.AssignAttr | nodes.
-    DelAttr), owner: InferenceResult, owner_name: (str | None),
-    mixin_class_rgx: Pattern[str], ignored_mixins: bool=True, ignored_none:
-    bool=True) ->bool:
+
+MSGS: dict[str, MessageDefinitionTuple] = {
+    "E1101": (
+        "%s %r has no %r member%s",
+        "no-member",
+        "Used when a variable is accessed for a nonexistent member.",
+        {"old_names": [("E1103", "maybe-no-member")]},
+    ),
+    "I1101": (
+        "%s %r has no %r member%s, but source is unavailable. Consider "
+        "adding this module to extension-pkg-allow-list if you want "
+        "to perform analysis based on run-time introspection of living objects.",
+        "c-extension-no-member",
+        "Used when a variable is accessed for non-existent member of C "
+        "extension. Due to unavailability of source static analysis is impossible, "
+        "but it may be performed by introspecting living objects in run-time.",
+    ),
+    "E1102": (
+        "%s is not callable",
+        "not-callable",
+        "Used when an object being called has been inferred to a non "
+        "callable object.",
+    ),
+    "E1111": (
+        "Assigning result of a function call, where the function has no return",
+        "assignment-from-no-return",
+        "Used when an assignment is done on a function call but the "
+        "inferred function doesn't return anything.",
+    ),
+    "E1120": (
+        "No value for argument %s in %s call",
+        "no-value-for-parameter",
+        "Used when a function call passes too few arguments.",
+    ),
+    "E1121": (
+        "Too many positional arguments for %s call",
+        "too-many-function-args",
+        "Used when a function call passes too many positional arguments.",
+    ),
+    "E1123": (
+        "Unexpected keyword argument %r in %s call",
+        "unexpected-keyword-arg",
+        "Used when a function call passes a keyword argument that "
+        "doesn't correspond to one of the function's parameter names.",
+    ),
+    "E1124": (
+        "Argument %r passed by position and keyword in %s call",
+        "redundant-keyword-arg",
+        "Used when a function call would result in assigning multiple "
+        "values to a function parameter, one value from a positional "
+        "argument and one from a keyword argument.",
+    ),
+    "E1125": (
+        "Missing mandatory keyword argument %r in %s call",
+        "missing-kwoa",
+        (
+            "Used when a function call does not pass a mandatory"
+            " keyword-only argument."
+        ),
+    ),
+    "E1126": (
+        "Sequence index is not an int, slice, or instance with __index__",
+        "invalid-sequence-index",
+        "Used when a sequence type is indexed with an invalid type. "
+        "Valid types are ints, slices, and objects with an __index__ "
+        "method.",
+    ),
+    "E1127": (
+        "Slice index is not an int, None, or instance with __index__",
+        "invalid-slice-index",
+        "Used when a slice index is not an integer, None, or an object "
+        "with an __index__ method.",
+    ),
+    "E1128": (
+        "Assigning result of a function call, where the function returns None",
+        "assignment-from-none",
+        "Used when an assignment is done on a function call but the "
+        "inferred function returns nothing but None.",
+        {"old_names": [("W1111", "old-assignment-from-none")]},
+    ),
+    "E1129": (
+        "Context manager '%s' doesn't implement __enter__ and __exit__.",
+        "not-context-manager",
+        "Used when an instance in a with statement doesn't implement "
+        "the context manager protocol(__enter__/__exit__).",
+    ),
+    "E1130": (
+        "%s",
+        "invalid-unary-operand-type",
+        "Emitted when a unary operand is used on an object which does not "
+        "support this type of operation.",
+    ),
+    "E1131": (
+        "%s",
+        "unsupported-binary-operation",
+        "Emitted when a binary arithmetic operation between two "
+        "operands is not supported.",
+    ),
+    "E1132": (
+        "Got multiple values for keyword argument %r in function call",
+        "repeated-keyword",
+        "Emitted when a function call got multiple values for a keyword.",
+    ),
+    "E1135": (
+        "Value '%s' doesn't support membership test",
+        "unsupported-membership-test",
+        "Emitted when an instance in membership test expression doesn't "
+        "implement membership protocol (__contains__/__iter__/__getitem__).",
+    ),
+    "E1136": (
+        "Value '%s' is unsubscriptable",
+        "unsubscriptable-object",
+        "Emitted when a subscripted value doesn't support subscription "
+        "(i.e. doesn't define __getitem__ method or __class_getitem__ for a class).",
+    ),
+    "E1137": (
+        "%r does not support item assignment",
+        "unsupported-assignment-operation",
+        "Emitted when an object does not support item assignment "
+        "(i.e. doesn't define __setitem__ method).",
+    ),
+    "E1138": (
+        "%r does not support item deletion",
+        "unsupported-delete-operation",
+        "Emitted when an object does not support item deletion "
+        "(i.e. doesn't define __delitem__ method).",
+    ),
+    "E1139": (
+        "Invalid metaclass %r used",
+        "invalid-metaclass",
+        "Emitted whenever we can detect that a class is using, "
+        "as a metaclass, something which might be invalid for using as "
+        "a metaclass.",
+    ),
+    "E1141": (
+        "Unpacking a dictionary in iteration without calling .items()",
+        "dict-iter-missing-items",
+        "Emitted when trying to iterate through a dict without calling .items()",
+    ),
+    "E1142": (
+        "'await' should be used within an async function",
+        "await-outside-async",
+        "Emitted when await is used outside an async function.",
+    ),
+    "E1143": (
+        "'%s' is unhashable and can't be used as a %s in a %s",
+        "unhashable-member",
+        "Emitted when a dict key or set member is not hashable "
+        "(i.e. doesn't define __hash__ method).",
+        {"old_names": [("E1140", "unhashable-dict-key")]},
+    ),
+    "E1144": (
+        "Slice step cannot be 0",
+        "invalid-slice-step",
+        "Used when a slice step is 0 and the object doesn't implement "
+        "a custom __getitem__ method.",
+    ),
+    "W1113": (
+        "Keyword argument before variable positional arguments list "
+        "in the definition of %s function",
+        "keyword-arg-before-vararg",
+        "When defining a keyword argument before variable positional arguments, one can "
+        "end up in having multiple values passed for the aforementioned parameter in "
+        "case the method is called with keyword arguments.",
+    ),
+    "W1114": (
+        "Positional arguments appear to be out of order",
+        "arguments-out-of-order",
+        "Emitted  when the caller's argument names fully match the parameter "
+        "names in the function signature but do not have the same order.",
+    ),
+    "W1115": (
+        "Non-string value assigned to __name__",
+        "non-str-assignment-to-dunder-name",
+        "Emitted when a non-string value is assigned to __name__",
+    ),
+    "W1116": (
+        "Second argument of isinstance is not a type",
+        "isinstance-second-argument-not-valid-type",
+        "Emitted when the second argument of an isinstance call is not a type.",
+    ),
+    "W1117": (
+        "%r will be included in %r since a positional-only parameter with this name already exists",
+        "kwarg-superseded-by-positional-arg",
+        "Emitted when a function is called with a keyword argument that has the "
+        "same name as a positional-only parameter and the function contains a "
+        "keyword variadic parameter dict.",
+    ),
+}
+
+# builtin sequence types in Python 2 and 3.
+SEQUENCE_TYPES = {
+    "str",
+    "unicode",
+    "list",
+    "tuple",
+    "bytearray",
+    "xrange",
+    "range",
+    "bytes",
+    "memoryview",
+}
+
+
+def _emit_no_member(
+    node: nodes.Attribute | nodes.AssignAttr | nodes.DelAttr,
+    owner: InferenceResult,
+    owner_name: str | None,
+    mixin_class_rgx: Pattern[str],
+    ignored_mixins: bool = True,
+    ignored_none: bool = True,
+) -> bool:
     """Try to see if no-member should be emitted for the given owner.

     The following cases are ignored:
@@ -186,18 +444,262 @@ def _emit_no_member(node: (nodes.Attribute | nodes.AssignAttr | nodes.
           AttributeError, Exception or bare except.
         * The node is guarded behind and `IF` or `IFExp` node
     """
-    pass
+    # pylint: disable = too-many-return-statements, too-many-branches
+    if node_ignores_exception(node, AttributeError):
+        return False
+    if ignored_none and isinstance(owner, nodes.Const) and owner.value is None:
+        return False
+    if is_super(owner) or getattr(owner, "type", None) == "metaclass":
+        return False
+    if owner_name and ignored_mixins and mixin_class_rgx.match(owner_name):
+        return False
+    if isinstance(owner, nodes.FunctionDef) and (
+        owner.decorators or owner.is_abstract()
+    ):
+        return False
+    if isinstance(owner, (astroid.Instance, nodes.ClassDef)):
+        # Issue #2565: Don't ignore enums, as they have a `__getattr__` but it's not
+        # invoked at this point.
+        try:
+            metaclass = owner.metaclass()
+        except astroid.MroError:
+            pass
+        else:
+            # Renamed in Python 3.10 to `EnumType`
+            if metaclass and metaclass.qname() in {"enum.EnumMeta", "enum.EnumType"}:
+                return not _enum_has_attribute(owner, node)
+        if owner.has_dynamic_getattr():
+            return False
+        if not has_known_bases(owner):
+            return False
+
+        # Exclude typed annotations, since these might actually exist
+        # at some point during the runtime of the program.
+        if utils.is_attribute_typed_annotation(owner, node.attrname):
+            return False
+    if isinstance(owner, astroid.objects.Super):
+        # Verify if we are dealing with an invalid Super object.
+        # If it is invalid, then there's no point in checking that
+        # it has the required attribute. Also, don't fail if the
+        # MRO is invalid.
+        try:
+            owner.super_mro()
+        except (astroid.MroError, astroid.SuperError):
+            return False
+        if not all(has_known_bases(base) for base in owner.type.mro()):
+            return False
+    if isinstance(owner, nodes.Module):
+        try:
+            owner.getattr("__getattr__")
+            return False
+        except astroid.NotFoundError:
+            pass
+    if owner_name and node.attrname.startswith("_" + owner_name):
+        # Test if an attribute has been mangled ('private' attribute)
+        unmangled_name = node.attrname.split("_" + owner_name)[-1]
+        try:
+            if owner.getattr(unmangled_name, context=None) is not None:
+                return False
+        except astroid.NotFoundError:
+            return True
+
+    # Don't emit no-member if guarded behind `IF` or `IFExp`
+    #   * Walk up recursively until if statement is found.
+    #   * Check if condition can be inferred as `Const`,
+    #       would evaluate as `False`,
+    #       and whether the node is part of the `body`.
+    #   * Continue checking until scope of node is reached.
+    scope: nodes.NodeNG = node.scope()
+    node_origin: nodes.NodeNG = node
+    parent: nodes.NodeNG = node.parent
+    while parent != scope:
+        if isinstance(parent, (nodes.If, nodes.IfExp)):
+            inferred = safe_infer(parent.test)
+            if (  # pylint: disable=too-many-boolean-expressions
+                isinstance(inferred, nodes.Const)
+                and inferred.bool_value() is False
+                and (
+                    isinstance(parent, nodes.If)
+                    and node_origin in parent.body
+                    or isinstance(parent, nodes.IfExp)
+                    and node_origin == parent.body
+                )
+            ):
+                return False
+        node_origin, parent = parent, parent.parent
+
+    return True
+
+
+def _get_all_attribute_assignments(
+    node: nodes.FunctionDef, name: str | None = None
+) -> set[str]:
+    attributes: set[str] = set()
+    for child in node.nodes_of_class((nodes.Assign, nodes.AnnAssign)):
+        targets = []
+        if isinstance(child, nodes.Assign):
+            targets = child.targets
+        elif isinstance(child, nodes.AnnAssign):
+            targets = [child.target]
+        for assign_target in targets:
+            if isinstance(assign_target, nodes.Tuple):
+                targets.extend(assign_target.elts)
+                continue
+            if (
+                isinstance(assign_target, nodes.AssignAttr)
+                and isinstance(assign_target.expr, nodes.Name)
+                and (name is None or assign_target.expr.name == name)
+            ):
+                attributes.add(assign_target.attrname)
+    return attributes
+
+
+def _enum_has_attribute(
+    owner: astroid.Instance | nodes.ClassDef, node: nodes.Attribute
+) -> bool:
+    if isinstance(owner, astroid.Instance):
+        enum_def = next(
+            (b.parent for b in owner.bases if isinstance(b.parent, nodes.ClassDef)),
+            None,
+        )
+
+        if enum_def is None:
+            # We don't inherit from anything, so try to find the parent
+            # class definition and roll with that
+            enum_def = node
+            while enum_def is not None and not isinstance(enum_def, nodes.ClassDef):
+                enum_def = enum_def.parent
+
+        # If this blows, something is clearly wrong
+        assert enum_def is not None, "enum_def unexpectedly None"
+    else:
+        enum_def = owner

+    # Find __new__ and __init__
+    dunder_new = next((m for m in enum_def.methods() if m.name == "__new__"), None)
+    dunder_init = next((m for m in enum_def.methods() if m.name == "__init__"), None)

-def _has_parent_of_type(node: nodes.Call, node_type: (nodes.Keyword | nodes
-    .Starred), statement: _base_nodes.Statement) ->bool:
+    enum_attributes: set[str] = set()
+
+    # Find attributes defined in __new__
+    if dunder_new:
+        # Get the object returned in __new__
+        returned_obj_name = next(
+            (c.value for c in dunder_new.get_children() if isinstance(c, nodes.Return)),
+            None,
+        )
+        if isinstance(returned_obj_name, nodes.Name):
+            # Find all attribute assignments to the returned object
+            enum_attributes |= _get_all_attribute_assignments(
+                dunder_new, returned_obj_name.name
+            )
+
+    # Find attributes defined in __init__
+    if dunder_init and dunder_init.body and dunder_init.args:
+        # Grab the name referring to `self` from the function def
+        enum_attributes |= _get_all_attribute_assignments(
+            dunder_init, dunder_init.args.arguments[0].name
+        )
+
+    return node.attrname in enum_attributes
+
+
+def _determine_callable(
+    callable_obj: nodes.NodeNG,
+) -> tuple[CallableObjects, int, str]:
+    # TODO: The typing of the second return variable is actually Literal[0,1]
+    # We need typing on astroid.NodeNG.implicit_parameters for this
+    # TODO: The typing of the third return variable can be narrowed to a Literal
+    # We need typing on astroid.NodeNG.type for this
+
+    # Ordering is important, since BoundMethod is a subclass of UnboundMethod,
+    # and Function inherits Lambda.
+    parameters = 0
+    if hasattr(callable_obj, "implicit_parameters"):
+        parameters = callable_obj.implicit_parameters()
+    if isinstance(callable_obj, bases.BoundMethod):
+        # Bound methods have an extra implicit 'self' argument.
+        return callable_obj, parameters, callable_obj.type
+    if isinstance(callable_obj, bases.UnboundMethod):
+        return callable_obj, parameters, "unbound method"
+    if isinstance(callable_obj, nodes.FunctionDef):
+        return callable_obj, parameters, callable_obj.type
+    if isinstance(callable_obj, nodes.Lambda):
+        return callable_obj, parameters, "lambda"
+    if isinstance(callable_obj, nodes.ClassDef):
+        # Class instantiation, lookup __new__ instead.
+        # If we only find object.__new__, we can safely check __init__
+        # instead. If __new__ belongs to builtins, then we look
+        # again for __init__ in the locals, since we won't have
+        # argument information for the builtin __new__ function.
+        try:
+            # Use the last definition of __new__.
+            new = callable_obj.local_attr("__new__")[-1]
+        except astroid.NotFoundError:
+            new = None
+
+        from_object = new and new.parent.scope().name == "object"
+        from_builtins = new and new.root().name in sys.builtin_module_names
+
+        if not new or from_object or from_builtins:
+            try:
+                # Use the last definition of __init__.
+                callable_obj = callable_obj.local_attr("__init__")[-1]
+            except astroid.NotFoundError as e:
+                raise ValueError from e
+        else:
+            callable_obj = new
+
+        if not isinstance(callable_obj, nodes.FunctionDef):
+            raise ValueError
+        # both have an extra implicit 'cls'/'self' argument.
+        return callable_obj, parameters, "constructor"
+
+    raise ValueError
+
+
+def _has_parent_of_type(
+    node: nodes.Call,
+    node_type: nodes.Keyword | nodes.Starred,
+    statement: _base_nodes.Statement,
+) -> bool:
     """Check if the given node has a parent of the given type."""
-    pass
+    parent = node.parent
+    while not isinstance(parent, node_type) and statement.parent_of(parent):
+        parent = parent.parent
+    return isinstance(parent, node_type)


-def _no_context_variadic(node: nodes.Call, variadic_name: (str | None),
-    variadic_type: (nodes.Keyword | nodes.Starred), variadics: list[nodes.
-    Keyword | nodes.Starred]) ->bool:
+def _no_context_variadic_keywords(node: nodes.Call, scope: nodes.Lambda) -> bool:
+    statement = node.statement()
+    variadics = []
+
+    if (
+        isinstance(scope, nodes.Lambda)
+        and not isinstance(scope, nodes.FunctionDef)
+        or isinstance(statement, nodes.With)
+    ):
+        variadics = list(node.keywords or []) + node.kwargs
+    elif isinstance(statement, (nodes.Return, nodes.Expr, nodes.Assign)) and isinstance(
+        statement.value, nodes.Call
+    ):
+        call = statement.value
+        variadics = list(call.keywords or []) + call.kwargs
+
+    return _no_context_variadic(node, scope.args.kwarg, nodes.Keyword, variadics)
+
+
+def _no_context_variadic_positional(node: nodes.Call, scope: nodes.Lambda) -> bool:
+    variadics = node.starargs + node.kwargs
+    return _no_context_variadic(node, scope.args.vararg, nodes.Starred, variadics)
+
+
+def _no_context_variadic(
+    node: nodes.Call,
+    variadic_name: str | None,
+    variadic_type: nodes.Keyword | nodes.Starred,
+    variadics: list[nodes.Keyword | nodes.Starred],
+) -> bool:
     """Verify if the given call node has variadic nodes without context.

     This is a workaround for handling cases of nested call functions
@@ -208,11 +710,54 @@ def _no_context_variadic(node: nodes.Call, variadic_name: (str | None),
     This can lead pylint to believe that a function call receives
     too few arguments.
     """
-    pass
+    scope = node.scope()
+    is_in_lambda_scope = not isinstance(scope, nodes.FunctionDef) and isinstance(
+        scope, nodes.Lambda
+    )
+    statement = node.statement()
+    for name in statement.nodes_of_class(nodes.Name):
+        if name.name != variadic_name:
+            continue

+        inferred = safe_infer(name)
+        if isinstance(inferred, (nodes.List, nodes.Tuple)):
+            length = len(inferred.elts)
+        elif isinstance(inferred, nodes.Dict):
+            length = len(inferred.items)
+        else:
+            continue

-def _infer_from_metaclass_constructor(cls: nodes.ClassDef, func: nodes.
-    FunctionDef) ->(InferenceResult | None):
+        if is_in_lambda_scope and isinstance(inferred.parent, nodes.Arguments):
+            # The statement of the variadic will be the assignment itself,
+            # so we need to go the lambda instead
+            inferred_statement = inferred.parent.parent
+        else:
+            inferred_statement = inferred.statement()
+
+        if not length and isinstance(
+            inferred_statement, (nodes.Lambda, nodes.FunctionDef)
+        ):
+            is_in_starred_context = _has_parent_of_type(node, variadic_type, statement)
+            used_as_starred_argument = any(
+                variadic.value == name or variadic.value.parent_of(name)
+                for variadic in variadics
+            )
+            if is_in_starred_context or used_as_starred_argument:
+                return True
+    return False
+
+
+def _is_invalid_metaclass(metaclass: nodes.ClassDef) -> bool:
+    try:
+        mro = metaclass.mro()
+    except (astroid.DuplicateBasesError, astroid.InconsistentMroError):
+        return True
+    return not any(is_builtin_object(cls) and cls.name == "type" for cls in mro)
+
+
+def _infer_from_metaclass_constructor(
+    cls: nodes.ClassDef, func: nodes.FunctionDef
+) -> InferenceResult | None:
     """Try to infer what the given *func* constructor is building.

     :param astroid.FunctionDef func:
@@ -229,60 +774,298 @@ def _infer_from_metaclass_constructor(cls: nodes.ClassDef, func: nodes.
         if we couldn't infer it.
     :rtype: astroid.ClassDef
     """
-    pass
+    context = astroid.context.InferenceContext()
+
+    class_bases = nodes.List()
+    class_bases.postinit(elts=cls.bases)
+
+    attrs = nodes.Dict(
+        lineno=0, col_offset=0, parent=None, end_lineno=0, end_col_offset=0
+    )
+    local_names = [(name, values[-1]) for name, values in cls.locals.items()]
+    attrs.postinit(local_names)
+
+    builder_args = nodes.Tuple()
+    builder_args.postinit([cls.name, class_bases, attrs])
+
+    context.callcontext = astroid.context.CallContext(builder_args)
+    try:
+        inferred = next(func.infer_call_result(func, context), None)
+    except astroid.InferenceError:
+        return None
+    return inferred or None
+
+
def _is_c_extension(module_node: InferenceResult) -> bool:
    """Return True if *module_node* looks like a compiled (C) extension.

    A module qualifies when it is an actual module node, is not part of the
    standard library, and astroid could not fully load a Python-level
    definition for it.
    """
    if not isinstance(module_node, nodes.Module):
        return False
    if astroid.modutils.is_stdlib_module(module_node.name):
        return False
    return not module_node.fully_defined()
+
+
def _is_invalid_isinstance_type(arg: nodes.NodeNG) -> bool:
    """Return True only when *arg* is certainly not usable as a type.

    Used to validate the second argument of ``isinstance``; tuples of
    candidates and PEP 604 ``X | Y`` unions are checked recursively.
    """
    if PY310_PLUS and isinstance(arg, nodes.BinOp) and arg.op == "|":
        # `X | Y` syntax: invalid if either non-None side is invalid.
        return any(
            not is_none(operand) and _is_invalid_isinstance_type(operand)
            for operand in (arg.left, arg.right)
        )
    inferred = utils.safe_infer(arg)
    if not inferred:
        # Cannot infer it, so give the benefit of the doubt.
        return False
    if isinstance(inferred, nodes.Tuple):
        # A tuple of candidate types: invalid if any member is invalid.
        return any(_is_invalid_isinstance_type(member) for member in inferred.elts)
    if isinstance(inferred, nodes.ClassDef):
        return False
    if isinstance(inferred, astroid.Instance) and inferred.qname() == BUILTIN_TUPLE:
        return False
    if PY310_PLUS and isinstance(inferred, bases.UnionType):
        return any(
            not is_none(operand) and _is_invalid_isinstance_type(operand)
            for operand in (inferred.left, inferred.right)
        )
    return True


class TypeChecker(BaseChecker):
    """Try to find bugs in the code using type inference."""

    # Name of the checker's configuration section (pylintrc / pyproject).
    name = "typecheck"
    # Message definitions are shared at module level (see MSGS above).
    msgs = MSGS
    # Configuration options; each entry is (option-name, optparse-style dict).
    options = (
        (
            "ignore-on-opaque-inference",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "This flag controls whether pylint should warn about "
                "no-member and similar checks whenever an opaque object "
                "is returned when inferring. The inference can return "
                "multiple potential results while evaluating a Python object, "
                "but some branches might not be evaluated, which results in "
                "partial inference. In that case, it might be useful to still emit "
                "no-member and other checks for the rest of the inferred objects.",
            },
        ),
        (
            "mixin-class-rgx",
            {
                "default": ".*[Mm]ixin",
                "type": "regexp",
                "metavar": "<regexp>",
                "help": "Regex pattern to define which classes are considered mixins.",
            },
        ),
        (
            "ignore-mixin-members",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Tells whether missing members accessed in mixin "
                "class should be ignored. A class is considered mixin if its name matches "
                "the mixin-class-rgx option.",
                "kwargs": {"new_names": ["ignore-checks-for-mixin"]},
            },
        ),
        (
            "ignored-checks-for-mixins",
            {
                "default": [
                    "no-member",
                    "not-async-context-manager",
                    "not-context-manager",
                    "attribute-defined-outside-init",
                ],
                "type": "csv",
                "metavar": "<list of messages names>",
                "help": "List of symbolic message names to ignore for Mixin members.",
            },
        ),
        (
            "ignore-none",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Tells whether to warn about missing members when the owner "
                "of the attribute is inferred to be None.",
            },
        ),
        # The defaults here are *stdlib* names that (almost) always
        # lead to false positives, since their idiomatic use is
        # 'too dynamic' for pylint to grok.
        (
            "ignored-classes",
            {
                "default": (
                    "optparse.Values",
                    "thread._local",
                    "_thread._local",
                    "argparse.Namespace",
                ),
                "type": "csv",
                "metavar": "<members names>",
                "help": "List of class names for which member attributes "
                "should not be checked (useful for classes with "
                "dynamically set attributes). This supports "
                "the use of qualified names.",
            },
        ),
        (
            "generated-members",
            {
                "default": (),
                "type": "string",
                "metavar": "<members names>",
                "help": "List of members which are set dynamically and \
missed by pylint inference system, and so shouldn't trigger E1101 when \
accessed. Python regular expressions are accepted.",
            },
        ),
        (
            "contextmanager-decorators",
            {
                "default": ["contextlib.contextmanager"],
                "type": "csv",
                "metavar": "<decorator names>",
                "help": "List of decorators that produce context managers, "
                "such as contextlib.contextmanager. Add to this list "
                "to register other decorators that produce valid "
                "context managers.",
            },
        ),
        (
            "missing-member-hint-distance",
            {
                "default": 1,
                "type": "int",
                "metavar": "<member hint edit distance>",
                "help": "The minimum edit distance a name should have in order "
                "to be considered a similar match for a missing member name.",
            },
        ),
        (
            "missing-member-max-choices",
            {
                "default": 1,
                "type": "int",
                "metavar": "<member hint max choices>",
                "help": "The total number of similar names that should be taken in "
                "consideration when showing a hint for a missing member.",
            },
        ),
        (
            "missing-member-hint",
            {
                "default": True,
                "type": "yn",
                "metavar": "<missing member hint>",
                "help": "Show a hint with possible names when a member name was not "
                "found. The aspect of finding the hint is based on edit distance.",
            },
        ),
        (
            "signature-mutators",
            {
                "default": [],
                "type": "csv",
                "metavar": "<decorator names>",
                "help": "List of decorators that change the signature of "
                "a decorated function.",
            },
        ),
    )
+
+    def open(self) -> None:
+        py_version = self.linter.config.py_version
+        self._py310_plus = py_version >= (3, 10)
+        self._mixin_class_rgx = self.linter.config.mixin_class_rgx
+
+    @cached_property
+    def _suggestion_mode(self) -> bool:
+        return self.linter.config.suggestion_mode  # type: ignore[no-any-return]
+
+    @cached_property
+    def _compiled_generated_members(self) -> tuple[Pattern[str], ...]:
+        # do this lazily since config not fully initialized in __init__
+        # generated_members may contain regular expressions
+        # (surrounded by quote `"` and followed by a comma `,`)
+        # REQUEST,aq_parent,"[a-zA-Z]+_set{1,2}"' =>
+        # ('REQUEST', 'aq_parent', '[a-zA-Z]+_set{1,2}')
+        generated_members = self.linter.config.generated_members
+        if isinstance(generated_members, str):
+            gen = shlex.shlex(generated_members)
+            gen.whitespace += ","
+            gen.wordchars += r"[]-+\.*?()|"
+            generated_members = tuple(tok.strip('"') for tok in gen)
+        return tuple(re.compile(exp) for exp in generated_members)
+
+    @only_required_for_messages("keyword-arg-before-vararg")
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
+        # check for keyword arg before varargs.
+
+        if node.args.vararg and node.args.defaults:
+            # When `positional-only` parameters are present then only
+            # `positional-or-keyword` parameters are checked. I.e:
+            # >>> def name(pos_only_params, /, pos_or_keyword_params, *args): ...
+            if node.args.posonlyargs and not node.args.args:
+                return
+            self.add_message("keyword-arg-before-vararg", node=node, args=(node.name))
+
     visit_asyncfunctiondef = visit_functiondef

-    @only_required_for_messages('no-member', 'c-extension-no-member')
-    def visit_attribute(self, node: (nodes.Attribute | nodes.AssignAttr |
-        nodes.DelAttr)) ->None:
+    @only_required_for_messages("invalid-metaclass")
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
+        def _metaclass_name(metaclass: InferenceResult) -> str | None:
+            # pylint: disable=unidiomatic-typecheck
+            if isinstance(metaclass, (nodes.ClassDef, nodes.FunctionDef)):
+                return metaclass.name  # type: ignore[no-any-return]
+            if type(metaclass) is bases.Instance:
+                # Really do mean type, not isinstance, since subclasses of bases.Instance
+                # like Const or Dict should use metaclass.as_string below.
+                return str(metaclass)
+            return metaclass.as_string()  # type: ignore[no-any-return]
+
+        metaclass = node.declared_metaclass()
+        if not metaclass:
+            return
+
+        if isinstance(metaclass, nodes.FunctionDef):
+            # Try to infer the result.
+            metaclass = _infer_from_metaclass_constructor(node, metaclass)
+            if not metaclass:
+                # Don't do anything if we cannot infer the result.
+                return
+
+        if isinstance(metaclass, nodes.ClassDef):
+            if _is_invalid_metaclass(metaclass):
+                self.add_message(
+                    "invalid-metaclass", node=node, args=(_metaclass_name(metaclass),)
+                )
+        else:
+            self.add_message(
+                "invalid-metaclass", node=node, args=(_metaclass_name(metaclass),)
+            )
+
+    def visit_assignattr(self, node: nodes.AssignAttr) -> None:
+        if isinstance(node.assign_type(), nodes.AugAssign):
+            self.visit_attribute(node)
+
    def visit_delattr(self, node: nodes.DelAttr) -> None:
        """Check ``del obj.attr`` the same way as a plain attribute access."""
        self.visit_attribute(node)
+
+    # pylint: disable = too-many-branches, too-many-statements
+    @only_required_for_messages("no-member", "c-extension-no-member")
+    def visit_attribute(
+        self, node: nodes.Attribute | nodes.AssignAttr | nodes.DelAttr
+    ) -> None:
         """Check that the accessed attribute exists.

         to avoid too much false positives for now, we'll consider the code as
@@ -290,75 +1073,908 @@ class TypeChecker(BaseChecker):

         function/method, super call and metaclasses are ignored
         """
-        pass
+        if any(
+            pattern.match(name)
+            for name in (node.attrname, node.as_string())
+            for pattern in self._compiled_generated_members
+        ):
+            return
+
+        if is_postponed_evaluation_enabled(node) and is_node_in_type_annotation_context(
+            node
+        ):
+            return
+
+        try:
+            inferred = list(node.expr.infer())
+        except astroid.InferenceError:
+            return
+
+        # list of (node, nodename) which are missing the attribute
+        missingattr: set[tuple[SuccessfulInferenceResult, str | None]] = set()
+
+        non_opaque_inference_results: list[SuccessfulInferenceResult] = [
+            owner
+            for owner in inferred
+            if not isinstance(owner, (nodes.Unknown, util.UninferableBase))
+        ]
+        if (
+            len(non_opaque_inference_results) != len(inferred)
+            and self.linter.config.ignore_on_opaque_inference
+        ):
+            # There is an ambiguity in the inference. Since we can't
+            # make sure that we won't emit a false positive, we just stop
+            # whenever the inference returns an opaque inference object.
+            return
+        for owner in non_opaque_inference_results:
+            name = getattr(owner, "name", None)
+            if _is_owner_ignored(
+                owner,
+                name,
+                self.linter.config.ignored_classes,
+                self.linter.config.ignored_modules,
+            ):
+                continue
+
+            qualname = f"{owner.pytype()}.{node.attrname}"
+            if any(
+                pattern.match(qualname) for pattern in self._compiled_generated_members
+            ):
+                return
+
+            try:
+                attr_nodes = owner.getattr(node.attrname)
+            except AttributeError:
+                continue
+            except astroid.DuplicateBasesError:
+                continue
+            except astroid.NotFoundError:
+                # Avoid false positive in case a decorator supplies member.
+                if (
+                    isinstance(owner, (astroid.FunctionDef, astroid.BoundMethod))
+                    and owner.decorators
+                ):
+                    continue
+                # This can't be moved before the actual .getattr call,
+                # because there can be more values inferred and we are
+                # stopping after the first one which has the attribute in question.
+                # The problem is that if the first one has the attribute,
+                # but we continue to the next values which doesn't have the
+                # attribute, then we'll have a false positive.
+                # So call this only after the call has been made.
+                if not _emit_no_member(
+                    node,
+                    owner,
+                    name,
+                    self._mixin_class_rgx,
+                    ignored_mixins=(
+                        "no-member" in self.linter.config.ignored_checks_for_mixins
+                    ),
+                    ignored_none=self.linter.config.ignore_none,
+                ):
+                    continue
+                missingattr.add((owner, name))
+                continue
+            else:
+                for attr_node in attr_nodes:
+                    attr_parent = attr_node.parent
+                    # Skip augmented assignments
+                    try:
+                        if isinstance(attr_node.statement(), nodes.AugAssign) or (
+                            isinstance(attr_parent, nodes.Assign)
+                            and utils.is_augmented_assign(attr_parent)[0]
+                        ):
+                            continue
+                    except astroid.exceptions.StatementMissing:
+                        break
+                    # Skip self-referencing assignments
+                    if attr_parent is node.parent:
+                        continue
+                    break
+                else:
+                    missingattr.add((owner, name))
+                    continue
+            # stop on the first found
+            break
+        else:
+            # we have not found any node with the attributes, display the
+            # message for inferred nodes
+            done = set()
+            for owner, name in missingattr:
+                if isinstance(owner, astroid.Instance):
+                    actual = owner._proxied
+                else:
+                    actual = owner
+                if actual in done:
+                    continue
+                done.add(actual)
+
+                msg, hint = self._get_nomember_msgid_hint(node, owner)
+                self.add_message(
+                    msg,
+                    node=node,
+                    args=(owner.display_type(), name, node.attrname, hint),
+                    confidence=INFERENCE,
+                )

-    @only_required_for_messages('assignment-from-no-return',
-        'assignment-from-none', 'non-str-assignment-to-dunder-name')
-    def visit_assign(self, node: nodes.Assign) ->None:
+    def _get_nomember_msgid_hint(
+        self,
+        node: nodes.Attribute | nodes.AssignAttr | nodes.DelAttr,
+        owner: SuccessfulInferenceResult,
+    ) -> tuple[Literal["c-extension-no-member", "no-member"], str]:
+        suggestions_are_possible = self._suggestion_mode and isinstance(
+            owner, nodes.Module
+        )
+        if suggestions_are_possible and _is_c_extension(owner):
+            msg = "c-extension-no-member"
+            hint = ""
+        else:
+            msg = "no-member"
+            if self.linter.config.missing_member_hint:
+                hint = _missing_member_hint(
+                    owner,
+                    node.attrname,
+                    self.linter.config.missing_member_hint_distance,
+                    self.linter.config.missing_member_max_choices,
+                )
+            else:
+                hint = ""
+        return msg, hint  # type: ignore[return-value]
+
    @only_required_for_messages(
        "assignment-from-no-return",
        "assignment-from-none",
        "non-str-assignment-to-dunder-name",
    )
    def visit_assign(self, node: nodes.Assign) -> None:
        """Process assignments in the AST.

        Delegates to the check for assigning from a call that returns
        nothing/None and to the non-string ``__name__`` assignment check.
        """
        self._check_assignment_from_function_call(node)
        self._check_dundername_is_string(node)

-    def _check_assignment_from_function_call(self, node: nodes.Assign) ->None:
+    def _check_assignment_from_function_call(self, node: nodes.Assign) -> None:
         """When assigning to a function call, check that the function returns a valid
         value.
         """
-        pass
+        if not isinstance(node.value, nodes.Call):
+            return
+
+        function_node = safe_infer(node.value.func)
+        funcs = (nodes.FunctionDef, astroid.UnboundMethod, astroid.BoundMethod)
+        if not isinstance(function_node, funcs):
+            return
+
+        # Unwrap to get the actual function node object
+        if isinstance(function_node, astroid.BoundMethod) and isinstance(
+            function_node._proxied, astroid.UnboundMethod
+        ):
+            function_node = function_node._proxied._proxied
+
+        # Make sure that it's a valid function that we can analyze.
+        # Ordered from less expensive to more expensive checks.
+        if (
+            not function_node.is_function
+            or function_node.decorators
+            or self._is_ignored_function(function_node)
+        ):
+            return
+
+        # Handle builtins such as list.sort() or dict.update()
+        if self._is_builtin_no_return(node):
+            self.add_message(
+                "assignment-from-no-return", node=node, confidence=INFERENCE
+            )
+            return
+
+        if not function_node.root().fully_defined():
+            return
+
+        return_nodes = list(
+            function_node.nodes_of_class(nodes.Return, skip_klass=nodes.FunctionDef)
+        )
+        if not return_nodes:
+            self.add_message("assignment-from-no-return", node=node)
+        else:
+            for ret_node in return_nodes:
+                if not (
+                    isinstance(ret_node.value, nodes.Const)
+                    and ret_node.value.value is None
+                    or ret_node.value is None
+                ):
+                    break
+            else:
+                self.add_message("assignment-from-none", node=node)

-    def _check_dundername_is_string(self, node: nodes.Assign) ->None:
+    @staticmethod
+    def _is_ignored_function(
+        function_node: nodes.FunctionDef | bases.UnboundMethod,
+    ) -> bool:
+        return (
+            isinstance(function_node, nodes.AsyncFunctionDef)
+            or utils.is_error(function_node)
+            or function_node.is_generator()
+            or function_node.is_abstract(pass_is_abstract=False)
+        )
+
+    @staticmethod
+    def _is_builtin_no_return(node: nodes.Assign) -> bool:
+        return (
+            isinstance(node.value, nodes.Call)
+            and isinstance(node.value.func, nodes.Attribute)
+            and bool(inferred := utils.safe_infer(node.value.func.expr))
+            and isinstance(inferred, bases.Instance)
+            and node.value.func.attrname
+            in BUILTINS_IMPLICIT_RETURN_NONE.get(inferred.pytype(), ())
+        )
+
+    def _check_dundername_is_string(self, node: nodes.Assign) -> None:
         """Check a string is assigned to self.__name__."""
-        pass
+        # Check the left-hand side of the assignment is <something>.__name__
+        lhs = node.targets[0]
+        if not isinstance(lhs, nodes.AssignAttr):
+            return
+        if not lhs.attrname == "__name__":
+            return
+
+        # If the right-hand side is not a string
+        rhs = node.value
+        if isinstance(rhs, nodes.Const) and isinstance(rhs.value, str):
+            return
+        inferred = utils.safe_infer(rhs)
+        if not inferred:
+            return
+        if not (isinstance(inferred, nodes.Const) and isinstance(inferred.value, str)):
+            # Add the message
+            self.add_message("non-str-assignment-to-dunder-name", node=node)

-    def _check_uninferable_call(self, node: nodes.Call) ->None:
+    def _check_uninferable_call(self, node: nodes.Call) -> None:
         """Check that the given uninferable Call node does not
         call an actual function.
         """
-        pass
+        if not isinstance(node.func, nodes.Attribute):
+            return
+
+        # Look for properties. First, obtain
+        # the lhs of the Attribute node and search the attribute
+        # there. If that attribute is a property or a subclass of properties,
+        # then most likely it's not callable.

-    def _check_argument_order(self, node: nodes.Call, call_site: arguments.
-        CallSite, called: CallableObjects, called_param_names: list[str | None]
-        ) ->None:
+        expr = node.func.expr
+        klass = safe_infer(expr)
+        if not isinstance(klass, astroid.Instance):
+            return
+
+        try:
+            attrs = klass._proxied.getattr(node.func.attrname)
+        except astroid.NotFoundError:
+            return
+
+        for attr in attrs:
+            if not isinstance(attr, nodes.FunctionDef):
+                continue
+
+            # Decorated, see if it is decorated with a property.
+            # Also, check the returns and see if they are callable.
+            if decorated_with_property(attr):
+                try:
+                    call_results = list(attr.infer_call_result(node))
+                except astroid.InferenceError:
+                    continue
+
+                if all(
+                    isinstance(return_node, util.UninferableBase)
+                    for return_node in call_results
+                ):
+                    # We were unable to infer return values of the call, skipping
+                    continue
+
+                if any(return_node.callable() for return_node in call_results):
+                    # Only raise this issue if *all* the inferred values are not callable
+                    continue
+
+                self.add_message("not-callable", node=node, args=node.func.as_string())
+
+    def _check_argument_order(
+        self,
+        node: nodes.Call,
+        call_site: arguments.CallSite,
+        called: CallableObjects,
+        called_param_names: list[str | None],
+    ) -> None:
         """Match the supplied argument names against the function parameters.

         Warn if some argument names are not in the same order as they are in
         the function signature.
         """
-        pass
+        # Check for called function being an object instance function
+        # If so, ignore the initial 'self' argument in the signature
+        try:
+            is_classdef = isinstance(called.parent, nodes.ClassDef)
+            if is_classdef and called_param_names[0] == "self":
+                called_param_names = called_param_names[1:]
+        except IndexError:
+            return

-    def visit_call(self, node: nodes.Call) ->None:
+        try:
+            # extract argument names, if they have names
+            calling_parg_names = [p.name for p in call_site.positional_arguments]
+
+            # Additionally, get names of keyword arguments to use in a full match
+            # against parameters
+            calling_kwarg_names = [
+                arg.name for arg in call_site.keyword_arguments.values()
+            ]
+        except AttributeError:
+            # the type of arg does not provide a `.name`. In this case we
+            # stop checking for out-of-order arguments because it is only relevant
+            # for named variables.
+            return
+
+        # Don't check for ordering if there is an unmatched arg or param
+        arg_set = set(calling_parg_names) | set(calling_kwarg_names)
+        param_set = set(called_param_names)
+        if arg_set != param_set:
+            return
+
+        # Warn based on the equality of argument ordering
+        if calling_parg_names != called_param_names[: len(calling_parg_names)]:
+            self.add_message("arguments-out-of-order", node=node, args=())
+
+    def _check_isinstance_args(self, node: nodes.Call) -> None:
+        if len(node.args) != 2:
+            # isinstance called with wrong number of args
+            return
+
+        second_arg = node.args[1]
+        if _is_invalid_isinstance_type(second_arg):
+            self.add_message(
+                "isinstance-second-argument-not-valid-type",
+                node=node,
+                confidence=INFERENCE,
+            )
+
+    # pylint: disable = too-many-branches, too-many-locals, too-many-statements
+    def visit_call(self, node: nodes.Call) -> None:
         """Check that called functions/methods are inferred to callable objects,
         and that passed arguments match the parameters in the inferred function.
         """
-        pass
+        # Infer the called object; a None/uninferable result is handled by
+        # _check_not_callable below, so we continue regardless.
+        called = safe_infer(node.func, compare_constructors=True)
+
+        self._check_not_callable(node, called)
+
+        try:
+            called, implicit_args, callable_name = _determine_callable(called)
+        except ValueError:
+            # Any error occurred during determining the function type, most of
+            # those errors are handled by different warnings.
+            return
+
+        if called.args.args is None:
+            if called.name == "isinstance":
+                # Verify whether second argument of isinstance is a valid type
+                self._check_isinstance_args(node)
+            # Built-in functions have no argument information.
+            return
+
+        if len(called.argnames()) != len(set(called.argnames())):
+            # Duplicate parameter name (see duplicate-argument).  We can't really
+            # make sense of the function call in this case, so just return.
+            return
+
+        # Build the set of keyword arguments, checking for duplicate keywords,
+        # and count the positional arguments.
+        call_site = astroid.arguments.CallSite.from_call(node)
+
+        # Warn about duplicated keyword arguments, such as `f=24, **{'f': 24}`
+        for keyword in call_site.duplicated_keywords:
+            self.add_message("repeated-keyword", node=node, args=(keyword,))
+
+        if call_site.has_invalid_arguments() or call_site.has_invalid_keywords():
+            # Can't make sense of this.
+            return
+
+        # Has the function signature changed in ways we cannot reliably detect?
+        if hasattr(called, "decorators") and decorated_with(
+            called, self.linter.config.signature_mutators
+        ):
+            return
+
+        num_positional_args = len(call_site.positional_arguments)
+        keyword_args = list(call_site.keyword_arguments.keys())
+        overload_function = is_overload_stub(called)
+
+        # Determine if we don't have a context for our call and we use variadics.
+        node_scope = node.scope()
+        if isinstance(node_scope, (nodes.Lambda, nodes.FunctionDef)):
+            has_no_context_positional_variadic = _no_context_variadic_positional(
+                node, node_scope
+            )
+            has_no_context_keywords_variadic = _no_context_variadic_keywords(
+                node, node_scope
+            )
+        else:
+            has_no_context_positional_variadic = has_no_context_keywords_variadic = (
+                False
+            )
+
+        # These are coming from the functools.partial implementation in astroid
+        already_filled_positionals = getattr(called, "filled_positionals", 0)
+        already_filled_keywords = getattr(called, "filled_keywords", {})
+
+        keyword_args += list(already_filled_keywords)
+        num_positional_args += implicit_args + already_filled_positionals
+
+        # Decrement `num_positional_args` by 1 when a function call is assigned to a class attribute
+        # inside the class where the function is defined.
+        # This avoids emitting `too-many-function-args` since `num_positional_args`
+        # includes an implicit `self` argument which is not present in `called.args`.
+        if (
+            isinstance(node.frame(), nodes.ClassDef)
+            and isinstance(called, nodes.FunctionDef)
+            and called in node.frame().body
+            and num_positional_args > 0
+            and "builtins.staticmethod" not in called.decoratornames()
+        ):
+            num_positional_args -= 1
+
+        # Analyze the list of formal parameters.
+        args = list(itertools.chain(called.args.posonlyargs or (), called.args.args))
+        num_mandatory_parameters = len(args) - len(called.args.defaults)
+        parameters: list[tuple[tuple[str | None, nodes.NodeNG | None], bool]] = []
+        parameter_name_to_index = {}
+        for i, arg in enumerate(args):
+            name = arg.name
+            parameter_name_to_index[name] = i
+            if i >= num_mandatory_parameters:
+                defval = called.args.defaults[i - num_mandatory_parameters]
+            else:
+                defval = None
+            parameters.append(((name, defval), False))
+
+        # Map each keyword-only parameter to [default, assigned?].
+        kwparams = {}
+        for i, arg in enumerate(called.args.kwonlyargs):
+            if isinstance(arg, nodes.Keyword):
+                name = arg.arg
+            else:
+                assert isinstance(arg, nodes.AssignName)
+                name = arg.name
+            kwparams[name] = [called.args.kw_defaults[i], False]
+
+        self._check_argument_order(
+            node, call_site, called, [p[0][0] for p in parameters]
+        )
+
+        # 1. Match the positional arguments.
+        for i in range(num_positional_args):
+            if i < len(parameters):
+                parameters[i] = (parameters[i][0], True)
+            elif called.args.vararg is not None:
+                # The remaining positional arguments get assigned to the *args
+                # parameter.
+                break
+            elif not overload_function:
+                # Too many positional arguments.
+                self.add_message(
+                    "too-many-function-args", node=node, args=(callable_name,)
+                )
+                break
+
+        # 2. Match the keyword arguments.
+        for keyword in keyword_args:
+            # Skip if `keyword` is the same name as a positional-only parameter
+            # and a `**kwargs` parameter exists.
+            if called.args.kwarg and keyword in [
+                arg.name for arg in called.args.posonlyargs
+            ]:
+                self.add_message(
+                    "kwarg-superseded-by-positional-arg",
+                    node=node,
+                    args=(keyword, f"**{called.args.kwarg}"),
+                    confidence=HIGH,
+                )
+                continue
+            if keyword in parameter_name_to_index:
+                i = parameter_name_to_index[keyword]
+                if parameters[i][1]:
+                    # Duplicate definition of function parameter.
+
+                    # Might be too hard-coded, but this can actually
+                    # happen when using str.format and `self` is passed
+                    # by keyword argument, as in `.format(self=self)`.
+                    # It's perfectly valid to so, so we're just skipping
+                    # it if that's the case.
+                    if not (keyword == "self" and called.qname() in STR_FORMAT):
+                        self.add_message(
+                            "redundant-keyword-arg",
+                            node=node,
+                            args=(keyword, callable_name),
+                        )
+                else:
+                    parameters[i] = (parameters[i][0], True)
+            elif keyword in kwparams:
+                if kwparams[keyword][1]:
+                    # Duplicate definition of function parameter.
+                    self.add_message(
+                        "redundant-keyword-arg",
+                        node=node,
+                        args=(keyword, callable_name),
+                    )
+                else:
+                    kwparams[keyword][1] = True
+            elif called.args.kwarg is not None:
+                # The keyword argument gets assigned to the **kwargs parameter.
+                pass
+            elif isinstance(
+                called, nodes.FunctionDef
+            ) and self._keyword_argument_is_in_all_decorator_returns(called, keyword):
+                pass
+            elif not overload_function:
+                # Unexpected keyword argument.
+                self.add_message(
+                    "unexpected-keyword-arg", node=node, args=(keyword, callable_name)
+                )
+
+        # 3. Match the **kwargs, if any.
+        if node.kwargs:
+            for i, [(name, _defval), _assigned] in enumerate(parameters):
+                # Assume that *kwargs provides values for all remaining
+                # unassigned named parameters.
+                if name is not None:
+                    parameters[i] = (parameters[i][0], True)
+                else:
+                    # **kwargs can't assign to tuples.
+                    pass
+
+        # Check that any parameters without a default have been assigned
+        # values.
+        for [(name, defval), assigned] in parameters:
+            if (defval is None) and not assigned:
+                display_name = "<tuple>" if name is None else repr(name)
+                if not has_no_context_positional_variadic and not overload_function:
+                    self.add_message(
+                        "no-value-for-parameter",
+                        node=node,
+                        args=(display_name, callable_name),
+                    )
+
+        for name, val in kwparams.items():
+            defval, assigned = val
+            if (
+                defval is None
+                and not assigned
+                and not has_no_context_keywords_variadic
+                and not overload_function
+            ):
+                self.add_message(
+                    "missing-kwoa",
+                    node=node,
+                    args=(name, callable_name),
+                    confidence=INFERENCE,
+                )

     @staticmethod
-    def _keyword_argument_is_in_all_decorator_returns(func: nodes.
-        FunctionDef, keyword: str) ->bool:
+    def _keyword_argument_is_in_all_decorator_returns(
+        func: nodes.FunctionDef, keyword: str
+    ) -> bool:
         """Check if the keyword argument exists in all signatures of the
         return values of all decorators of the function.
         """
-        pass
+        if not func.decorators:
+            return False
+
+        for decorator in func.decorators.nodes:
+            inferred = safe_infer(decorator)
+
+            # If we can't infer the decorator we assume it consumes the
+            # keyword, so we don't raise false positives
+            if not inferred:
+                return True
+
+            # We only check arguments of function decorators
+            if not isinstance(inferred, nodes.FunctionDef):
+                return False
+
+            for return_value in inferred.infer_call_result(caller=None):
+                # infer_call_result() returns nodes.Const.None for None return values
+                # so this also catches non-returning decorators
+                if not isinstance(return_value, nodes.FunctionDef):
+                    return False
+
+                # If the return value uses a kwarg the keyword will be consumed
+                if return_value.args.kwarg:
+                    continue
+
+                # Check if the keyword is another type of argument
+                if return_value.args.is_argument(keyword):
+                    continue
 
-    def _check_not_callable(self, node: nodes.Call, inferred_call: (nodes.
-        NodeNG | None)) ->None:
+                # A wrapped signature without the keyword: not consumed.
+                return False
+
+        return True
+
+    def _check_invalid_sequence_index(self, subscript: nodes.Subscript) -> None:
+        """Emit invalid-sequence-index when a builtin sequence type is indexed
+        by something that is not an int, a slice, or an __index__-able object.
+        """
+        # Look for index operations where the parent is a sequence type.
+        # If the types can be determined, only allow indices to be int,
+        # slice or instances with __index__.
+        parent_type = safe_infer(subscript.value)
+        if not isinstance(
+            parent_type, (nodes.ClassDef, astroid.Instance)
+        ) or not has_known_bases(parent_type):
+            return None
+
+        # Determine what method on the parent this index will use
+        # The parent of this node will be a Subscript, and the parent of that
+        # node determines if the Subscript is a get, set, or delete operation.
+        if subscript.ctx is astroid.Context.Store:
+            methodname = "__setitem__"
+        elif subscript.ctx is astroid.Context.Del:
+            methodname = "__delitem__"
+        else:
+            methodname = "__getitem__"
+
+        # Check if this instance's __getitem__, __setitem__, or __delitem__, as
+        # appropriate to the statement, is implemented in a builtin sequence
+        # type. This way we catch subclasses of sequence types but skip classes
+        # that override __getitem__ and which may allow non-integer indices.
+        try:
+            methods = astroid.interpreter.dunder_lookup.lookup(parent_type, methodname)
+            if isinstance(methods, util.UninferableBase):
+                return None
+            itemmethod = methods[0]
+        except (
+            astroid.AttributeInferenceError,
+            IndexError,
+        ):
+            return None
+        if (
+            not isinstance(itemmethod, nodes.FunctionDef)
+            or itemmethod.root().name != "builtins"
+            or not itemmethod.parent
+            or itemmethod.parent.frame().name not in SEQUENCE_TYPES
+        ):
+            return None
+
+        index_type = safe_infer(subscript.slice)
+        if index_type is None or isinstance(index_type, util.UninferableBase):
+            return None
+        # Constants must be of type int
+        if isinstance(index_type, nodes.Const):
+            if isinstance(index_type.value, int):
+                return None
+        # Instance values must be int, slice, or have an __index__ method
+        elif isinstance(index_type, astroid.Instance):
+            if index_type.pytype() in {"builtins.int", "builtins.slice"}:
+                return None
+            try:
+                index_type.getattr("__index__")
+                return None
+            except astroid.NotFoundError:
+                pass
+        elif isinstance(index_type, nodes.Slice):
+            # A slice can be present
+            # here after inferring the index node, which could
+            # be a `slice(...)` call for instance.
+            return self._check_invalid_slice_index(index_type)
+
+        # Anything else is an error
+        self.add_message("invalid-sequence-index", node=subscript)
+        return None
+
+    def _check_not_callable(
+        self, node: nodes.Call, inferred_call: nodes.NodeNG | None
+    ) -> None:
         """Checks to see if the not-callable message should be emitted.
 
         Only functions, generators and objects defining __call__ are "callable"
         We ignore instances of descriptors since astroid cannot properly handle them yet
         """
-        pass
+        # Handle uninferable calls
+        if not inferred_call or inferred_call.callable():
+            self._check_uninferable_call(node)
+            return
+
+        if not isinstance(inferred_call, astroid.Instance):
+            self.add_message("not-callable", node=node, args=node.func.as_string())
+            return
+
+        # Don't emit if we can't make sure this object is callable.
+        if not has_known_bases(inferred_call):
+            return
+
+        if inferred_call.parent and isinstance(inferred_call.scope(), nodes.ClassDef):
+            # Ignore descriptor instances
+            if "__get__" in inferred_call.locals:
+                return
+            # NamedTuple instances are callable
+            if inferred_call.qname() == "typing.NamedTuple":
+                return
+
+        # All exemptions exhausted: the instance really is not callable.
+        self.add_message("not-callable", node=node, args=node.func.as_string())
+
+    def _check_invalid_slice_index(self, node: nodes.Slice) -> None:
+        """Validate the lower/upper/step parts of a slice expression."""
+        # Check the type of each part of the slice
+        invalid_slices_nodes: list[nodes.NodeNG] = []
+        for index in (node.lower, node.upper, node.step):
+            if index is None:
+                continue
+
+            index_type = safe_infer(index)
+            if index_type is None or isinstance(index_type, util.UninferableBase):
+                continue

-    @only_required_for_messages('invalid-unary-operand-type')
-    def visit_unaryop(self, node: nodes.UnaryOp) ->None:
+            # Constants must be of type int or None
+            if isinstance(index_type, nodes.Const):
+                if isinstance(index_type.value, (int, type(None))):
+                    continue
+            # Instance values must be of type int, None or an object
+            # with __index__
+            elif isinstance(index_type, astroid.Instance):
+                if index_type.pytype() in {"builtins.int", "builtins.NoneType"}:
+                    continue
+
+                try:
+                    index_type.getattr("__index__")
+                    return
+                except astroid.NotFoundError:
+                    pass
+            invalid_slices_nodes.append(index)
+
+        invalid_slice_step = (
+            node.step and isinstance(node.step, nodes.Const) and node.step.value == 0
+        )
+
+        if not (invalid_slices_nodes or invalid_slice_step):
+            return
+
+        # Anything else is an error, unless the object that is indexed
+        # is a custom object, which knows how to handle this kind of slices
+        parent = node.parent
+        if isinstance(parent, nodes.Subscript):
+            inferred = safe_infer(parent.value)
+            if inferred is None or isinstance(inferred, util.UninferableBase):
+                # Don't know what this is
+                return
+            known_objects = (
+                nodes.List,
+                nodes.Dict,
+                nodes.Tuple,
+                astroid.objects.FrozenSet,
+                nodes.Set,
+            )
+            if not (
+                isinstance(inferred, known_objects)
+                or isinstance(inferred, nodes.Const)
+                and inferred.pytype() in {"builtins.str", "builtins.bytes"}
+                or isinstance(inferred, astroid.bases.Instance)
+                and inferred.pytype() == "builtins.range"
+            ):
+                # Might be an instance that knows how to handle this slice object
+                return
+        for snode in invalid_slices_nodes:
+            self.add_message("invalid-slice-index", node=snode)
+        if invalid_slice_step:
+            self.add_message("invalid-slice-step", node=node.step, confidence=HIGH)
+
+    @only_required_for_messages("not-context-manager")
+    def visit_with(self, node: nodes.With) -> None:
+        """Check that every context manager of a `with` supports the protocol."""
+        for ctx_mgr, _ in node.items:
+            context = astroid.context.InferenceContext()
+            inferred = safe_infer(ctx_mgr, context=context)
+            if inferred is None or isinstance(inferred, util.UninferableBase):
+                continue
+
+            if isinstance(inferred, astroid.bases.Generator):
+                # Check if we are dealing with a function decorated
+                # with contextlib.contextmanager.
+                if decorated_with(
+                    inferred.parent, self.linter.config.contextmanager_decorators
+                ):
+                    continue
+                # If the parent of the generator is not the context manager itself,
+                # that means that it could have been returned from another
+                # function which was the real context manager.
+                # The following approach is more of a hack rather than a real
+                # solution: walk all the inferred statements for the
+                # given *ctx_mgr* and if you find one function scope
+                # which is decorated, consider it to be the real
+                # manager and give up, otherwise emit not-context-manager.
+                # See the test file for not_context_manager for a couple
+                # of self explaining tests.
+
+                # Retrieve node from all previously visited nodes in the
+                # inference history
+                for inferred_path, _ in context.path:
+                    if not inferred_path:
+                        continue
+                    if isinstance(inferred_path, nodes.Call):
+                        scope = safe_infer(inferred_path.func)
+                    else:
+                        scope = inferred_path.scope()
+                    if not isinstance(scope, nodes.FunctionDef):
+                        continue
+                    if decorated_with(
+                        scope, self.linter.config.contextmanager_decorators
+                    ):
+                        break
+                else:
+                    self.add_message(
+                        "not-context-manager", node=node, args=(inferred.name,)
+                    )
+            else:
+                try:
+                    inferred.getattr("__enter__")
+                    inferred.getattr("__exit__")
+                except astroid.NotFoundError:
+                    if isinstance(inferred, astroid.Instance):
+                        # If we do not know the bases of this class,
+                        # just skip it.
+                        if not has_known_bases(inferred):
+                            continue
+                        # Just ignore mixin classes.
+                        if (
+                            "not-context-manager"
+                            in self.linter.config.ignored_checks_for_mixins
+                        ):
+                            if inferred.name[-5:].lower() == "mixin":
+                                continue
+
+                    self.add_message(
+                        "not-context-manager", node=node, args=(inferred.name,)
+                    )
+
+    @only_required_for_messages("invalid-unary-operand-type")
+    def visit_unaryop(self, node: nodes.UnaryOp) -> None:
         """Detect TypeErrors for unary operands."""
-        pass
+        # Emit one message per detected type error.
+        for error in node.type_errors():
+            # Let the error customize its output.
+            self.add_message("invalid-unary-operand-type", args=str(error), node=node)

-    def _detect_unsupported_alternative_union_syntax(self, node: nodes.BinOp
-        ) ->None:
+    @only_required_for_messages("unsupported-binary-operation")
+    def visit_binop(self, node: nodes.BinOp) -> None:
+        if node.op == "|":
+            self._detect_unsupported_alternative_union_syntax(node)
+
+    def _detect_unsupported_alternative_union_syntax(self, node: nodes.BinOp) -> None:
         """Detect if unsupported alternative Union syntax (PEP 604) was used."""
-        pass
+        if self._py310_plus:  # 310+ supports the new syntax
+            return
+
+        if isinstance(
+            node.parent, TYPE_ANNOTATION_NODES_TYPES
+        ) and not is_postponed_evaluation_enabled(node):
+            # Use in type annotations only allowed if
+            # postponed evaluation is enabled.
+            self._check_unsupported_alternative_union_syntax(node)
 
-    def _includes_version_compatible_overload(self, attrs: list[nodes.NodeNG]
-        ) ->bool:
+        if isinstance(
+            node.parent,
+            (
+                nodes.Assign,
+                nodes.Call,
+                nodes.Keyword,
+                nodes.Dict,
+                nodes.Tuple,
+                nodes.Set,
+                nodes.List,
+                nodes.BinOp,
+            ),
+        ):
+            # Check other contexts the syntax might appear, but are invalid.
+            # Make sure to filter context if postponed evaluation is enabled
+            # and parent is allowed node type.
+            allowed_nested_syntax = False
+            if is_postponed_evaluation_enabled(node):
+                parent_node = node.parent
+                # Climb the parent chain looking for a type-annotation context.
+                while True:
+                    if isinstance(parent_node, TYPE_ANNOTATION_NODES_TYPES):
+                        allowed_nested_syntax = True
+                        break
+                    parent_node = parent_node.parent
+                    if isinstance(parent_node, nodes.Module):
+                        break
+            if not allowed_nested_syntax:
+                self._check_unsupported_alternative_union_syntax(node)
+
+    def _includes_version_compatible_overload(self, attrs: list[nodes.NodeNG]) -> bool:
         """Check if a set of overloads of an operator includes one that
         can be relied upon for our configured Python version.
 
@@ -367,27 +1983,224 @@ class TypeChecker(BaseChecker):
         existence of __or__ / __ror__ on builtins.type, but these aren't
         available in the configured version of Python.
         """
-        pass
+        # builtins.type.__or__/__ror__ only help when running 3.10+.
+        is_py310_builtin = all(
+            isinstance(attr, (nodes.FunctionDef, astroid.BoundMethod))
+            and attr.parent.qname() == "builtins.type"
+            for attr in attrs
+        )
+        return not is_py310_builtin or self._py310_plus
+
+    def _recursive_search_for_classdef_type(
+        self, node: nodes.ClassDef, operation: Literal["__or__", "__ror__"]
+    ) -> bool | VERSION_COMPATIBLE_OVERLOAD:
+        if not isinstance(node, nodes.ClassDef):
+            return False
+        try:
+            attrs = node.getattr(operation)
+        except astroid.NotFoundError:
+            return True
+        if self._includes_version_compatible_overload(attrs):
+            return VERSION_COMPATIBLE_OVERLOAD_SENTINEL
+        return True

-    def _check_unsupported_alternative_union_syntax(self, node: nodes.BinOp
-        ) ->None:
+    def _check_unsupported_alternative_union_syntax(self, node: nodes.BinOp) -> None:
         """Check if left or right node is of type `type`.
 
         If either is, and doesn't support an or operator via a metaclass,
         infer that this is a mistaken attempt to use alternative union
         syntax when not supported.
         """
-        pass
+        msg = "unsupported operand type(s) for |"
+        # Determine the runtime types of both operands.
+        left_obj = astroid.helpers.object_type(node.left)
+        right_obj = astroid.helpers.object_type(node.right)
+        left_is_type = self._recursive_search_for_classdef_type(left_obj, "__or__")
+        if left_is_type is VERSION_COMPATIBLE_OVERLOAD_SENTINEL:
+            return
+        right_is_type = self._recursive_search_for_classdef_type(right_obj, "__ror__")
+        if right_is_type is VERSION_COMPATIBLE_OVERLOAD_SENTINEL:
+            return
+
+        if left_is_type or right_is_type:
+            self.add_message(
+                "unsupported-binary-operation",
+                args=msg,
+                node=node,
+                confidence=INFERENCE,
+            )

-    @only_required_for_messages('unsupported-binary-operation')
-    def _visit_binop(self, node: nodes.BinOp) ->None:
+    # TODO: This check was disabled (by adding the leading underscore)
+    # due to false positives several years ago - can we re-enable it?
+    # https://github.com/pylint-dev/pylint/issues/6359
+    @only_required_for_messages("unsupported-binary-operation")
+    def _visit_binop(self, node: nodes.BinOp) -> None:
         """Detect TypeErrors for binary arithmetic operands."""
-        pass
+        # Delegates to the shared BinOp/AugAssign error reporter.
+        self._check_binop_errors(node)

-    @only_required_for_messages('unsupported-binary-operation')
-    def _visit_augassign(self, node: nodes.AugAssign) ->None:
+    # TODO: This check was disabled (by adding the leading underscore)
+    # due to false positives several years ago - can we re-enable it?
+    # https://github.com/pylint-dev/pylint/issues/6359
+    @only_required_for_messages("unsupported-binary-operation")
+    def _visit_augassign(self, node: nodes.AugAssign) -> None:
         """Detect TypeErrors for augmented binary arithmetic operands."""
-        pass
+        # Same reporting logic as _visit_binop.
+        self._check_binop_errors(node)
+
+    def _check_binop_errors(self, node: nodes.BinOp | nodes.AugAssign) -> None:
+        for error in node.type_errors():
+            # Let the error customize its output.
+            if any(
+                isinstance(obj, nodes.ClassDef) and not has_known_bases(obj)
+                for obj in (error.left_type, error.right_type)
+            ):
+                continue
+            self.add_message("unsupported-binary-operation", args=str(error), node=node)
+
+    def _check_membership_test(self, node: nodes.NodeNG) -> None:
+        if is_inside_abstract_class(node):
+            return
+        if is_comprehension(node):
+            return
+        inferred = safe_infer(node)
+        if inferred is None or isinstance(inferred, util.UninferableBase):
+            return
+        if not supports_membership_test(inferred):
+            self.add_message(
+                "unsupported-membership-test", args=node.as_string(), node=node
+            )
+
+    @only_required_for_messages("unsupported-membership-test")
+    def visit_compare(self, node: nodes.Compare) -> None:
+        if len(node.ops) != 1:
+            return
+
+        op, right = node.ops[0]
+        if op in {"in", "not in"}:
+            self._check_membership_test(right)
+
+    @only_required_for_messages("unhashable-member")
+    def visit_dict(self, node: nodes.Dict) -> None:
+        for k, _ in node.items:
+            if not is_hashable(k):
+                self.add_message(
+                    "unhashable-member",
+                    node=k,
+                    args=(k.as_string(), "key", "dict"),
+                    confidence=INFERENCE,
+                )
+
+    @only_required_for_messages("unhashable-member")
+    def visit_set(self, node: nodes.Set) -> None:
+        for element in node.elts:
+            if not is_hashable(element):
+                self.add_message(
+                    "unhashable-member",
+                    node=element,
+                    args=(element.as_string(), "member", "set"),
+                    confidence=INFERENCE,
+                )
+
+    @only_required_for_messages(
+        "unsubscriptable-object",
+        "unsupported-assignment-operation",
+        "unsupported-delete-operation",
+        "unhashable-member",
+        "invalid-sequence-index",
+        "invalid-slice-index",
+        "invalid-slice-step",
+    )
+    def visit_subscript(self, node: nodes.Subscript) -> None:
+        """Check subscripting: index validity, hashable dict keys, and whether
+        the subscripted object supports get/set/del item access.
+        """
+        self._check_invalid_sequence_index(node)
+
+        supported_protocol: Callable[[Any, Any], bool] | None = None
+        if isinstance(node.value, (nodes.ListComp, nodes.DictComp)):
+            return
+
+        if isinstance(node.value, nodes.Dict):
+            # Assert dict key is hashable
+            if not is_hashable(node.slice):
+                self.add_message(
+                    "unhashable-member",
+                    node=node.value,
+                    args=(node.slice.as_string(), "key", "dict"),
+                    confidence=INFERENCE,
+                )
+
+        # Pick the dunder protocol and message matching the access kind.
+        if node.ctx == astroid.Context.Load:
+            supported_protocol = supports_getitem
+            msg = "unsubscriptable-object"
+        elif node.ctx == astroid.Context.Store:
+            supported_protocol = supports_setitem
+            msg = "unsupported-assignment-operation"
+        elif node.ctx == astroid.Context.Del:
+            supported_protocol = supports_delitem
+            msg = "unsupported-delete-operation"
+
+        if isinstance(node.value, nodes.SetComp):
+            self.add_message(msg, args=node.value.as_string(), node=node.value)
+            return
+
+        if is_inside_abstract_class(node):
+            return
+
+        inferred = safe_infer(node.value)
+
+        if inferred is None or isinstance(inferred, util.UninferableBase):
+            return
+
+        if getattr(inferred, "decorators", None):
+            first_decorator = astroid.util.safe_infer(inferred.decorators.nodes[0])
+            if isinstance(first_decorator, nodes.ClassDef):
+                inferred = first_decorator.instantiate_class()
+            else:
+                return  # It would be better to handle function
+                # decorators, but let's start slow.
+
+        if (
+            supported_protocol
+            and not supported_protocol(inferred, node)
+            and not utils.in_type_checking_block(node)
+        ):
+            self.add_message(msg, args=node.value.as_string(), node=node.value)
+
+    @only_required_for_messages("dict-items-missing-iter")
+    def visit_for(self, node: nodes.For) -> None:
+        if not isinstance(node.target, nodes.Tuple):
+            # target is not a tuple
+            return
+        if not len(node.target.elts) == 2:
+            # target is not a tuple of two elements
+            return
+
+        iterable = node.iter
+        if not isinstance(iterable, nodes.Name):
+            # it's not a bare variable
+            return
+
+        inferred = safe_infer(iterable)
+        if not inferred:
+            return
+        if not isinstance(inferred, nodes.Dict):
+            # the iterable is not a dict
+            return
+
+        if all(isinstance(i[0], nodes.Tuple) for i in inferred.items):
+            # if all keys are tuples
+            return
+
+        self.add_message("dict-iter-missing-items", node=node)
+
+    @only_required_for_messages("await-outside-async")
+    def visit_await(self, node: nodes.Await) -> None:
+        """Check that `await` only appears inside an async function."""
+        self._check_await_outside_coroutine(node)
+
+    def _check_await_outside_coroutine(self, node: nodes.Await) -> None:
+        node_scope = node.scope()
+        while not isinstance(node_scope, nodes.Module):
+            if isinstance(node_scope, nodes.AsyncFunctionDef):
+                return
+            if isinstance(node_scope, nodes.FunctionDef):
+                break
+            node_scope = node_scope.parent.scope()
+        self.add_message("await-outside-async", node=node)


 class IterableChecker(BaseChecker):
@@ -401,12 +2214,105 @@ class IterableChecker(BaseChecker):
     - generator expressions
     Also checks for non-mappings in function call kwargs.
     """
-    name = 'typecheck'
-    msgs = {'E1133': (
-        'Non-iterable value %s is used in an iterating context',
-        'not-an-iterable',
-        'Used when a non-iterable value is used in place where iterable is expected'
-        ), 'E1134': ('Non-mapping value %s is used in a mapping context',
-        'not-a-mapping',
-        'Used when a non-mapping value is used in place where mapping is expected'
-        )}
+
+    name = "typecheck"
+
+    # Messages emitted by this checker, keyed by message id.
+    msgs = {
+        "E1133": (
+            "Non-iterable value %s is used in an iterating context",
+            "not-an-iterable",
+            "Used when a non-iterable value is used in place where "
+            "iterable is expected",
+        ),
+        "E1134": (
+            "Non-mapping value %s is used in a mapping context",
+            "not-a-mapping",
+            "Used when a non-mapping value is used in place where "
+            "mapping is expected",
+        ),
+    }
+
+    @staticmethod
+    def _is_asyncio_coroutine(node: nodes.NodeNG) -> bool:
+        if not isinstance(node, nodes.Call):
+            return False
+
+        inferred_func = safe_infer(node.func)
+        if not isinstance(inferred_func, nodes.FunctionDef):
+            return False
+        if not inferred_func.decorators:
+            return False
+        for decorator in inferred_func.decorators.nodes:
+            inferred_decorator = safe_infer(decorator)
+            if not isinstance(inferred_decorator, nodes.FunctionDef):
+                continue
+            if inferred_decorator.qname() != ASYNCIO_COROUTINE:
+                continue
+            return True
+        return False
+
+    def _check_iterable(self, node: nodes.NodeNG, check_async: bool = False) -> None:
+        if is_inside_abstract_class(node):
+            return
+        inferred = safe_infer(node)
+        if not inferred or is_comprehension(inferred):
+            return
+        if not is_iterable(inferred, check_async=check_async):
+            self.add_message("not-an-iterable", args=node.as_string(), node=node)
+
+    def _check_mapping(self, node: nodes.NodeNG) -> None:
+        if is_inside_abstract_class(node):
+            return
+        if isinstance(node, nodes.DictComp):
+            return
+        inferred = safe_infer(node)
+        if inferred is None or isinstance(inferred, util.UninferableBase):
+            return
+        if not is_mapping(inferred):
+            self.add_message("not-a-mapping", args=node.as_string(), node=node)
+
+    @only_required_for_messages("not-an-iterable")
+    def visit_for(self, node: nodes.For) -> None:
+        self._check_iterable(node.iter)
+
+    @only_required_for_messages("not-an-iterable")
+    def visit_asyncfor(self, node: nodes.AsyncFor) -> None:
+        self._check_iterable(node.iter, check_async=True)
+
+    @only_required_for_messages("not-an-iterable")
+    def visit_yieldfrom(self, node: nodes.YieldFrom) -> None:
+        if self._is_asyncio_coroutine(node.value):
+            return
+        self._check_iterable(node.value)
+
+    @only_required_for_messages("not-an-iterable", "not-a-mapping")
+    def visit_call(self, node: nodes.Call) -> None:
+        for stararg in node.starargs:
+            self._check_iterable(stararg.value)
+        for kwarg in node.kwargs:
+            self._check_mapping(kwarg.value)
+
+    @only_required_for_messages("not-an-iterable")
+    def visit_listcomp(self, node: nodes.ListComp) -> None:
+        for gen in node.generators:
+            self._check_iterable(gen.iter, check_async=gen.is_async)
+
+    @only_required_for_messages("not-an-iterable")
+    def visit_dictcomp(self, node: nodes.DictComp) -> None:
+        for gen in node.generators:
+            self._check_iterable(gen.iter, check_async=gen.is_async)
+
+    @only_required_for_messages("not-an-iterable")
+    def visit_setcomp(self, node: nodes.SetComp) -> None:
+        for gen in node.generators:
+            self._check_iterable(gen.iter, check_async=gen.is_async)
+
+    @only_required_for_messages("not-an-iterable")
+    def visit_generatorexp(self, node: nodes.GeneratorExp) -> None:
+        for gen in node.generators:
+            self._check_iterable(gen.iter, check_async=gen.is_async)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(TypeChecker(linter))
+    linter.register_checker(IterableChecker(linter))
diff --git a/pylint/checkers/unicode.py b/pylint/checkers/unicode.py
index b6fd71f21..c90ace971 100644
--- a/pylint/checkers/unicode.py
+++ b/pylint/checkers/unicode.py
@@ -1,3 +1,7 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Unicode and some other ASCII characters can be used to create programs that run
 much different compared to what a human reader would expect from them.

@@ -6,7 +10,9 @@ See: https://www.python.org/dev/peps/pep-0672/

 The following checkers are intended to make users are aware of these issues.
 """
+
 from __future__ import annotations
+
 import codecs
 import contextlib
 import io
@@ -16,111 +22,261 @@ from collections.abc import Iterable
 from functools import lru_cache
 from tokenize import detect_encoding
 from typing import NamedTuple, TypeVar
+
 from astroid import nodes
+
 import pylint.interfaces
 import pylint.lint
 from pylint import checkers
-_StrLike = TypeVar('_StrLike', str, bytes)
-BIDI_UNICODE = ['\u202a', '\u202b', '\u202c', '\u202d', '\u202e', '\u2066',
-    '\u2067', '\u2068', '\u2069', '\u200f']
+
+_StrLike = TypeVar("_StrLike", str, bytes)
+
+# Based on:
+# https://golangexample.com/go-linter-which-checks-for-dangerous-unicode-character-sequences/
+# We use '\u' because it doesn't require a map lookup and is therefore faster
+BIDI_UNICODE = [
+    "\u202A",  # \N{LEFT-TO-RIGHT EMBEDDING}
+    "\u202B",  # \N{RIGHT-TO-LEFT EMBEDDING}
+    "\u202C",  # \N{POP DIRECTIONAL FORMATTING}
+    "\u202D",  # \N{LEFT-TO-RIGHT OVERRIDE}
+    "\u202E",  # \N{RIGHT-TO-LEFT OVERRIDE}
+    "\u2066",  # \N{LEFT-TO-RIGHT ISOLATE}
+    "\u2067",  # \N{RIGHT-TO-LEFT ISOLATE}
+    "\u2068",  # \N{FIRST STRONG ISOLATE}
+    "\u2069",  # \N{POP DIRECTIONAL ISOLATE}
+    # The following was part of PEP 672:
+    # https://www.python.org/dev/peps/pep-0672/
+    # so the list above might not be complete
+    "\u200F",  # \N{RIGHT-TO-LEFT MARK}
+    # We don't use
+    #   "\u200E" # \N{LEFT-TO-RIGHT MARK}
+    # as this is the default for latin files and can't be used
+    # to hide code
+]


 class _BadChar(NamedTuple):
     """Representation of an ASCII char considered bad."""
+
     name: str
     unescaped: str
     escaped: str
     code: str
     help_text: str

-    def description(self) ->str:
+    def description(self) -> str:
         """Used for the detailed error message description."""
-        pass
+        return (
+            f"Invalid unescaped character {self.name}, "
+            f'use "{self.escaped}" instead.'
+        )

-    def human_code(self) ->str:
+    def human_code(self) -> str:
         """Used to generate the human readable error message."""
-        pass
-
-
-BAD_CHARS = [_BadChar('backspace', '\x08', '\\b', 'E2510',
-    'Moves the cursor back, so the character after it will overwrite the character before.'
-    ), _BadChar('carriage-return', '\r', '\\r', 'E2511',
-    'Moves the cursor to the start of line, subsequent characters overwrite the start of the line.'
-    ), _BadChar('sub', '\x1a', '\\x1A', 'E2512',
-    'Ctrl+Z "End of text" on Windows. Some programs (such as type) ignore the rest of the file after it.'
-    ), _BadChar('esc', '\x1b', '\\x1B', 'E2513',
-    'Commonly initiates escape codes which allow arbitrary control of the terminal.'
-    ), _BadChar('nul', '\x00', '\\0', 'E2514',
-    'Mostly end of input for python.'), _BadChar('zero-width-space',
-    '\u200b', '\\u200B', 'E2515',
-    'Invisible space character could hide real code execution.')]
+        return f"invalid-character-{self.name}"
+
+
+# Based on https://www.python.org/dev/peps/pep-0672/
+BAD_CHARS = [
+    _BadChar(
+        "backspace",
+        "\b",
+        "\\b",
+        "E2510",
+        (
+            "Moves the cursor back, so the character after it will overwrite the "
+            "character before."
+        ),
+    ),
+    _BadChar(
+        "carriage-return",
+        "\r",
+        "\\r",
+        "E2511",
+        (
+            "Moves the cursor to the start of line, subsequent characters overwrite "
+            "the start of the line."
+        ),
+    ),
+    _BadChar(
+        "sub",
+        "\x1A",
+        "\\x1A",
+        "E2512",
+        (
+            'Ctrl+Z "End of text" on Windows. Some programs (such as type) ignore '
+            "the rest of the file after it."
+        ),
+    ),
+    _BadChar(
+        "esc",
+        "\x1B",
+        "\\x1B",
+        "E2513",
+        (
+            "Commonly initiates escape codes which allow arbitrary control "
+            "of the terminal."
+        ),
+    ),
+    _BadChar(
+        "nul",
+        "\0",
+        "\\0",
+        "E2514",
+        "Mostly end of input for python.",
+    ),
+    _BadChar(
+        # Zero Width with Space. At the time of writing not accepted by Python.
+        # But used in Trojan Source Examples, so still included and tested for.
+        "zero-width-space",
+        "\u200B",  # \N{ZERO WIDTH SPACE}
+        "\\u200B",
+        "E2515",
+        "Invisible space character could hide real code execution.",
+    ),
+]
 BAD_ASCII_SEARCH_DICT = {char.unescaped: char for char in BAD_CHARS}


-def _line_length(line: _StrLike, codec: str) ->int:
+def _line_length(line: _StrLike, codec: str) -> int:
     """Get the length of a string like line as displayed in an editor."""
-    pass
+    if isinstance(line, bytes):
+        decoded = _remove_bom(line, codec).decode(codec, "replace")
+    else:
+        decoded = line
+
+    stripped = decoded.rstrip("\n")

+    if stripped != decoded:
+        stripped = stripped.rstrip("\r")

-def _map_positions_to_result(line: _StrLike, search_dict: dict[_StrLike,
-    _BadChar], new_line: _StrLike, byte_str_length: int=1) ->dict[int, _BadChar
-    ]:
+    return len(stripped)
+
+
+def _map_positions_to_result(
+    line: _StrLike,
+    search_dict: dict[_StrLike, _BadChar],
+    new_line: _StrLike,
+    byte_str_length: int = 1,
+) -> dict[int, _BadChar]:
     """Get all occurrences of search dict keys within line.

     Ignores Windows end of line and can handle bytes as well as string.
     Also takes care of encodings for which the length of an encoded code point does not
     default to 8 Bit.
     """
-    pass
-
-
-UNICODE_BOMS = {'utf-8': codecs.BOM_UTF8, 'utf-16': codecs.BOM_UTF16,
-    'utf-32': codecs.BOM_UTF32, 'utf-16le': codecs.BOM_UTF16_LE, 'utf-16be':
-    codecs.BOM_UTF16_BE, 'utf-32le': codecs.BOM_UTF32_LE, 'utf-32be':
-    codecs.BOM_UTF32_BE}
-BOM_SORTED_TO_CODEC = OrderedDict((UNICODE_BOMS[codec], codec) for codec in
-    ('utf-32le', 'utf-32be', 'utf-8', 'utf-16le', 'utf-16be'))
-UTF_NAME_REGEX_COMPILED = re.compile('utf[ -]?(8|16|32)[ -]?(le|be|)?(sig)?',
-    flags=re.IGNORECASE)
-
-
-def _normalize_codec_name(codec: str) ->str:
+    result: dict[int, _BadChar] = {}
+
+    for search_for, char in search_dict.items():
+        if search_for not in line:
+            continue
+
+        # Special Handling for Windows '\r\n'
+        if char.unescaped == "\r" and line.endswith(new_line):
+            ignore_pos = len(line) - 2 * byte_str_length
+        else:
+            ignore_pos = None
+
+        start = 0
+        pos = line.find(search_for, start)
+        while pos > 0:
+            if pos != ignore_pos:
+                # Calculate the column
+                col = int(pos / byte_str_length)
+                result[col] = char
+            start = pos + 1
+            pos = line.find(search_for, start)
+
+    return result
+
+
+UNICODE_BOMS = {
+    "utf-8": codecs.BOM_UTF8,
+    "utf-16": codecs.BOM_UTF16,
+    "utf-32": codecs.BOM_UTF32,
+    "utf-16le": codecs.BOM_UTF16_LE,
+    "utf-16be": codecs.BOM_UTF16_BE,
+    "utf-32le": codecs.BOM_UTF32_LE,
+    "utf-32be": codecs.BOM_UTF32_BE,
+}
+BOM_SORTED_TO_CODEC = OrderedDict(
+    # Sorted by length of BOM of each codec
+    (UNICODE_BOMS[codec], codec)
+    for codec in ("utf-32le", "utf-32be", "utf-8", "utf-16le", "utf-16be")
+)
+
+UTF_NAME_REGEX_COMPILED = re.compile(
+    "utf[ -]?(8|16|32)[ -]?(le|be|)?(sig)?", flags=re.IGNORECASE
+)
+
+
+def _normalize_codec_name(codec: str) -> str:
     """Make sure the codec name is always given as defined in the BOM dict."""
-    pass
+    return UTF_NAME_REGEX_COMPILED.sub(r"utf-\1\2", codec).lower()


-def _remove_bom(encoded: bytes, encoding: str) ->bytes:
+def _remove_bom(encoded: bytes, encoding: str) -> bytes:
     """Remove the bom if given from a line."""
-    pass
+    if encoding not in UNICODE_BOMS:
+        return encoded
+    bom = UNICODE_BOMS[encoding]
+    if encoded.startswith(bom):
+        return encoded[len(bom) :]
+    return encoded


-def _encode_without_bom(string: str, encoding: str) ->bytes:
+def _encode_without_bom(string: str, encoding: str) -> bytes:
     """Encode a string but remove the BOM."""
-    pass
+    return _remove_bom(string.encode(encoding), encoding)


-def _byte_to_str_length(codec: str) ->int:
+def _byte_to_str_length(codec: str) -> int:
     """Return how many byte are usually(!) a character point."""
-    pass
+    if codec.startswith("utf-32"):
+        return 4
+    if codec.startswith("utf-16"):
+        return 2
+
+    return 1


 @lru_cache(maxsize=1000)
-def _cached_encode_search(string: str, encoding: str) ->bytes:
+def _cached_encode_search(string: str, encoding: str) -> bytes:
     """A cached version of encode used for search pattern."""
-    pass
+    return _encode_without_bom(string, encoding)


-def _fix_utf16_32_line_stream(steam: Iterable[bytes], codec: str) ->Iterable[
-    bytes]:
-    """Handle line ending for UTF16 and UTF32 correctly.
+def _fix_utf16_32_line_stream(steam: Iterable[bytes], codec: str) -> Iterable[bytes]:
+    r"""Handle line ending for UTF16 and UTF32 correctly.

-    Currently, Python simply strips the required zeros after \\n after the
+    Currently, Python simply strips the required zeros after \n after the
     line ending. Leading to lines that can't be decoded properly
     """
-    pass
-
-
-def extract_codec_from_bom(first_line: bytes) ->str:
+    if not codec.startswith("utf-16") and not codec.startswith("utf-32"):
+        yield from steam
+    else:
+        # First we get all the bytes in memory
+        content = b"".join(line for line in steam)
+
+        new_line = _cached_encode_search("\n", codec)
+
+        # Now we split the line by the real new line in the correct encoding
+        # we can't use split as it would strip the \n that we need
+        start = 0
+        while True:
+            pos = content.find(new_line, start)
+            if pos >= 0:
+                yield content[start : pos + len(new_line)]
+            else:
+                # Yield the rest and finish
+                if content[start:]:
+                    yield content[start:]
+                break
+
+            start = pos + len(new_line)
+
+
+def extract_codec_from_bom(first_line: bytes) -> str:
     """Try to extract the codec (unicode only) by checking for the BOM.

     For details about BOM see https://unicode.org/faq/utf_bom.html#BOM
@@ -134,7 +290,11 @@ def extract_codec_from_bom(first_line: bytes) ->str:
     Raises:
         ValueError: if no codec was found
     """
-    pass
+    for bom, codec in BOM_SORTED_TO_CODEC.items():
+        if first_line.startswith(bom):
+            return codec
+
+    raise ValueError("No BOM found. Could not detect Unicode codec.")


 class UnicodeChecker(checkers.BaseRawFileChecker):
@@ -156,26 +316,72 @@ class UnicodeChecker(checkers.BaseRawFileChecker):
     https://stackoverflow.com/questions/69897842/ and https://bugs.python.org/issue1503789
     for background.
     """
-    name = 'unicode_checker'
-    msgs = {'E2501': (
-        "UTF-16 and UTF-32 aren't backward compatible. Use UTF-8 instead",
-        'invalid-unicode-codec',
-        'For compatibility use UTF-8 instead of UTF-16/UTF-32. See also https://bugs.python.org/issue1503789 for a history of this issue. And https://softwareengineering.stackexchange.com/questions/102205/ for some possible problems when using UTF-16 for instance.'
-        ), 'E2502': (
-        'Contains control characters that can permit obfuscated code executed differently than displayed'
-        , 'bidirectional-unicode',
-        """bidirectional unicode are typically not displayed characters required to display right-to-left (RTL) script (i.e. Chinese, Japanese, Arabic, Hebrew, ...) correctly. So can you trust this code? Are you sure it displayed correctly in all editors? If you did not write it or your language is not RTL, remove the special characters, as they could be used to trick you into executing code, that does something else than what it looks like.
-More Information:
-https://en.wikipedia.org/wiki/Bidirectional_text
-https://trojansource.codes/"""
-        ), 'C2503': ('PEP8 recommends UTF-8 as encoding for Python files',
-        'bad-file-encoding',
-        'PEP8 recommends UTF-8 default encoding for Python files. See https://peps.python.org/pep-0008/#source-file-encoding'
-        ), **{bad_char.code: (bad_char.description(), bad_char.human_code(),
-        bad_char.help_text) for bad_char in BAD_CHARS}}
+
+    name = "unicode_checker"
+
+    msgs = {
+        "E2501": (
+            # This error will be only displayed to users once Python Supports
+            # UTF-16/UTF-32 (if at all)
+            "UTF-16 and UTF-32 aren't backward compatible. Use UTF-8 instead",
+            "invalid-unicode-codec",
+            (
+                "For compatibility use UTF-8 instead of UTF-16/UTF-32. "
+                "See also https://bugs.python.org/issue1503789 for a history "
+                "of this issue. And "
+                "https://softwareengineering.stackexchange.com/questions/102205/ "
+                "for some possible problems when using UTF-16 for instance."
+            ),
+        ),
+        "E2502": (
+            (
+                "Contains control characters that can permit obfuscated code "
+                "executed differently than displayed"
+            ),
+            "bidirectional-unicode",
+            (
+                "bidirectional unicode are typically not displayed characters required "
+                "to display right-to-left (RTL) script "
+                "(i.e. Chinese, Japanese, Arabic, Hebrew, ...) correctly. "
+                "So can you trust this code? "
+                "Are you sure it displayed correctly in all editors? "
+                "If you did not write it or your language is not RTL,"
+                " remove the special characters, as they could be used to trick you into "
+                "executing code, "
+                "that does something else than what it looks like.\n"
+                "More Information:\n"
+                "https://en.wikipedia.org/wiki/Bidirectional_text\n"
+                "https://trojansource.codes/"
+            ),
+        ),
+        "C2503": (
+            "PEP8 recommends UTF-8 as encoding for Python files",
+            "bad-file-encoding",
+            (
+                "PEP8 recommends UTF-8 default encoding for Python files. See "
+                "https://peps.python.org/pep-0008/#source-file-encoding"
+            ),
+        ),
+        **{
+            bad_char.code: (
+                bad_char.description(),
+                bad_char.human_code(),
+                bad_char.help_text,
+            )
+            for bad_char in BAD_CHARS
+        },
+    }
+
+    @staticmethod
+    def _is_invalid_codec(codec: str) -> bool:
+        return codec.startswith(("utf-16", "utf-32"))
+
+    @staticmethod
+    def _is_unicode(codec: str) -> bool:
+        return codec.startswith("utf")

     @classmethod
-    def _find_line_matches(cls, line: bytes, codec: str) ->dict[int, _BadChar]:
+    def _find_line_matches(cls, line: bytes, codec: str) -> dict[int, _BadChar]:
         """Find all matches of BAD_CHARS within line.

         Args:
@@ -185,10 +391,33 @@ https://trojansource.codes/"""
         Return:
             A dictionary with the column offset and the BadASCIIChar
         """
-        pass
+        # We try to decode in Unicode to get the correct column offset
+        # if we would use bytes, it could be off because UTF-8 has no fixed length
+        try:
+            line_search = line.decode(codec, errors="strict")
+            search_dict = BAD_ASCII_SEARCH_DICT
+            return _map_positions_to_result(line_search, search_dict, "\n")
+        except UnicodeDecodeError:
+            # If we can't decode properly, we simply use bytes; even though the column
+            # offsets might be slightly off, it is still better than nothing
+            line_search_byte = line
+            search_dict_byte: dict[bytes, _BadChar] = {}
+            for char in BAD_CHARS:
+                # Some characters might not exist in all encodings
+                with contextlib.suppress(UnicodeDecodeError):
+                    search_dict_byte[_cached_encode_search(char.unescaped, codec)] = (
+                        char
+                    )
+
+            return _map_positions_to_result(
+                line_search_byte,
+                search_dict_byte,
+                _cached_encode_search("\n", codec),
+                byte_str_length=_byte_to_str_length(codec),
+            )

     @staticmethod
-    def _determine_codec(stream: io.BytesIO) ->tuple[str, int]:
+    def _determine_codec(stream: io.BytesIO) -> tuple[str, int]:
         """Determine the codec from the given stream.

         first tries https://www.python.org/dev/peps/pep-0263/
@@ -205,21 +434,104 @@ https://trojansource.codes/"""
         Raises:
             SyntaxError: if failing to detect codec
         """
-        pass
-
-    def _check_codec(self, codec: str, codec_definition_line: int) ->None:
+        try:
+            # First try to detect encoding with PEP 263
+            # Doesn't work with UTF-16/32 at the time of writing
+            # see https://bugs.python.org/issue1503789
+            codec, lines = detect_encoding(stream.readline)
+
+            # lines are empty if UTF-8 BOM is found
+            codec_definition_line = len(lines) or 1
+        except SyntaxError as e:
+            # Codec could not be detected by Python, we try manually to check for
+            # UTF 16/32 BOMs, which aren't supported by Python at the time of writing.
+            # This is only included to be future-safe and handle these codecs as well
+            stream.seek(0)
+            try:
+                codec = extract_codec_from_bom(stream.readline())
+                codec_definition_line = 1
+            except ValueError as ve:
+                # Failed to detect codec, so the syntax error originated not from
+                # UTF16/32 codec usage. So simply raise the error again.
+                raise e from ve
+
+        return _normalize_codec_name(codec), codec_definition_line
+
+    def _check_codec(self, codec: str, codec_definition_line: int) -> None:
         """Check validity of the codec."""
-        pass
-
-    def _check_invalid_chars(self, line: bytes, lineno: int, codec: str
-        ) ->None:
+        if codec != "utf-8":
+            msg = "bad-file-encoding"
+            if self._is_invalid_codec(codec):
+                msg = "invalid-unicode-codec"
+            self.add_message(
+                msg,
+                # Currently Nodes will lead to crashes of pylint
+                # node=node,
+                line=codec_definition_line,
+                end_lineno=codec_definition_line,
+                confidence=pylint.interfaces.HIGH,
+                col_offset=None,
+                end_col_offset=None,
+            )
+
+    def _check_invalid_chars(self, line: bytes, lineno: int, codec: str) -> None:
         """Look for chars considered bad."""
-        pass
-
-    def _check_bidi_chars(self, line: bytes, lineno: int, codec: str) ->None:
+        matches = self._find_line_matches(line, codec)
+        for col, char in matches.items():
+            self.add_message(
+                char.human_code(),
+                # Currently Nodes will lead to crashes of pylint
+                # node=node,
+                line=lineno,
+                end_lineno=lineno,
+                confidence=pylint.interfaces.HIGH,
+                col_offset=col + 1,
+                end_col_offset=col + len(char.unescaped) + 1,
+            )
+
+    def _check_bidi_chars(self, line: bytes, lineno: int, codec: str) -> None:
         """Look for Bidirectional Unicode, if we use unicode."""
-        pass
-
-    def process_module(self, node: nodes.Module) ->None:
+        if not self._is_unicode(codec):
+            return
+        for dangerous in BIDI_UNICODE:
+            if _cached_encode_search(dangerous, codec) in line:
+                # Note that we don't add a col_offset on purpose:
+                #   Using these unicode characters it depends on the editor
+                #   how it displays the location of characters in the line.
+                #   So we mark the complete line.
+                self.add_message(
+                    "bidirectional-unicode",
+                    # Currently Nodes will lead to crashes of pylint
+                    # node=node,
+                    line=lineno,
+                    end_lineno=lineno,
+                    # We mark the complete line, as bidi controls make it hard
+                    # to determine the correct cursor position within an editor
+                    col_offset=0,
+                    end_col_offset=_line_length(line, codec),
+                    confidence=pylint.interfaces.HIGH,
+                )
+                # We look for bidirectional unicode only once per line
+                # as we mark the complete line anyway
+                break
+
+    def process_module(self, node: nodes.Module) -> None:
         """Perform the actual check by checking module stream."""
-        pass
+        with node.stream() as stream:
+            codec, codec_line = self._determine_codec(stream)
+            self._check_codec(codec, codec_line)
+
+            stream.seek(0)
+
+            # Check for invalid content (controls/chars)
+            for lineno, line in enumerate(
+                _fix_utf16_32_line_stream(stream, codec), start=1
+            ):
+                if lineno == 1:
+                    line = _remove_bom(line, codec)
+                self._check_bidi_chars(line, lineno, codec)
+                self._check_invalid_chars(line, lineno, codec)
+
+
+def register(linter: pylint.lint.PyLinter) -> None:
+    linter.register_checker(UnicodeChecker(linter))
diff --git a/pylint/checkers/unsupported_version.py b/pylint/checkers/unsupported_version.py
index 8be4654c4..64f2630d8 100644
--- a/pylint/checkers/unsupported_version.py
+++ b/pylint/checkers/unsupported_version.py
@@ -1,11 +1,24 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Checker for features used that are not supported by all python versions
 indicated by the py-version setting.
 """
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
-from pylint.checkers.utils import only_required_for_messages, safe_infer, uninferable_final_decorators
+from pylint.checkers.utils import (
+    only_required_for_messages,
+    safe_infer,
+    uninferable_final_decorators,
+)
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter

@@ -14,33 +27,58 @@ class UnsupportedVersionChecker(BaseChecker):
     """Checker for features that are not supported by all python versions
     indicated by the py-version setting.
     """
-    name = 'unsupported_version'
-    msgs = {'W2601': (
-        'F-strings are not supported by all versions included in the py-version setting'
-        , 'using-f-string-in-unsupported-version',
-        'Used when the py-version set by the user is lower than 3.6 and pylint encounters an f-string.'
-        ), 'W2602': (
-        'typing.final is not supported by all versions included in the py-version setting'
-        , 'using-final-decorator-in-unsupported-version',
-        'Used when the py-version set by the user is lower than 3.8 and pylint encounters a ``typing.final`` decorator.'
-        )}
-
-    def open(self) ->None:
+
+    name = "unsupported_version"
+    msgs = {
+        "W2601": (
+            "F-strings are not supported by all versions included in the py-version setting",
+            "using-f-string-in-unsupported-version",
+            "Used when the py-version set by the user is lower than 3.6 and pylint encounters "
+            "an f-string.",
+        ),
+        "W2602": (
+            "typing.final is not supported by all versions included in the py-version setting",
+            "using-final-decorator-in-unsupported-version",
+            "Used when the py-version set by the user is lower than 3.8 and pylint encounters "
+            "a ``typing.final`` decorator.",
+        ),
+    }
+
+    def open(self) -> None:
         """Initialize visit variables and statistics."""
-        pass
+        py_version = self.linter.config.py_version
+        self._py36_plus = py_version >= (3, 6)
+        self._py38_plus = py_version >= (3, 8)

-    @only_required_for_messages('using-f-string-in-unsupported-version')
-    def visit_joinedstr(self, node: nodes.JoinedStr) ->None:
+    @only_required_for_messages("using-f-string-in-unsupported-version")
+    def visit_joinedstr(self, node: nodes.JoinedStr) -> None:
         """Check f-strings."""
-        pass
+        if not self._py36_plus:
+            self.add_message("using-f-string-in-unsupported-version", node=node)

-    @only_required_for_messages('using-final-decorator-in-unsupported-version')
-    def visit_decorators(self, node: nodes.Decorators) ->None:
+    @only_required_for_messages("using-final-decorator-in-unsupported-version")
+    def visit_decorators(self, node: nodes.Decorators) -> None:
         """Check decorators."""
-        pass
+        self._check_typing_final(node)

-    def _check_typing_final(self, node: nodes.Decorators) ->None:
+    def _check_typing_final(self, node: nodes.Decorators) -> None:
         """Add a message when the `typing.final` decorator is used and the
         py-version is lower than 3.8.
         """
-        pass
+        if self._py38_plus:
+            return
+
+        decorators = []
+        for decorator in node.get_children():
+            inferred = safe_infer(decorator)
+            if inferred and inferred.qname() == "typing.final":
+                decorators.append(decorator)
+
+        for decorator in decorators or uninferable_final_decorators(node):
+            self.add_message(
+                "using-final-decorator-in-unsupported-version", node=decorator
+            )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(UnsupportedVersionChecker(linter))
diff --git a/pylint/checkers/utils.py b/pylint/checkers/utils.py
index 83788746a..26aac1bd8 100644
--- a/pylint/checkers/utils.py
+++ b/pylint/checkers/utils.py
@@ -1,5 +1,11 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Some functions that may be useful for various checkers."""
+
 from __future__ import annotations
+
 import builtins
 import fnmatch
 import itertools
@@ -10,6 +16,7 @@ from collections.abc import Iterable, Iterator
 from functools import lru_cache, partial
 from re import Match
 from typing import TYPE_CHECKING, Any, Callable, TypeVar
+
 import _string
 import astroid.objects
 from astroid import TooManyLevelsError, nodes, util
@@ -17,79 +24,221 @@ from astroid.context import InferenceContext
 from astroid.exceptions import AstroidError
 from astroid.nodes._base_nodes import ImportNode, Statement
 from astroid.typing import InferenceResult, SuccessfulInferenceResult
+
 from pylint.constants import TYPING_NEVER, TYPING_NORETURN
+
 if TYPE_CHECKING:
     from functools import _lru_cache_wrapper
+
     from pylint.checkers import BaseChecker
-_NodeT = TypeVar('_NodeT', bound=nodes.NodeNG)
-_CheckerT = TypeVar('_CheckerT', bound='BaseChecker')
+
+_NodeT = TypeVar("_NodeT", bound=nodes.NodeNG)
+_CheckerT = TypeVar("_CheckerT", bound="BaseChecker")
 AstCallbackMethod = Callable[[_CheckerT, _NodeT], None]
-COMP_NODE_TYPES = (nodes.ListComp, nodes.SetComp, nodes.DictComp, nodes.
-    GeneratorExp)
-EXCEPTIONS_MODULE = 'builtins'
-ABC_MODULES = {'abc', '_py_abc'}
-ABC_METHODS = {'abc.abstractproperty', 'abc.abstractmethod',
-    'abc.abstractclassmethod', 'abc.abstractstaticmethod'}
-TYPING_PROTOCOLS = frozenset({'typing.Protocol',
-    'typing_extensions.Protocol', '.Protocol'})
-COMMUTATIVE_OPERATORS = frozenset({'*', '+', '^', '&', '|'})
-ITER_METHOD = '__iter__'
-AITER_METHOD = '__aiter__'
-NEXT_METHOD = '__next__'
-GETITEM_METHOD = '__getitem__'
-CLASS_GETITEM_METHOD = '__class_getitem__'
-SETITEM_METHOD = '__setitem__'
-DELITEM_METHOD = '__delitem__'
-CONTAINS_METHOD = '__contains__'
-KEYS_METHOD = 'keys'
-_SPECIAL_METHODS_PARAMS = {None: ('__new__', '__init__', '__call__',
-    '__init_subclass__'), (0): ('__del__', '__repr__', '__str__',
-    '__bytes__', '__hash__', '__bool__', '__dir__', '__len__',
-    '__length_hint__', '__iter__', '__reversed__', '__neg__', '__pos__',
-    '__abs__', '__invert__', '__complex__', '__int__', '__float__',
-    '__index__', '__trunc__', '__floor__', '__ceil__', '__enter__',
-    '__aenter__', '__getnewargs_ex__', '__getnewargs__', '__getstate__',
-    '__reduce__', '__copy__', '__unicode__', '__nonzero__', '__await__',
-    '__aiter__', '__anext__', '__fspath__', '__subclasses__'), (1): (
-    '__format__', '__lt__', '__le__', '__eq__', '__ne__', '__gt__',
-    '__ge__', '__getattr__', '__getattribute__', '__delattr__',
-    '__delete__', '__instancecheck__', '__subclasscheck__', '__getitem__',
-    '__missing__', '__delitem__', '__contains__', '__add__', '__sub__',
-    '__mul__', '__truediv__', '__floordiv__', '__rfloordiv__', '__mod__',
-    '__divmod__', '__lshift__', '__rshift__', '__and__', '__xor__',
-    '__or__', '__radd__', '__rsub__', '__rmul__', '__rtruediv__',
-    '__rmod__', '__rdivmod__', '__rpow__', '__rlshift__', '__rrshift__',
-    '__rand__', '__rxor__', '__ror__', '__iadd__', '__isub__', '__imul__',
-    '__itruediv__', '__ifloordiv__', '__imod__', '__ilshift__',
-    '__irshift__', '__iand__', '__ixor__', '__ior__', '__ipow__',
-    '__setstate__', '__reduce_ex__', '__deepcopy__', '__cmp__',
-    '__matmul__', '__rmatmul__', '__imatmul__', '__div__'), (2): (
-    '__setattr__', '__get__', '__set__', '__setitem__', '__set_name__'), (3
-    ): ('__exit__', '__aexit__'), (0, 1): ('__round__',), (1, 2): ('__pow__',)}
-SPECIAL_METHODS_PARAMS = {name: params for params, methods in
-    _SPECIAL_METHODS_PARAMS.items() for name in methods}
+
+COMP_NODE_TYPES = (
+    nodes.ListComp,
+    nodes.SetComp,
+    nodes.DictComp,
+    nodes.GeneratorExp,
+)
+EXCEPTIONS_MODULE = "builtins"
+ABC_MODULES = {"abc", "_py_abc"}
+ABC_METHODS = {
+    "abc.abstractproperty",
+    "abc.abstractmethod",
+    "abc.abstractclassmethod",
+    "abc.abstractstaticmethod",
+}
+TYPING_PROTOCOLS = frozenset(
+    {"typing.Protocol", "typing_extensions.Protocol", ".Protocol"}
+)
+COMMUTATIVE_OPERATORS = frozenset({"*", "+", "^", "&", "|"})
+ITER_METHOD = "__iter__"
+AITER_METHOD = "__aiter__"
+NEXT_METHOD = "__next__"
+GETITEM_METHOD = "__getitem__"
+CLASS_GETITEM_METHOD = "__class_getitem__"
+SETITEM_METHOD = "__setitem__"
+DELITEM_METHOD = "__delitem__"
+CONTAINS_METHOD = "__contains__"
+KEYS_METHOD = "keys"
+
+# Dictionary which maps the number of expected parameters a
+# special method can have to a set of special methods.
+# The following keys are used to denote the parameters restrictions:
+#
+# * None: variable number of parameters
+# * number: exactly that number of parameters
+# * tuple: these are the odd ones. Basically it means that the function
+#          can work with any number of arguments from that tuple,
+#          although it's best to implement it in order to accept
+#          all of them.
+_SPECIAL_METHODS_PARAMS = {
+    None: ("__new__", "__init__", "__call__", "__init_subclass__"),
+    0: (
+        "__del__",
+        "__repr__",
+        "__str__",
+        "__bytes__",
+        "__hash__",
+        "__bool__",
+        "__dir__",
+        "__len__",
+        "__length_hint__",
+        "__iter__",
+        "__reversed__",
+        "__neg__",
+        "__pos__",
+        "__abs__",
+        "__invert__",
+        "__complex__",
+        "__int__",
+        "__float__",
+        "__index__",
+        "__trunc__",
+        "__floor__",
+        "__ceil__",
+        "__enter__",
+        "__aenter__",
+        "__getnewargs_ex__",
+        "__getnewargs__",
+        "__getstate__",
+        "__reduce__",
+        "__copy__",
+        "__unicode__",
+        "__nonzero__",
+        "__await__",
+        "__aiter__",
+        "__anext__",
+        "__fspath__",
+        "__subclasses__",
+    ),
+    1: (
+        "__format__",
+        "__lt__",
+        "__le__",
+        "__eq__",
+        "__ne__",
+        "__gt__",
+        "__ge__",
+        "__getattr__",
+        "__getattribute__",
+        "__delattr__",
+        "__delete__",
+        "__instancecheck__",
+        "__subclasscheck__",
+        "__getitem__",
+        "__missing__",
+        "__delitem__",
+        "__contains__",
+        "__add__",
+        "__sub__",
+        "__mul__",
+        "__truediv__",
+        "__floordiv__",
+        "__rfloordiv__",
+        "__mod__",
+        "__divmod__",
+        "__lshift__",
+        "__rshift__",
+        "__and__",
+        "__xor__",
+        "__or__",
+        "__radd__",
+        "__rsub__",
+        "__rmul__",
+        "__rtruediv__",
+        "__rmod__",
+        "__rdivmod__",
+        "__rpow__",
+        "__rlshift__",
+        "__rrshift__",
+        "__rand__",
+        "__rxor__",
+        "__ror__",
+        "__iadd__",
+        "__isub__",
+        "__imul__",
+        "__itruediv__",
+        "__ifloordiv__",
+        "__imod__",
+        "__ilshift__",
+        "__irshift__",
+        "__iand__",
+        "__ixor__",
+        "__ior__",
+        "__ipow__",
+        "__setstate__",
+        "__reduce_ex__",
+        "__deepcopy__",
+        "__cmp__",
+        "__matmul__",
+        "__rmatmul__",
+        "__imatmul__",
+        "__div__",
+    ),
+    2: ("__setattr__", "__get__", "__set__", "__setitem__", "__set_name__"),
+    3: ("__exit__", "__aexit__"),
+    (0, 1): ("__round__",),
+    (1, 2): ("__pow__",),
+}
+
+SPECIAL_METHODS_PARAMS = {
+    name: params
+    for params, methods in _SPECIAL_METHODS_PARAMS.items()
+    for name in methods
+}
 PYMETHODS = set(SPECIAL_METHODS_PARAMS)
-SUBSCRIPTABLE_CLASSES_PEP585 = frozenset(('builtins.tuple', 'builtins.list',
-    'builtins.dict', 'builtins.set', 'builtins.frozenset', 'builtins.type',
-    'collections.deque', 'collections.defaultdict',
-    'collections.OrderedDict', 'collections.Counter',
-    'collections.ChainMap', '_collections_abc.Awaitable',
-    '_collections_abc.Coroutine', '_collections_abc.AsyncIterable',
-    '_collections_abc.AsyncIterator', '_collections_abc.AsyncGenerator',
-    '_collections_abc.Iterable', '_collections_abc.Iterator',
-    '_collections_abc.Generator', '_collections_abc.Reversible',
-    '_collections_abc.Container', '_collections_abc.Collection',
-    '_collections_abc.Callable', '_collections_abc.Set',
-    '_collections_abc.MutableSet', '_collections_abc.Mapping',
-    '_collections_abc.MutableMapping', '_collections_abc.Sequence',
-    '_collections_abc.MutableSequence', '_collections_abc.ByteString',
-    '_collections_abc.MappingView', '_collections_abc.KeysView',
-    '_collections_abc.ItemsView', '_collections_abc.ValuesView',
-    'contextlib.AbstractContextManager',
-    'contextlib.AbstractAsyncContextManager', 're.Pattern', 're.Match'))
+
+SUBSCRIPTABLE_CLASSES_PEP585 = frozenset(
+    (
+        "builtins.tuple",
+        "builtins.list",
+        "builtins.dict",
+        "builtins.set",
+        "builtins.frozenset",
+        "builtins.type",
+        "collections.deque",
+        "collections.defaultdict",
+        "collections.OrderedDict",
+        "collections.Counter",
+        "collections.ChainMap",
+        "_collections_abc.Awaitable",
+        "_collections_abc.Coroutine",
+        "_collections_abc.AsyncIterable",
+        "_collections_abc.AsyncIterator",
+        "_collections_abc.AsyncGenerator",
+        "_collections_abc.Iterable",
+        "_collections_abc.Iterator",
+        "_collections_abc.Generator",
+        "_collections_abc.Reversible",
+        "_collections_abc.Container",
+        "_collections_abc.Collection",
+        "_collections_abc.Callable",
+        "_collections_abc.Set",
+        "_collections_abc.MutableSet",
+        "_collections_abc.Mapping",
+        "_collections_abc.MutableMapping",
+        "_collections_abc.Sequence",
+        "_collections_abc.MutableSequence",
+        "_collections_abc.ByteString",
+        "_collections_abc.MappingView",
+        "_collections_abc.KeysView",
+        "_collections_abc.ItemsView",
+        "_collections_abc.ValuesView",
+        "contextlib.AbstractContextManager",
+        "contextlib.AbstractAsyncContextManager",
+        "re.Pattern",
+        "re.Match",
+    )
+)
+
 SINGLETON_VALUES = {True, False, None}
-TERMINATING_FUNCS_QNAMES = frozenset({'_sitebuiltins.Quitter', 'sys.exit',
-    'posix._exit', 'nt._exit'})
+
+TERMINATING_FUNCS_QNAMES = frozenset(
+    {"_sitebuiltins.Quitter", "sys.exit", "posix._exit", "nt._exit"}
+)


 class NoSuchArgumentError(Exception):
@@ -100,36 +249,101 @@ class InferredTypeError(Exception):
     pass


def get_all_elements(
    node: nodes.NodeNG,
) -> Iterable[nodes.NodeNG]:
    """Recursively returns all atoms in nested lists and tuples."""
    # Anything that is not a tuple/list is a leaf and is yielded as-is.
    if not isinstance(node, (nodes.Tuple, nodes.List)):
        yield node
        return
    for element in node.elts:
        yield from get_all_elements(element)


def is_super(node: nodes.NodeNG) -> bool:
    """Return True if the node is referencing the "super" builtin function."""
    # Check the name first so `root()` is only consulted for candidates.
    return (
        getattr(node, "name", None) == "super"
        and node.root().name == "builtins"
    )


def is_error(node: nodes.FunctionDef) -> bool:
    """Return true if the given function node only raises an exception."""
    body = node.body
    # A bare "raise-only" function has exactly one statement: a Raise.
    return len(body) == 1 and isinstance(body[0], nodes.Raise)


# Snapshot the builtin namespace as a plain dict so membership tests in
# is_builtin() are simple lookups; this shadows the `builtins` module name.
builtins = builtins.__dict__.copy()  # type: ignore[assignment]
# Names treated as builtins even though they are not in the builtins dict.
SPECIAL_BUILTINS = ("__builtins__",)  # '__path__', '__file__')


def is_builtin_object(node: nodes.NodeNG) -> bool:
    """Returns True if the given node is an object from the __builtin__ module.

    Returns False for a falsy ``node`` (e.g. ``None``).
    """
    # The annotation promises a bool; previously `node and ...` returned the
    # falsy node itself (e.g. None) and was silenced with `type: ignore`.
    return bool(node) and node.root().name == "builtins"


def is_builtin(name: str) -> bool:
    """Return true if <name> could be considered as a builtin defined by python."""
    in_builtin_dict = name in builtins  # type: ignore[operator]
    return in_builtin_dict or name in SPECIAL_BUILTINS
+
+
def is_defined_in_scope(
    var_node: nodes.NodeNG,
    varname: str,
    scope: nodes.NodeNG,
) -> bool:
    """Return True if `varname` has a defining node inside `scope`."""
    defining_node = defnode_in_scope(var_node, varname, scope)
    return defining_node is not None
+
+
# pylint: disable = too-many-branches
def defnode_in_scope(
    var_node: nodes.NodeNG,
    varname: str,
    scope: nodes.NodeNG,
) -> nodes.NodeNG | None:
    """Return the node that defines `varname` within `scope`, or None.

    `var_node` is the usage site; it is needed so that definitions which
    enclose the use itself (a `with` item, or a default value of the
    searched function) are handled correctly.
    """
    if isinstance(scope, nodes.If):
        # Only direct children of the `if` body are inspected: nonlocal
        # declarations and simple assignments can both (re)bind the name.
        for node in scope.body:
            if isinstance(node, nodes.Nonlocal) and varname in node.names:
                return node
            if isinstance(node, nodes.Assign):
                for target in node.targets:
                    if isinstance(target, nodes.AssignName) and target.name == varname:
                        return target
    elif isinstance(scope, (COMP_NODE_TYPES, nodes.For)):
        # Comprehensions and for-loops: any AssignName in the subtree counts.
        for ass_node in scope.nodes_of_class(nodes.AssignName):
            if ass_node.name == varname:
                return ass_node
    elif isinstance(scope, nodes.With):
        for expr, ids in scope.items:
            if expr.parent_of(var_node):
                # The use sits inside the context expression itself, i.e.
                # before the `as` name is bound.
                break
            if ids and isinstance(ids, nodes.AssignName) and ids.name == varname:
                return ids
    elif isinstance(scope, (nodes.Lambda, nodes.FunctionDef)):
        if scope.args.is_argument(varname):
            # If the name is found inside a default value
            # of a function, then let the search continue
            # in the parent's tree.
            if scope.args.parent_of(var_node):
                try:
                    scope.args.default_value(varname)
                    scope = scope.parent
                    defnode = defnode_in_scope(var_node, varname, scope)
                except astroid.NoDefault:
                    pass
                else:
                    return defnode
            return scope
        if getattr(scope, "name", None) == varname:
            # The function/lambda itself carries the searched name.
            return scope
    elif isinstance(scope, nodes.ExceptHandler):
        if isinstance(scope.name, nodes.AssignName):
            ass_node = scope.name
            if ass_node.name == varname:
                return ass_node
    return None


def is_defined_before(var_node: nodes.Name) -> bool:
    """Check if the given variable node is defined before.

    Verify that the variable node is defined by a parent node
    (e.g. a comprehension, lambda, function, `with`, `for`, ...)
    or in a previous sibling node on the same line
    (statement_defining ; statement_using).
    """
    varname = var_node.name
    # Walk outwards through the enclosing nodes, looking for a definition.
    for parent in var_node.node_ancestors():
        defnode = defnode_in_scope(var_node, varname, parent)
        if defnode is None:
            continue
        defnode_scope = defnode.scope()
        if isinstance(
            defnode_scope, (*COMP_NODE_TYPES, nodes.Lambda, nodes.FunctionDef)
        ):
            # Avoid the case where var_node_scope is a nested function
            if isinstance(defnode_scope, nodes.FunctionDef):
                var_node_scope = var_node.scope()
                if var_node_scope is not defnode_scope and isinstance(
                    var_node_scope, nodes.FunctionDef
                ):
                    return False
            return True
        if defnode.lineno < var_node.lineno:
            return True
        # `defnode` and `var_node` on the same line
        for defnode_anc in defnode.node_ancestors():
            if defnode_anc.lineno != var_node.lineno:
                continue
            if isinstance(
                defnode_anc,
                (
                    nodes.For,
                    nodes.While,
                    nodes.With,
                    nodes.Try,
                    nodes.ExceptHandler,
                ),
            ):
                return True
    # possibly multiple statements on the same line using semicolon separator
    stmt = var_node.statement()
    _node = stmt.previous_sibling()
    lineno = stmt.fromlineno
    while _node and _node.fromlineno == lineno:
        # An earlier sibling on the same line may define the name either by
        # assignment or by an import (plain or aliased).
        for assign_node in _node.nodes_of_class(nodes.AssignName):
            if assign_node.name == varname:
                return True
        for imp_node in _node.nodes_of_class((nodes.ImportFrom, nodes.Import)):
            if varname in [name[1] or name[0] for name in imp_node.names]:
                return True
        _node = _node.previous_sibling()
    return False
+
+
def is_default_argument(node: nodes.NodeNG, scope: nodes.NodeNG | None = None) -> bool:
    """Return true if the given Name node is used in function or lambda
    default argument's value.
    """
    scope = scope or node.scope()
    if not isinstance(scope, (nodes.FunctionDef, nodes.Lambda)):
        return False

    # Gather positional defaults plus the non-None keyword-only defaults.
    candidate_defaults = list(scope.args.defaults)
    candidate_defaults.extend(
        default for default in scope.args.kw_defaults if default is not None
    )
    for default_expr in candidate_defaults:
        for name_node in default_expr.nodes_of_class(nodes.Name):
            if name_node is node:
                return True
    return False
+
+
def is_func_decorator(node: nodes.NodeNG) -> bool:
    """Return true if the name is used in function decorator."""
    # Stop searching once we leave the expression context: a statement or a
    # new (comprehension/lambda) scope means we are not inside decorators.
    boundary_types = (
        nodes.Lambda,
        nodes.ComprehensionScope,
        nodes.ListComp,
    )
    for ancestor in node.node_ancestors():
        if isinstance(ancestor, nodes.Decorators):
            return True
        if ancestor.is_statement or isinstance(ancestor, boundary_types):
            break
    return False
+
+
def is_ancestor_name(frame: nodes.ClassDef, node: nodes.NodeNG) -> bool:
    """Return whether `frame` is an astroid.Class node with `node` in the
    subtree of its bases attribute.
    """
    if not isinstance(frame, nodes.ClassDef):
        return False
    for base in frame.bases:
        if node in base.nodes_of_class(nodes.Name):
            return True
    return False


def is_being_called(node: nodes.NodeNG) -> bool:
    """Return True if node is the function being called in a Call node."""
    parent = node.parent
    return isinstance(parent, nodes.Call) and parent.func is node


def assign_parent(node: nodes.NodeNG) -> nodes.NodeNG:
    """Return the higher parent which is not an AssignName, Tuple or List node."""
    transparent_types = (nodes.AssignName, nodes.Tuple, nodes.List)
    current = node
    while current and isinstance(current, transparent_types):
        current = current.parent
    return current


def overrides_a_method(class_node: nodes.ClassDef, name: str) -> bool:
    """Return True if <name> is a method overridden from an ancestor
    which is not the base object class.
    """
    return any(
        ancestor.name != "object"
        and name in ancestor
        and isinstance(ancestor[name], nodes.FunctionDef)
        for ancestor in class_node.ancestors()
    )
+
+
def only_required_for_messages(
    *messages: str,
) -> Callable[
    [AstCallbackMethod[_CheckerT, _NodeT]], AstCallbackMethod[_CheckerT, _NodeT]
]:
    """Decorator to store messages that are handled by a checker method as an
    attribute of the function object.

    This information is used by ``ASTWalker`` to decide whether to call the decorated
    method or not. If none of the messages is enabled, the method will be skipped.
    Therefore, the list of messages must be well maintained at all times!
    This decorator only has an effect on ``visit_*`` and ``leave_*`` methods
    of a class inheriting from ``BaseChecker``.
    """

    def decorate(
        func: AstCallbackMethod[_CheckerT, _NodeT]
    ) -> AstCallbackMethod[_CheckerT, _NodeT]:
        # Stash the handled message symbols on the callback itself.
        func.checks_msgs = messages  # type: ignore[attr-defined]
        return func

    return decorate


 class IncompleteFormatString(Exception):
@@ -202,33 +503,133 @@ class UnsupportedFormatCharacter(Exception):
     format characters.
     """

    def __init__(self, index: int) -> None:
        # `index` is the position of the offending conversion character
        # inside the format string being parsed.
        super().__init__(index)
        self.index = index


def parse_format_string(
    format_string: str,
) -> tuple[set[str], int, dict[str, str], list[str]]:
    """Parses a format string, returning a tuple (keys, num_args).

    Where 'keys' is the set of mapping keys in the format string, and 'num_args' is the number
    of arguments required by the format string. Raises IncompleteFormatString or
    UnsupportedFormatCharacter if a parse error occurs.

    Also returns 'key_types', mapping each key to its conversion character,
    and 'pos_types', the conversion characters of positional specifiers.
    """
    keys = set()
    key_types = {}
    pos_types = []
    num_args = 0

    def next_char(i: int) -> tuple[int, str]:
        # Advance one character; a format string ending mid-specifier is
        # reported as incomplete.
        i += 1
        if i == len(format_string):
            raise IncompleteFormatString
        return (i, format_string[i])

    i = 0
    while i < len(format_string):
        char = format_string[i]
        if char == "%":
            i, char = next_char(i)
            # Parse the mapping key (optional).
            key = None
            if char == "(":
                # Keys may contain balanced parentheses, so track depth.
                depth = 1
                i, char = next_char(i)
                key_start = i
                while depth != 0:
                    if char == "(":
                        depth += 1
                    elif char == ")":
                        depth -= 1
                    i, char = next_char(i)
                key_end = i - 1
                key = format_string[key_start:key_end]

            # Parse the conversion flags (optional).
            while char in "#0- +":
                i, char = next_char(i)
            # Parse the minimum field width (optional).
            if char == "*":
                # "*" consumes one extra positional argument for the width.
                num_args += 1
                i, char = next_char(i)
            else:
                while char in string.digits:
                    i, char = next_char(i)
            # Parse the precision (optional).
            if char == ".":
                i, char = next_char(i)
                if char == "*":
                    # "*" consumes one extra positional argument here too.
                    num_args += 1
                    i, char = next_char(i)
                else:
                    while char in string.digits:
                        i, char = next_char(i)
            # Parse the length modifier (optional).
            if char in "hlL":
                i, char = next_char(i)
            # Parse the conversion type (mandatory).
            flags = "diouxXeEfFgGcrs%a"
            if char not in flags:
                raise UnsupportedFormatCharacter(i)
            if key:
                keys.add(key)
                key_types[key] = char
            elif char != "%":
                # "%%" is a literal percent and consumes no argument.
                num_args += 1
                pos_types.append(char)
        i += 1
    return keys, num_args, key_types, pos_types
+
+
def split_format_field_names(
    format_string: str,
) -> tuple[str, Iterable[tuple[bool, str]]]:
    """Split a PEP 3101 field name into its leading name and an iterator over
    the remaining attribute/index accessors.

    Raises IncompleteFormatString if the field name cannot be parsed.
    """
    try:
        head_and_accessors = _string.formatter_field_name_split(format_string)
    except ValueError as exc:
        raise IncompleteFormatString() from exc
    return head_and_accessors  # type: ignore[no-any-return]
+
+
+def collect_string_fields(format_string: str) -> Iterable[str | None]:
     """Given a format string, return an iterator
     of all the valid format fields.

     It handles nested fields as well.
     """
-    pass
-
-
-def parse_format_method_string(format_string: str) ->tuple[list[tuple[str,
-    list[tuple[bool, str]]]], int, int]:
+    formatter = string.Formatter()
+    # pylint: disable = too-many-try-statements
+    try:
+        parseiterator = formatter.parse(format_string)
+        for result in parseiterator:
+            if all(item is None for item in result[1:]):
+                # not a replacement format
+                continue
+            name = result[1]
+            nested = result[2]
+            yield name
+            if nested:
+                yield from collect_string_fields(nested)
+    except ValueError as exc:
+        # Probably the format string is invalid.
+        if exc.args[0].startswith("cannot switch from manual"):
+            # On Jython, parsing a string with both manual
+            # and automatic positions will fail with a ValueError,
+            # while on CPython it will simply return the fields,
+            # the validation being done in the interpreter (?).
+            # We're just returning two mixed fields in order
+            # to trigger the format-combined-specification check.
+            yield ""
+            yield "1"
+            return
+        raise IncompleteFormatString(format_string) from exc
+
+
def parse_format_method_string(
    format_string: str,
) -> tuple[list[tuple[str, list[tuple[bool, str]]]], int, int]:
    """Parses a PEP 3101 format string, returning a tuple of
    (keyword_arguments, implicit_pos_args_cnt, explicit_pos_args).

    keyword_arguments is the set of mapping keys in the format string, implicit_pos_args_cnt
    is the number of arguments required by the format string and
    explicit_pos_args is the number of arguments passed with the position.
    """
    keyword_arguments = []
    implicit_pos_args_cnt = 0
    explicit_pos_args = set()
    for field in collect_string_fields(format_string):
        if not field:
            # Auto-numbered field such as "{}".
            implicit_pos_args_cnt += 1
            continue
        if str(field).isdigit():
            explicit_pos_args.add(str(field))
            continue
        keyname, fielditerator = split_format_field_names(field)
        if isinstance(keyname, numbers.Number):
            # A numeric head (e.g. "0.attr") is still a positional argument.
            explicit_pos_args.add(str(keyname))
        try:
            # Consuming the accessor iterator may raise on malformed fields.
            keyword_arguments.append((keyname, list(fielditerator)))
        except ValueError as exc:
            raise IncompleteFormatString() from exc
    return keyword_arguments, implicit_pos_args_cnt, len(explicit_pos_args)
+
+
+def is_attr_protected(attrname: str) -> bool:
     """Return True if attribute name is protected (start with _ and some other
     details), False otherwise.
     """
-    pass
+    return (
+        attrname[0] == "_"
+        and attrname != "_"
+        and not (attrname.startswith("__") and attrname.endswith("__"))
+    )


def node_frame_class(node: nodes.NodeNG) -> nodes.ClassDef | None:
    """Return the class that is wrapping the given node.

    The function returns a class for a method node (or a staticmethod or a
    classmethod), otherwise it returns `None`.
    """
    candidate_types = (
        nodes.NodeNG,
        astroid.UnboundMethod,
        astroid.BaseInstance,
    )
    frame = node.frame()
    # Walk outwards until we reach a class definition or leave the set of
    # recognised wrapper kinds.
    while frame and isinstance(frame, candidate_types):
        if isinstance(frame, nodes.ClassDef):
            break
        if frame.parent is None:
            return None
        frame = frame.parent.frame()
    return frame
+
+
+def get_outer_class(class_node: astroid.ClassDef) -> astroid.ClassDef | None:
     """Return the class that is the outer class of given (nested) class_node."""
-    pass
+    parent_klass = class_node.parent.frame()

+    return parent_klass if isinstance(parent_klass, astroid.ClassDef) else None

def is_attr_private(attrname: str) -> Match[str] | None:
    """Check that attribute name is private (at least two leading underscores,
    at most one trailing underscore).
    """
    # `re` caches compiled patterns, so matching directly is equivalent to
    # compiling first.
    return re.match("^_{2,10}.*[^_]+_?$", attrname)


def get_argument_from_call(
    call_node: nodes.Call, position: int | None = None, keyword: str | None = None
) -> nodes.Name:
    """Returns the specified argument from a function call.

    :param nodes.Call call_node: Node representing a function call to check.
    :param int position: position of the argument.
    :param str keyword: the keyword of the argument.

    :returns: The node representing the argument, None if the argument is not found.
    :rtype: nodes.Name
    :raises ValueError: if both position and keyword are None.
    :raises NoSuchArgumentError: if no argument at the provided position or with
    the provided keyword.
    """
    if position is None and keyword is None:
        raise ValueError("Must specify at least one of: position or keyword.")
    if position is not None:
        args = call_node.args
        # Same semantics as args[position] guarded by IndexError: accept any
        # in-range index, including negative ones.
        if -len(args) <= position < len(args):
            return args[position]
    if keyword and call_node.keywords:
        for keyword_node in call_node.keywords:
            if keyword_node.arg == keyword:
                return keyword_node.value

    raise NoSuchArgumentError
+
+
def infer_kwarg_from_call(call_node: nodes.Call, keyword: str) -> nodes.Name | None:
    """Returns the specified argument from a function's kwargs.

    :param nodes.Call call_node: Node representing a function call to check.
    :param str keyword: Name of the argument to be extracted.

    :returns: The node representing the argument, None if the argument is not found.
    :rtype: nodes.Name
    """
    for arg in call_node.kwargs:
        inferred = safe_infer(arg.value)
        if not isinstance(inferred, nodes.Dict):
            continue
        for key_node, value_node in inferred.items:
            if key_node.value == keyword:
                return value_node
    return None


def inherit_from_std_ex(node: nodes.NodeNG | astroid.Instance) -> bool:
    """Return whether the given class node is subclass of
    exceptions.Exception.
    """
    # Objects without an `ancestors` method contribute only themselves.
    ancestors = node.ancestors() if hasattr(node, "ancestors") else []
    for candidate in itertools.chain([node], ancestors):
        if (
            candidate.name in {"Exception", "BaseException"}
            and candidate.root().name == EXCEPTIONS_MODULE
        ):
            return True
    return False
+
+
def error_of_type(
    handler: nodes.ExceptHandler,
    error_type: str | type[Exception] | tuple[str | type[Exception], ...],
) -> bool:
    """Check if the given exception handler catches
    the given error_type.

    The handler parameter is a node, representing an ExceptHandler node.
    The error_type can be an exception, such as ZeroDivisionError,
    the name of an exception, or it can be a tuple of errors.
    The function will return True if the handler catches any of the
    given errors.
    """
    if not isinstance(error_type, tuple):
        error_type = (error_type,)
    # Normalise every entry to its exception name.
    expected_errors = {
        error if isinstance(error, str) else error.__name__ for error in error_type
    }
    if not handler.type:
        return False
    return handler.catch(expected_errors)  # type: ignore[no-any-return]


def decorated_with_property(node: nodes.FunctionDef) -> bool:
    """Detect if the given function node is decorated with a property."""
    if not node.decorators:
        return False
    for dec_node in node.decorators.nodes:
        try:
            found = _is_property_decorator(dec_node)
        except astroid.InferenceError:
            continue
        if found:
            return True
    return False
+
+
def _is_property_kind(node: nodes.NodeNG, *kinds: str) -> bool:
    """Return True if *node* is a function decorated with ``@<prop>.<kind>``
    for one of the given attribute kinds (e.g. ``setter``, ``deleter``).
    """
    if not isinstance(node, (astroid.UnboundMethod, nodes.FunctionDef)):
        return False
    if not node.decorators:
        return False
    return any(
        isinstance(dec, nodes.Attribute) and dec.attrname in kinds
        for dec in node.decorators.nodes
    )
+
+
def is_property_setter(node: nodes.NodeNG) -> bool:
    """Check if the given node is a property setter (``@prop.setter``)."""
    return _is_property_kind(node, "setter")


def is_property_deleter(node: nodes.NodeNG) -> bool:
    """Check if the given node is a property deleter (``@prop.deleter``)."""
    return _is_property_kind(node, "deleter")


def is_property_setter_or_deleter(node: nodes.NodeNG) -> bool:
    """Check if the given node is either a property setter or a deleter."""
    return _is_property_kind(node, "setter", "deleter")
+
+
def _is_property_decorator(decorator: nodes.Name) -> bool:
    """Return True if *decorator* infers to ``property`` (or a subclass of it,
    or ``functools.cached_property``), or to a factory function whose single
    return value is itself a property-decorated function.
    """
    for inferred in decorator.infer():
        if isinstance(inferred, nodes.ClassDef):
            if inferred.qname() in {"builtins.property", "functools.cached_property"}:
                return True
            # Subclasses of the builtin ``property`` also count.
            for ancestor in inferred.ancestors():
                if ancestor.name == "property" and ancestor.root().name == "builtins":
                    return True
        elif isinstance(inferred, nodes.FunctionDef):
            # If decorator is function, check if it has exactly one return
            # and the return is itself a function decorated with property
            returns: list[nodes.Return] = list(
                inferred._get_return_nodes_skip_functions()
            )
            if len(returns) == 1 and isinstance(
                returns[0].value, (nodes.Name, nodes.Attribute)
            ):
                inferred = safe_infer(returns[0].value)
                if (
                    inferred
                    and isinstance(inferred, astroid.objects.Property)
                    and isinstance(inferred.function, nodes.FunctionDef)
                ):
                    return decorated_with_property(inferred.function)
    return False
+
+
def decorated_with(
    func: (
        nodes.ClassDef | nodes.FunctionDef | astroid.BoundMethod | astroid.UnboundMethod
    ),
    qnames: Iterable[str],
) -> bool:
    """Determine if the `func` node has a decorator with the qualified name `qname`."""
    decorators = func.decorators.nodes if func.decorators else []
    for decorator_node in decorators:
        if isinstance(decorator_node, nodes.Call):
            # We only want to infer the function name
            decorator_node = decorator_node.func
        try:
            # A decorator matches either by short name or fully qualified name.
            if any(
                i.name in qnames or i.qname() in qnames
                for i in decorator_node.infer()
                if i is not None and not isinstance(i, util.UninferableBase)
            ):
                return True
        except astroid.InferenceError:
            continue
    return False
+
+
def uninferable_final_decorators(
    node: nodes.Decorators,
) -> list[nodes.Attribute | nodes.Name | None]:
    """Return a list of uninferable `typing.final` decorators in `node`.

    This function is used to determine if the `typing.final` decorator is used
    with an unsupported Python version; the decorator cannot be inferred when
    using a Python version lower than 3.8.

    Returns an empty list when every ``final`` decorator could be inferred.
    """
    decorators = []
    for decorator in getattr(node, "nodes", []):
        import_nodes: tuple[nodes.Import | nodes.ImportFrom] | None = None

        # Get the `Import` node. The decorator is of the form: @module.name
        if isinstance(decorator, nodes.Attribute):
            inferred = safe_infer(decorator.expr)
            if isinstance(inferred, nodes.Module) and inferred.qname() == "typing":
                _, import_nodes = decorator.expr.lookup(decorator.expr.name)

        # Get the `ImportFrom` node. The decorator is of the form: @name
        elif isinstance(decorator, nodes.Name):
            _, import_nodes = decorator.lookup(decorator.name)

        # The `final` decorator is expected to be found in the
        # import_nodes. Continue if we don't find any `Import` or `ImportFrom`
        # nodes for this decorator.
        if not import_nodes:
            continue
        import_node = import_nodes[0]

        if not isinstance(import_node, (astroid.Import, astroid.ImportFrom)):
            continue

        import_names = dict(import_node.names)

        # Check if the import is of the form: `from typing import final`
        is_from_import = ("final" in import_names) and import_node.modname == "typing"

        # Check if the import is of the form: `import typing`
        is_import = ("typing" in import_names) and getattr(
            decorator, "attrname", None
        ) == "final"

        if is_from_import or is_import:
            inferred = safe_infer(decorator)
            if inferred is None or isinstance(inferred, util.UninferableBase):
                decorators.append(decorator)
    return decorators


@lru_cache(maxsize=1024)
def unimplemented_abstract_methods(
    node: nodes.ClassDef, is_abstract_cb: nodes.FunctionDef | None = None
) -> dict[str, nodes.FunctionDef]:
    """Get the unimplemented abstract methods for the given *node*.

    A method can be considered abstract if the callback *is_abstract_cb*
    returns a ``True`` value. The check defaults to verifying that
    a method is decorated with abstract methods.
    It will return a dictionary of abstract method
    names and their inferred objects.
    """
    if is_abstract_cb is None:
        is_abstract_cb = partial(decorated_with, qnames=ABC_METHODS)
    visited: dict[str, nodes.FunctionDef] = {}
    try:
        mro = reversed(node.mro())
    except astroid.ResolveError:
        # Probably inconsistent hierarchy, don't try to figure this out here.
        return {}
    # Walk the MRO base-first so subclasses can override (and thereby
    # implement) abstract methods recorded from their ancestors.
    for ancestor in mro:
        for obj in ancestor.values():
            inferred = obj
            if isinstance(obj, nodes.AssignName):
                inferred = safe_infer(obj)
                if not inferred:
                    # Might be an abstract function,
                    # but since we don't have enough information
                    # in order to take this decision, we're taking
                    # the *safe* decision instead.
                    if obj.name in visited:
                        del visited[obj.name]
                    continue
                if not isinstance(inferred, nodes.FunctionDef):
                    if obj.name in visited:
                        del visited[obj.name]
            if isinstance(inferred, nodes.FunctionDef):
                # It's critical to use the original name,
                # since after inferring, an object can be something
                # else than expected, as in the case of the
                # following assignment.
                #
                # class A:
                #     def keys(self): pass
                #     __iter__ = keys
                abstract = is_abstract_cb(inferred)
                if abstract:
                    visited[obj.name] = inferred
                elif not abstract and obj.name in visited:
                    del visited[obj.name]
    return visited
+
+
def find_try_except_wrapper_node(
    node: nodes.NodeNG,
) -> nodes.ExceptHandler | nodes.Try | None:
    """Return the ExceptHandler or the Try node in which the node is."""
    wrappers = (nodes.ExceptHandler, nodes.Try)
    child = node
    # Climb until the parent is a try/except wrapper (or we run out of parents).
    while child and not isinstance(child.parent, wrappers):
        child = child.parent
    if child is None:
        return None
    parent = child.parent
    return parent if isinstance(parent, wrappers) else None


def find_except_wrapper_node_in_scope(
    node: nodes.NodeNG,
) -> nodes.ExceptHandler | None:
    """Return the ExceptHandler in which the node is, without going out of scope."""
    for ancestor in node.node_ancestors():
        if isinstance(ancestor, astroid.scoped_nodes.LocalsDictNodeNG):
            # Crossing a new scope (function/class) means any enclosing
            # ``except`` wraps the *definition*, not the code actually
            # running, so stop looking.
            return None
        if isinstance(ancestor, nodes.ExceptHandler):
            return ancestor
    return None


def is_from_fallback_block(node: nodes.NodeNG) -> bool:
    """Check if the given node is from a fallback import block."""
    context = find_try_except_wrapper_node(node)
    if not context:
        return False

    if isinstance(context, nodes.ExceptHandler):
        # node is inside a handler: the "other" imports live in the try body.
        other_body = context.parent.body
        handlers = context.parent.handlers
    else:
        # node is inside the try body: the fallbacks live in the handlers.
        other_body = itertools.chain.from_iterable(
            handler.body for handler in context.handlers
        )
        handlers = context.handlers

    has_fallback_imports = any(
        isinstance(import_node, (nodes.ImportFrom, nodes.Import))
        for import_node in other_body
    )
    ignores_import_error = _except_handlers_ignores_exceptions(
        handlers, (ImportError, ModuleNotFoundError)
    )
    return ignores_import_error or has_fallback_imports
+
+
def _except_handlers_ignores_exceptions(
    handlers: Iterable[nodes.ExceptHandler],
    exceptions: tuple[type[ImportError], type[ModuleNotFoundError]],
) -> bool:
    """Return True if any of *handlers* catches one of *exceptions*."""
    func = partial(error_of_type, error_type=exceptions)
    return any(func(handler) for handler in handlers)
+
+
def get_exception_handlers(
    node: nodes.NodeNG, exception: type[Exception] | str = Exception
) -> list[nodes.ExceptHandler] | None:
    """Return the collections of handlers handling the exception in arguments.

    Args:
        node (nodes.NodeNG): A node that is potentially wrapped in a try, except.
        exception (builtin.Exception or str): exception or name of the exception.

    Returns:
        list: the collection of handlers that are handling the exception. The
        list is empty when the node is not wrapped in a ``try`` or when no
        handler matches.
    """
    context = find_try_except_wrapper_node(node)
    if isinstance(context, nodes.Try):
        return [
            handler for handler in context.handlers if error_of_type(handler, exception)
        ]
    return []


def get_contextlib_with_statements(node: nodes.NodeNG) -> Iterator[nodes.With]:
    """Get all contextlib.with statements in the ancestors of the given node."""
    yield from (
        ancestor
        for ancestor in node.node_ancestors()
        if isinstance(ancestor, nodes.With)
    )


def _suppresses_exception(
    call: nodes.Call, exception: type[Exception] | str = Exception
) -> bool:
    """Check if the given node suppresses the given exception."""
    target = exception if isinstance(exception, str) else exception.__name__
    for arg in call.args:
        inferred = safe_infer(arg)
        # Either a single exception class, or a tuple of them.
        if isinstance(inferred, nodes.ClassDef) and inferred.name == target:
            return True
        if isinstance(inferred, nodes.Tuple):
            inferred_elts = (safe_infer(elt) for elt in inferred.elts)
            if any(
                isinstance(elt, nodes.ClassDef) and elt.name == target
                for elt in inferred_elts
            ):
                return True
    return False
+
+
def get_contextlib_suppressors(
    node: nodes.NodeNG, exception: type[Exception] | str = Exception
) -> Iterator[nodes.With]:
    """Return the contextlib suppressors handling the exception.

    Args:
        node (nodes.NodeNG): A node that is potentially wrapped in a
            ``contextlib.suppress``.
        exception (builtin.Exception or str): exception or name of the exception.

    Yields:
        nodes.With: A with node that is suppressing the exception.
    """
    for with_node in get_contextlib_with_statements(node):
        for item, _ in with_node.items:
            if isinstance(item, nodes.Call):
                inferred = safe_infer(item.func)
                # Only ``contextlib.suppress(...)`` context managers count.
                if (
                    isinstance(inferred, nodes.ClassDef)
                    and inferred.qname() == "contextlib.suppress"
                ):
                    if _suppresses_exception(item, exception):
                        yield with_node
+
+
def is_node_inside_try_except(node: nodes.Raise) -> bool:
    """Check if the node is directly under a Try/Except statement
    (but not under an ExceptHandler!).

    Args:
        node (nodes.Raise): the node raising the exception.

    Returns:
        bool: True if the node is inside a try/except statement, False otherwise.
    """
    wrapper = find_try_except_wrapper_node(node)
    return isinstance(wrapper, nodes.Try)


def node_ignores_exception(
    node: nodes.NodeNG, exception: type[Exception] | str = Exception
) -> bool:
    """Check if the node is in a Try which handles the given exception.

    If the exception is not given, the function is going to look for bare
    excepts.
    """
    if get_exception_handlers(node, exception):
        return True
    # Fall back to ``contextlib.suppress`` context managers.
    return any(get_contextlib_suppressors(node, exception))


@lru_cache(maxsize=1024)
def class_is_abstract(node: nodes.ClassDef) -> bool:
    """Return true if the given class node should be considered as an abstract
    class.
    """
    # Protocol classes are considered "abstract"
    if is_protocol_class(node):
        return True

    # Only check for explicit metaclass=ABCMeta on this specific class
    meta = node.declared_metaclass()
    if meta is not None:
        if meta.name == "ABCMeta" and meta.root().name in ABC_MODULES:
            return True

    for ancestor in node.ancestors():
        if ancestor.name == "ABC" and ancestor.root().name in ABC_MODULES:
            # abc.ABC inheritance
            return True

    # An abstract method defined directly on this class also marks it abstract.
    for method in node.methods():
        if method.parent.frame() is node:
            if method.is_abstract(pass_is_abstract=False):
                return True
    return False
+
+
def _supports_protocol_method(value: nodes.NodeNG, attr: str) -> bool:
    """Return True if *value* defines *attr* other than as a plain constant.

    An attribute assigned a constant (or a container of constants) cannot
    actually be called, so it does not count as protocol support.
    """
    try:
        attributes = value.getattr(attr)
    except astroid.NotFoundError:
        return False

    first = attributes[0]

    # Return False if a constant is assigned
    if isinstance(first, nodes.AssignName):
        this_assign_parent = get_node_first_ancestor_of_type(
            first, (nodes.Assign, nodes.NamedExpr)
        )
        if this_assign_parent is None:  # pragma: no cover
            # Cannot imagine this being None, but return True to avoid false positives
            return True
        if isinstance(this_assign_parent.value, nodes.BaseContainer):
            if all(isinstance(n, nodes.Const) for n in this_assign_parent.value.elts):
                return False
        if isinstance(this_assign_parent.value, nodes.Const):
            return False
    return True
+
+
def is_comprehension(node: nodes.NodeNG) -> bool:
    """Return True for any comprehension or generator-expression node."""
    return isinstance(
        node, (nodes.ListComp, nodes.SetComp, nodes.DictComp, nodes.GeneratorExp)
    )
+
+
def _supports_mapping_protocol(value: nodes.NodeNG) -> bool:
    """True if *value* defines both ``__getitem__`` and ``keys``."""
    return _supports_protocol_method(
        value, GETITEM_METHOD
    ) and _supports_protocol_method(value, KEYS_METHOD)
+
+
def _supports_membership_test_protocol(value: nodes.NodeNG) -> bool:
    """True if *value* defines ``__contains__``."""
    return _supports_protocol_method(value, CONTAINS_METHOD)
+
+
def _supports_iteration_protocol(value: nodes.NodeNG) -> bool:
    """True if *value* defines ``__iter__`` or the legacy ``__getitem__``."""
    return _supports_protocol_method(value, ITER_METHOD) or _supports_protocol_method(
        value, GETITEM_METHOD
    )
+
+
def _supports_async_iteration_protocol(value: nodes.NodeNG) -> bool:
    """True if *value* defines ``__aiter__``."""
    return _supports_protocol_method(value, AITER_METHOD)
+
+
def _supports_getitem_protocol(value: nodes.NodeNG) -> bool:
    """True if *value* defines ``__getitem__``."""
    return _supports_protocol_method(value, GETITEM_METHOD)
+
+
def _supports_setitem_protocol(value: nodes.NodeNG) -> bool:
    """True if *value* defines ``__setitem__``."""
    return _supports_protocol_method(value, SETITEM_METHOD)
+
+
def _supports_delitem_protocol(value: nodes.NodeNG) -> bool:
    """True if *value* defines ``__delitem__``."""
    return _supports_protocol_method(value, DELITEM_METHOD)
+
+
+def _is_abstract_class_name(name: str) -> bool:
+    lname = name.lower()
+    is_mixin = lname.endswith("mixin")
+    is_abstract = lname.startswith("abstract")
+    is_base = lname.startswith("base") or lname.endswith("base")
+    return is_mixin or is_abstract or is_base
+
+
def is_inside_abstract_class(node: nodes.NodeNG) -> bool:
    """True if *node* is lexically inside a class that is abstract, or whose
    name suggests it is abstract (mixin/abstract/base naming).
    """
    current = node
    while current is not None:
        if isinstance(current, nodes.ClassDef):
            if class_is_abstract(current):
                return True
            class_name = getattr(current, "name", None)
            if class_name is not None and _is_abstract_class_name(class_name):
                return True
        current = current.parent
    return False
+
+
def _supports_protocol(
    value: nodes.NodeNG, protocol_callback: Callable[[nodes.NodeNG], bool]
) -> bool:
    """Return True when *value* satisfies the protocol checked by
    *protocol_callback*.

    Unknown bases or a dynamic ``__getattr__`` make the check permissive
    (assume support) to avoid false positives.
    """
    if isinstance(value, nodes.ClassDef):
        if not has_known_bases(value):
            return True
        # classobj can only be iterable if it has an iterable metaclass
        meta = value.metaclass()
        if meta is not None:
            if protocol_callback(meta):
                return True
    if isinstance(value, astroid.BaseInstance):
        if not has_known_bases(value):
            return True
        if value.has_dynamic_getattr():
            return True
        if protocol_callback(value):
            return True

    if isinstance(value, nodes.ComprehensionScope):
        return True

    # Unwrap proxies (e.g. bound methods) and check the proxied instance.
    if (
        isinstance(value, astroid.bases.Proxy)
        and isinstance(value._proxied, astroid.BaseInstance)
        and has_known_bases(value._proxied)
    ):
        value = value._proxied
        return protocol_callback(value)

    return False
+
+
def is_iterable(value: nodes.NodeNG, check_async: bool = False) -> bool:
    """True if *value* supports the (async) iteration protocol."""
    checker = (
        _supports_async_iteration_protocol
        if check_async
        else _supports_iteration_protocol
    )
    return _supports_protocol(value, checker)
+
+
def is_mapping(value: nodes.NodeNG) -> bool:
    """True if *value* supports the mapping protocol (``__getitem__`` + ``keys``)."""
    return _supports_protocol(value, _supports_mapping_protocol)
+
+
def supports_membership_test(value: nodes.NodeNG) -> bool:
    """True if ``x in value`` is valid: ``__contains__`` or plain iterability."""
    if _supports_protocol(value, _supports_membership_test_protocol):
        return True
    return is_iterable(value)
+
+
def supports_getitem(value: nodes.NodeNG, node: nodes.NodeNG) -> bool:
    """True if subscripting *value* is valid at the position of *node*."""
    if isinstance(value, nodes.ClassDef):
        if _supports_protocol_method(value, CLASS_GETITEM_METHOD):
            return True
        # Subscripting a bare class is fine inside a postponed annotation.
        in_annotation = is_postponed_evaluation_enabled(
            node
        ) and is_node_in_type_annotation_context(node)
        if in_annotation:
            return True
    return _supports_protocol(value, _supports_getitem_protocol)
+
+
def supports_setitem(value: nodes.NodeNG, _: nodes.NodeNG) -> bool:
    """True if *value* defines ``__setitem__``."""
    return _supports_protocol(value, _supports_setitem_protocol)
+
+
def supports_delitem(value: nodes.NodeNG, _: nodes.NodeNG) -> bool:
    """True if *value* defines ``__delitem__``."""
    return _supports_protocol(value, _supports_delitem_protocol)
+
+
+def _get_python_type_of_node(node: nodes.NodeNG) -> str | None:
+    pytype: Callable[[], str] | None = getattr(node, "pytype", None)
+    if callable(pytype):
+        return pytype()
+    return None


@lru_cache(maxsize=1024)
def safe_infer(
    node: nodes.NodeNG,
    context: InferenceContext | None = None,
    *,
    compare_constants: bool = False,
    compare_constructors: bool = False,
) -> InferenceResult | None:
    """Return the inferred value for the given node.

    Return None if inference failed or if there is some ambiguity (more than
    one node has been inferred of different types).

    If compare_constants is True and if multiple constants are inferred,
    unequal inferred values are also considered ambiguous and return None.

    If compare_constructors is True and if multiple classes are inferred,
    constructors with different signatures are held ambiguous and return None.
    """
    inferred_types: set[str | None] = set()
    try:
        infer_gen = node.infer(context=context)
        value = next(infer_gen)
    except astroid.InferenceError:
        return None
    except Exception as e:  # pragma: no cover
        raise AstroidError from e

    # Record the python type of the first result; later results must agree.
    if not isinstance(value, util.UninferableBase):
        inferred_types.add(_get_python_type_of_node(value))

    # pylint: disable = too-many-try-statements
    try:
        for inferred in infer_gen:
            inferred_type = _get_python_type_of_node(inferred)
            if inferred_type not in inferred_types:
                return None  # If there is ambiguity on the inferred node.
            if (
                compare_constants
                and isinstance(inferred, nodes.Const)
                and isinstance(value, nodes.Const)
                and inferred.value != value.value
            ):
                return None
            if (
                isinstance(inferred, nodes.FunctionDef)
                and isinstance(value, nodes.FunctionDef)
                and function_arguments_are_ambiguous(inferred, value)
            ):
                return None
            if (
                compare_constructors
                and isinstance(inferred, nodes.ClassDef)
                and isinstance(value, nodes.ClassDef)
                and class_constructors_are_ambiguous(inferred, value)
            ):
                return None
    except astroid.InferenceError:
        return None  # There is some kind of ambiguity
    except StopIteration:
        return value
    except Exception as e:  # pragma: no cover
        raise AstroidError from e
    return value if len(inferred_types) <= 1 else None
+
+
@lru_cache(maxsize=512)
def infer_all(
    node: nodes.NodeNG, context: InferenceContext | None = None
) -> list[InferenceResult]:
    """Return every inference result for *node*, or [] on inference failure."""
    try:
        results = node.infer(context=context)
        return list(results)
    except astroid.InferenceError:
        return []
    except Exception as e:  # pragma: no cover
        raise AstroidError from e
+
+
def function_arguments_are_ambiguous(
    func1: nodes.FunctionDef, func2: nodes.FunctionDef
) -> bool:
    """Return True when the two functions' signatures are ambiguous.

    Signatures are ambiguous when argument names differ, or when any pair of
    default values differs (defaults that are neither constants nor names are
    always treated as ambiguous).
    """
    if func1.argnames() != func2.argnames():
        return True
    # Check ambiguity among function default values
    pairs_of_defaults = [
        (func1.args.defaults, func2.args.defaults),
        (func1.args.kw_defaults, func2.args.kw_defaults),
    ]
    for zippable_default in pairs_of_defaults:
        if None in zippable_default:
            continue
        if len(zippable_default[0]) != len(zippable_default[1]):
            return True
        for default1, default2 in zip(*zippable_default):
            if isinstance(default1, nodes.Const) and isinstance(default2, nodes.Const):
                if default1.value != default2.value:
                    return True
            elif isinstance(default1, nodes.Name) and isinstance(default2, nodes.Name):
                if default1.name != default2.name:
                    return True
            else:
                return True
    return False
+
+
def class_constructors_are_ambiguous(
    class1: nodes.ClassDef, class2: nodes.ClassDef
) -> bool:
    """True when both classes define ``__init__`` with differing signatures."""
    try:
        init1 = class1.local_attr("__init__")[0]
        init2 = class2.local_attr("__init__")[0]
    except astroid.NotFoundError:
        return False
    if isinstance(init1, nodes.FunctionDef) and isinstance(init2, nodes.FunctionDef):
        return function_arguments_are_ambiguous(init1, init2)
    return False
+
+
def has_known_bases(
    klass: nodes.ClassDef, context: InferenceContext | None = None
) -> bool:
    """Return true if all base classes of a class could be inferred."""
    # The result is memoised on the class node itself.
    try:
        return klass._all_bases_known  # type: ignore[no-any-return]
    except AttributeError:
        pass
    for base in klass.bases:
        result = safe_infer(base, context=context)
        # A base is "unknown" when it can't be inferred to a class, refers back
        # to this class, or itself (recursively) has unknown bases.
        if (
            not isinstance(result, nodes.ClassDef)
            or result is klass
            or not has_known_bases(result, context=context)
        ):
            klass._all_bases_known = False
            return False
    klass._all_bases_known = True
    return True
+
+
def is_none(node: nodes.NodeNG) -> bool:
    """True for ``None`` itself, a ``Const(None)`` node, or the name ``None``."""
    if node is None:
        return True
    if isinstance(node, nodes.Const):
        return node.value is None
    return isinstance(node, nodes.Name) and node.name == "None"
+
+
def node_type(node: nodes.NodeNG) -> SuccessfulInferenceResult | None:
    """Return the inferred type for `node`.

    If there is more than one possible type, or if inferred type is Uninferable or None,
    return None
    """
    # check there is only one possible type for the assign node. Else we
    # don't handle it for now
    types: set[SuccessfulInferenceResult] = set()
    try:
        for var_type in node.infer():
            # Skip unusable results; only concrete types count.
            if isinstance(var_type, util.UninferableBase) or is_none(var_type):
                continue
            types.add(var_type)
            if len(types) > 1:
                return None
    except astroid.InferenceError:
        return None
    return types.pop() if types else None
+
+
def is_registered_in_singledispatch_function(node: nodes.FunctionDef) -> bool:
    """Check if the given function node is a singledispatch function."""
    singledispatch_qnames = (
        "functools.singledispatch",
        "singledispatch.singledispatch",
    )

    if not isinstance(node, nodes.FunctionDef):
        return False

    decorators = node.decorators.nodes if node.decorators else []
    for decorator in decorators:
        # func.register are function calls or register attributes
        # when the function is annotated with types
        if isinstance(decorator, nodes.Call):
            func = decorator.func
        elif isinstance(decorator, nodes.Attribute):
            func = decorator
        else:
            continue

        if not isinstance(func, nodes.Attribute) or func.attrname != "register":
            continue

        # Infer what ``register`` is bound to; only plain functions qualify.
        try:
            func_def = next(func.expr.infer())
        except astroid.InferenceError:
            continue

        if isinstance(func_def, nodes.FunctionDef):
            return decorated_with(func_def, singledispatch_qnames)

    return False
+
+
def find_inferred_fn_from_register(node: nodes.NodeNG) -> nodes.FunctionDef | None:
    """Given a ``@x.register``-style decorator node, infer and return the
    function it registers onto, or None when it is not such a decorator.
    """
    # func.register are function calls or register attributes
    # when the function is annotated with types
    if isinstance(node, nodes.Call):
        register_attr = node.func
    elif isinstance(node, nodes.Attribute):
        register_attr = node
    else:
        return None

    if not (
        isinstance(register_attr, nodes.Attribute)
        and register_attr.attrname == "register"
    ):
        return None

    inferred = safe_infer(register_attr.expr)
    return inferred if isinstance(inferred, nodes.FunctionDef) else None
+
+
def is_registered_in_singledispatchmethod_function(node: nodes.FunctionDef) -> bool:
    """Check if the given function node is a singledispatchmethod function."""
    singledispatchmethod_qnames = (
        "functools.singledispatchmethod",
        "singledispatch.singledispatchmethod",
    )

    if not node.decorators:
        return False
    for decorator in node.decorators.nodes:
        registered_on = find_inferred_fn_from_register(decorator)
        if registered_on:
            return decorated_with(registered_on, singledispatchmethod_qnames)
    return False


def get_node_last_lineno(node: nodes.NodeNG) -> int:
    """Get the last lineno of the given node.

    For a simple statement this will just be node.lineno,
    but for a node that has child statements (e.g. a method) this will be the lineno of the last
    child statement recursively.
    """
    # Clause precedence when recursing: finalbody > orelse > handlers > body.
    # 'finalbody' is always the last clause in a try statement, if present
    if getattr(node, "finalbody", False):
        return get_node_last_lineno(node.finalbody[-1])
    # For if, while, and for statements 'orelse' is always the last clause.
    # For try statements 'orelse' is the last in the absence of a 'finalbody'
    if getattr(node, "orelse", False):
        return get_node_last_lineno(node.orelse[-1])
    # try statements have the 'handlers' last if there is no 'orelse' or 'finalbody'
    if getattr(node, "handlers", False):
        return get_node_last_lineno(node.handlers[-1])
    # All compound statements have a 'body'
    if getattr(node, "body", False):
        return get_node_last_lineno(node.body[-1])
    # Not a compound statement
    return node.lineno  # type: ignore[no-any-return]
+
+
def is_postponed_evaluation_enabled(node: nodes.NodeNG) -> bool:
    """Check if the postponed evaluation of annotations is enabled."""
    # PEP 563 is active for a module iff it has `from __future__ import annotations`.
    return "annotations" in node.root().future_imports


-def is_node_in_type_annotation_context(node: nodes.NodeNG) ->bool:
def is_node_in_type_annotation_context(node: nodes.NodeNG) -> bool:
    """Check if node is in type annotation context.

    Check for 'AnnAssign', function 'Arguments',
    or part of function return type annotation.
    """
    # pylint: disable=too-many-boolean-expressions
    # Walk up the ancestor chain; at each step check whether the node we came
    # from occupies an annotation slot of its parent.
    current_node, parent_node = node, node.parent
    while True:
        if (
            # Annotated assignment: `x: <annotation> = ...`
            isinstance(parent_node, nodes.AnnAssign)
            and parent_node.annotation == current_node
            # Any of a function signature's argument annotations
            or isinstance(parent_node, nodes.Arguments)
            and current_node
            in (
                *parent_node.annotations,
                *parent_node.posonlyargs_annotations,
                *parent_node.kwonlyargs_annotations,
                parent_node.varargannotation,
                parent_node.kwargannotation,
            )
            # Function return annotation: `def f() -> <annotation>:`
            or isinstance(parent_node, nodes.FunctionDef)
            and parent_node.returns == current_node
        ):
            return True
        current_node, parent_node = parent_node, parent_node.parent
        # NOTE(review): stepping past the Module would make parent_node None
        # and the next iteration would raise AttributeError; presumably
        # callers only pass nodes that sit below a Module — verify.
        if isinstance(parent_node, nodes.Module):
            return False
+
+
def is_subclass_of(child: nodes.ClassDef, parent: nodes.ClassDef) -> bool:
    """Check if first node is a subclass of second node.

    :param child: Node to check for subclass.
    :param parent: Node to check for superclass.
    :returns: True if child is derived from parent. False otherwise.
    """
    # Defensive: either argument may turn out not to be a ClassDef at all.
    if not all(isinstance(node, nodes.ClassDef) for node in (child, parent)):
        return False

    for ancestor in child.ancestors():
        try:
            if astroid.helpers.is_subtype(ancestor, parent):
                return True
        # An undecidable type hierarchy is treated the same as "not a
        # subtype" for this particular ancestor.
        except astroid.exceptions._NonDeducibleTypeHierarchy:
            continue
    return False


 @lru_cache(maxsize=1024)
-def is_overload_stub(node: nodes.NodeNG) ->bool:
+def is_overload_stub(node: nodes.NodeNG) -> bool:
     """Check if a node is a function stub decorated with typing.overload.

     :param node: Node to check.
     :returns: True if node is an overload function stub. False otherwise.
     """
-    pass
+    decorators = getattr(node, "decorators", None)
+    return bool(decorators and decorated_with(node, ["typing.overload", "overload"]))


-def is_protocol_class(cls: nodes.NodeNG) ->bool:
def is_protocol_class(cls: nodes.NodeNG) -> bool:
    """Check if the given node represents a protocol class.

    :param cls: The node to check
    :returns: True if the node is or inherits from typing.Protocol directly, false otherwise.
    """
    if not isinstance(cls, nodes.ClassDef):
        return False

    # The class is one of the Protocol classes itself.
    if cls.qname() in TYPING_PROTOCOLS:
        return True

    # Otherwise look for a direct base that infers to typing.Protocol.
    for base_node in cls.bases:
        try:
            if any(
                inferred.qname() in TYPING_PROTOCOLS for inferred in base_node.infer()
            ):
                return True
        except astroid.InferenceError:
            continue
    return False
+
+
def is_call_of_name(node: nodes.NodeNG, name: str) -> bool:
    """Checks if node is a function call with the given name."""
    if not isinstance(node, nodes.Call):
        return False
    callee = node.func
    return isinstance(callee, nodes.Name) and callee.name == name


-def is_test_condition(node: nodes.NodeNG, parent: (nodes.NodeNG | None)=None
-    ) ->bool:
def is_test_condition(
    node: nodes.NodeNG,
    parent: nodes.NodeNG | None = None,
) -> bool:
    """Returns true if the given node is being tested for truthiness."""
    parent = parent or node.parent
    # A comprehension tests every expression in its `if` clauses.
    if isinstance(parent, nodes.Comprehension):
        return node in parent.ifs
    # while/if/ternary/assert all test their `test` expression (and anything
    # nested inside it).
    if isinstance(parent, (nodes.While, nodes.If, nodes.IfExp, nodes.Assert)):
        return node is parent.test or parent.test.parent_of(node)
    # Finally, `bool(...)` explicitly tests its argument.
    return is_call_of_name(parent, "bool") and parent.parent_of(node)


-def is_classdef_type(node: nodes.ClassDef) ->bool:
def is_classdef_type(node: nodes.ClassDef) -> bool:
    """Test if ClassDef node is Type."""
    return node.name == "type" or any(
        isinstance(base, nodes.Name) and base.name == "type" for base in node.bases
    )


-def is_attribute_typed_annotation(node: (nodes.ClassDef | astroid.Instance),
-    attr_name: str) ->bool:
def is_attribute_typed_annotation(
    node: nodes.ClassDef | astroid.Instance, attr_name: str
) -> bool:
    """Test if attribute is typed annotation in current node
    or any base nodes.
    """
    # Declared locally as an annotated assignment?
    local_attr = node.locals.get(attr_name, [None])[0]
    if isinstance(local_attr, nodes.AssignName) and isinstance(
        local_attr.parent, nodes.AnnAssign
    ):
        return True
    # Otherwise, any base class declaring it via an annotated assignment counts.
    return any(
        isinstance(inferred_base := safe_infer(base), nodes.ClassDef)
        and is_attribute_typed_annotation(inferred_base, attr_name)
        for base in node.bases
    )
+
+
def is_enum(node: nodes.ClassDef) -> bool:
    """Return True if the class is ``enum.Enum`` itself."""
    return node.root().name == "enum" and node.name == "Enum"  # type: ignore[no-any-return]
+
+
def is_assign_name_annotated_with(node: nodes.AssignName, typing_name: str) -> bool:
    """Test if AssignName node has `typing_name` annotation.

    Especially useful to check for `typing._SpecialForm` instances
    like: `Union`, `Optional`, `Literal`, `ClassVar`, `Final`.
    """
    assign = node.parent
    if not isinstance(assign, nodes.AnnAssign):
        return False
    annotation = assign.annotation
    # Subscripted annotations (e.g. `ClassVar[int]`) are identified by the
    # subscripted value.
    if isinstance(annotation, nodes.Subscript):
        annotation = annotation.value
    if isinstance(annotation, nodes.Name):
        return annotation.name == typing_name
    if isinstance(annotation, nodes.Attribute):
        return annotation.attrname == typing_name
    return False
+
+
def get_iterating_dictionary_name(node: nodes.For | nodes.Comprehension) -> str | None:
    """Get the name of the dictionary which keys are being iterated over on
    a ``nodes.For`` or ``nodes.Comprehension`` node.

    If the iterating object is not either the keys method of a dictionary
    or a dictionary itself, this returns None.
    """
    # Is it a proper keys call?
    if (
        isinstance(node.iter, nodes.Call)
        and isinstance(node.iter.func, nodes.Attribute)
        and node.iter.func.attrname == "keys"
    ):
        inferred = safe_infer(node.iter.func)
        if not isinstance(inferred, astroid.BoundMethod):
            # `.keys` did not resolve to a bound method, so this is not a
            # dictionary `.keys()` call after all.
            return None
        # Strip the trailing ".keys" from the source text to recover the
        # dictionary expression itself.
        return node.iter.as_string().rpartition(".keys")[0]  # type: ignore[no-any-return]

    # Is it a dictionary?
    if isinstance(node.iter, (nodes.Name, nodes.Attribute)):
        inferred = safe_infer(node.iter)
        if not isinstance(inferred, nodes.Dict):
            return None
        return node.iter.as_string()  # type: ignore[no-any-return]

    return None


-def get_subscript_const_value(node: nodes.Subscript) ->nodes.Const:
def get_subscript_const_value(node: nodes.Subscript) -> nodes.Const:
    """Returns the value 'subscript.slice' of a Subscript node.

    :param node: Subscript Node to extract value from
    :returns: Const Node containing subscript value
    :raises InferredTypeError: if the subscript node cannot be inferred as a Const
    """
    value = safe_infer(node.slice)
    if isinstance(value, nodes.Const):
        return value
    raise InferredTypeError("Subscript.slice cannot be inferred as a nodes.Const")

-def get_import_name(importnode: ImportNode, modname: (str | None)) ->(str |
-    None):
+
def get_import_name(importnode: ImportNode, modname: str | None) -> str | None:
    """Get a prepared module name from the given import node.

    In the case of relative imports, this will return the
    absolute qualified module name; otherwise the given
    module name is returned unchanged.

    :param importnode: node representing import statement.
    :param modname: module name from import statement.

    :returns: absolute qualified module name of the module
        used in import.
    """
    # Only relative `from ... import` statements need resolving.
    if not (isinstance(importnode, nodes.ImportFrom) and importnode.level):
        return modname
    root = importnode.root()
    if not isinstance(root, nodes.Module):
        return modname
    try:
        return root.relative_to_absolute_name(  # type: ignore[no-any-return]
            modname, level=importnode.level
        )
    except TooManyLevelsError:
        # More leading dots than there are package levels: give up and
        # return the name as-is.
        return modname
+
+
def is_sys_guard(node: nodes.If) -> bool:
    """Return True if IF stmt is a sys.version_info guard.

    >>> import sys
    >>> if sys.version_info >= (3, 8):
    >>>     from typing import Literal
    >>> else:
    >>>     from typing_extensions import Literal
    """
    test = node.test
    # `sys.version_info` comparisons (optionally subscripted, e.g.
    # `sys.version_info[0] >= 3`).
    if isinstance(test, nodes.Compare):
        lhs = test.left
        if isinstance(lhs, nodes.Subscript):
            lhs = lhs.value
        return isinstance(lhs, nodes.Attribute) and lhs.as_string() == "sys.version_info"
    # `six.PY2` / `six.PY3` flags act as version guards too.
    if isinstance(test, nodes.Attribute):
        return test.as_string() in {"six.PY2", "six.PY3"}
    return False
+
+
def is_reassigned_after_current(node: nodes.NodeNG, varname: str) -> bool:
    """Check if the given variable name is reassigned in the same scope after the
    current node.
    """
    # def/class statements rebind a name just like a plain assignment does.
    binding_types = (nodes.AssignName, nodes.ClassDef, nodes.FunctionDef)
    for binding in node.scope().nodes_of_class(binding_types):
        if binding.name == varname and binding.lineno > node.lineno:
            return True
    return False


-def is_deleted_after_current(node: nodes.NodeNG, varname: str) ->bool:
def is_deleted_after_current(node: nodes.NodeNG, varname: str) -> bool:
    """Check if the given variable name is deleted in the same scope after the current
    node.
    """
    for del_stmt in node.scope().nodes_of_class(nodes.Delete):
        for target in del_stmt.targets:
            # Attribute/subscript deletion targets have no `name`; ignore them.
            if getattr(target, "name", None) == varname and target.lineno > node.lineno:
                return True
    return False


-def is_function_body_ellipsis(node: nodes.FunctionDef) ->bool:
def is_function_body_ellipsis(node: nodes.FunctionDef) -> bool:
    """Checks whether a function body only consists of a single Ellipsis."""
    if len(node.body) != 1:
        return False
    only_stmt = node.body[0]
    if not isinstance(only_stmt, nodes.Expr):
        return False
    value = only_stmt.value
    return isinstance(value, nodes.Const) and value.value == Ellipsis
+
+
def is_base_container(node: nodes.NodeNG | None) -> bool:
    """Return True for an empty list/tuple/set literal."""
    if not isinstance(node, nodes.BaseContainer):
        return False
    return not node.elts
+
+
def is_empty_dict_literal(node: nodes.NodeNG | None) -> bool:
    """Return True for an empty dict literal ``{}``."""
    if not isinstance(node, nodes.Dict):
        return False
    return not node.items
+

def is_empty_str_literal(node: nodes.NodeNG | None) -> bool:
    """Return True for an empty string constant."""
    if not isinstance(node, nodes.Const):
        return False
    return isinstance(node.value, str) and not node.value

-def returns_bool(node: nodes.NodeNG) ->bool:
+
def returns_bool(node: nodes.NodeNG) -> bool:
    """Returns true if a node is a nodes.Return that returns a constant boolean."""
    if not isinstance(node, nodes.Return):
        return False
    value = node.value
    return isinstance(value, nodes.Const) and isinstance(value.value, bool)


-def assigned_bool(node: nodes.NodeNG) ->bool:
def assigned_bool(node: nodes.NodeNG) -> bool:
    """Returns true if a node is a nodes.Assign that returns a constant boolean."""
    if not isinstance(node, nodes.Assign):
        return False
    value = node.value
    return isinstance(value, nodes.Const) and isinstance(value.value, bool)


-def get_node_first_ancestor_of_type(node: nodes.NodeNG, ancestor_type: (
-    type[_NodeT] | tuple[type[_NodeT], ...])) ->(_NodeT | None):
def get_node_first_ancestor_of_type(
    node: nodes.NodeNG, ancestor_type: type[_NodeT] | tuple[type[_NodeT], ...]
) -> _NodeT | None:
    """Return the first parent node that is any of the provided types (or None)."""
    return next(
        (
            ancestor
            for ancestor in node.node_ancestors()
            if isinstance(ancestor, ancestor_type)
        ),
        None,
    )


-def get_node_first_ancestor_of_type_and_its_child(node: nodes.NodeNG,
-    ancestor_type: (type[_NodeT] | tuple[type[_NodeT], ...])) ->(tuple[None,
-    None] | tuple[_NodeT, nodes.NodeNG]):
def get_node_first_ancestor_of_type_and_its_child(
    node: nodes.NodeNG, ancestor_type: type[_NodeT] | tuple[type[_NodeT], ...]
) -> tuple[None, None] | tuple[_NodeT, nodes.NodeNG]:
    """Modified version of get_node_first_ancestor_of_type to also return the
    descendant visited directly before reaching the sought ancestor.

    Useful for extracting whether a statement is guarded by a try, except, or finally
    when searching for a Try ancestor.
    """
    previous = node
    for current in node.node_ancestors():
        if isinstance(current, ancestor_type):
            return (current, previous)
        previous = current
    return None, None


-def in_type_checking_block(node: nodes.NodeNG) ->bool:
def in_type_checking_block(node: nodes.NodeNG) -> bool:
    """Check if a node is guarded by a TYPE_CHECKING guard."""
    for ancestor in node.node_ancestors():
        if not isinstance(ancestor, nodes.If):
            continue
        if isinstance(ancestor.test, nodes.Name):
            # Bare `if TYPE_CHECKING:` guard.
            if ancestor.test.name != "TYPE_CHECKING":
                continue
            lookup_result = ancestor.test.lookup(ancestor.test.name)[1]
            if not lookup_result:
                # The name cannot be resolved at all.
                return False
            maybe_import_from = lookup_result[0]
            if (
                isinstance(maybe_import_from, nodes.ImportFrom)
                and maybe_import_from.modname == "typing"
            ):
                # `from typing import TYPE_CHECKING` style guard.
                return True
            # A user-defined TYPE_CHECKING constant inferred to be False
            # is accepted as a guard as well.
            inferred = safe_infer(ancestor.test)
            if isinstance(inferred, nodes.Const) and inferred.value is False:
                return True
        elif isinstance(ancestor.test, nodes.Attribute):
            # `if typing.TYPE_CHECKING:` style guard.
            if ancestor.test.attrname != "TYPE_CHECKING":
                continue
            inferred_module = safe_infer(ancestor.test.expr)
            if (
                isinstance(inferred_module, nodes.Module)
                and inferred_module.name == "typing"
            ):
                return True

    return False
+
+
def is_typing_member(node: nodes.NodeNG, names_to_check: tuple[str, ...]) -> bool:
    """Check if `node` is a member of the `typing` module and has one of the names from
    `names_to_check`.
    """
    if isinstance(node, nodes.Name):
        # Resolve the name and check it was brought in by
        # `from typing import <name>` (possibly aliased).
        try:
            import_from = node.lookup(node.name)[1][0]
        except IndexError:
            # Name could not be resolved to any assignment.
            return False

        if isinstance(import_from, nodes.ImportFrom):
            return (
                import_from.modname == "typing"
                # real_name resolves aliases, e.g. `import X as Y`.
                and import_from.real_name(node.name) in names_to_check
            )
    elif isinstance(node, nodes.Attribute):
        # `typing.<name>` attribute access: infer the module on the left.
        inferred_module = safe_infer(node.expr)
        return (
            isinstance(inferred_module, nodes.Module)
            and inferred_module.name == "typing"
            and node.attrname in names_to_check
        )
    return False


 @lru_cache
-def in_for_else_branch(parent: nodes.NodeNG, stmt: Statement) ->bool:
+def in_for_else_branch(parent: nodes.NodeNG, stmt: Statement) -> bool:
     """Returns True if stmt is inside the else branch for a parent For stmt."""
-    pass
+    return isinstance(parent, nodes.For) and any(
+        else_stmt.parent_of(stmt) or else_stmt == stmt for else_stmt in parent.orelse
+    )


-def find_assigned_names_recursive(target: (nodes.AssignName | nodes.
-    BaseContainer)) ->Iterator[str]:
def find_assigned_names_recursive(
    target: nodes.AssignName | nodes.BaseContainer,
) -> Iterator[str]:
    """Yield the names of assignment targets, accounting for nested ones."""
    if isinstance(target, nodes.BaseContainer):
        # Tuple/list targets: recurse into each element.
        for element in target.elts:
            yield from find_assigned_names_recursive(element)
    elif isinstance(target, nodes.AssignName) and target.name is not None:
        yield target.name


-def has_starred_node_recursive(node: (nodes.For | nodes.Comprehension |
-    nodes.Set)) ->Iterator[bool]:
def has_starred_node_recursive(
    node: nodes.For | nodes.Comprehension | nodes.Set,
) -> Iterator[bool]:
    """Yield ``True`` if a Starred node is found recursively."""
    if isinstance(node, nodes.Starred):
        yield True
        return
    # Pick the element list to descend into, depending on the node kind.
    if isinstance(node, nodes.Set):
        elements = node.elts
    elif isinstance(node, (nodes.For, nodes.Comprehension)):
        elements = node.iter.elts
    else:
        return
    for element in elements:
        yield from has_starred_node_recursive(element)


-def is_hashable(node: nodes.NodeNG) ->bool:
def is_hashable(node: nodes.NodeNG) -> bool:
    """Return whether any inferred value of `node` is hashable.

    When finding ambiguity, return True.
    """
    # pylint: disable = too-many-try-statements
    try:
        for inferred in node.infer():
            if isinstance(inferred, (nodes.ClassDef, util.UninferableBase)):
                # Classes themselves are hashable; Uninferable is ambiguous.
                return True
            if not hasattr(inferred, "igetattr"):
                # Cannot introspect attributes: treat as ambiguous (hashable).
                return True
            hash_fn = next(inferred.igetattr("__hash__"))
            if hash_fn.parent is inferred:
                # `__hash__` defined directly on the inferred object.
                return True
            if getattr(hash_fn, "value", True) is not None:
                # Inherited `__hash__` that is not explicitly set to None
                # (setting `__hash__ = None` marks a class unhashable).
                return True
        return False
    except astroid.InferenceError:
        # Inference failed: err on the side of "hashable".
        return True
+
+
def subscript_chain_is_equal(left: nodes.Subscript, right: nodes.Subscript) -> bool:
    """Compare two subscript chains (e.g. ``a[1][2]`` vs ``b[1][2]``) element-wise."""
    # Walk both chains in lockstep, comparing the constant subscript values.
    while isinstance(left, nodes.Subscript) and isinstance(right, nodes.Subscript):
        try:
            left_value = get_subscript_const_value(left).value
            right_value = get_subscript_const_value(right).value
        except InferredTypeError:
            return False
        if left_value != right_value:
            return False
        left, right = left.value, right.value

    # Chains exhausted: compare the base expressions textually.
    return left.as_string() == right.as_string()  # type: ignore[no-any-return]
+
+
def _is_target_name_in_binop_side(
    target: nodes.AssignName | nodes.AssignAttr, side: nodes.NodeNG | None
) -> bool:
    """Determine whether the target name-like node is referenced in the side node."""
    if isinstance(side, nodes.Name):
        return isinstance(target, nodes.AssignName) and target.name == side.name
    if isinstance(side, nodes.Attribute):
        return (
            isinstance(target, nodes.AssignAttr)
            and target.as_string() == side.as_string()
        )
    if isinstance(side, nodes.Subscript):
        return isinstance(target, nodes.Subscript) and subscript_chain_is_equal(
            target, side
        )
    return False

-def is_augmented_assign(node: nodes.Assign) ->tuple[bool, str]:
+
def is_augmented_assign(node: nodes.Assign) -> tuple[bool, str]:
    """Determine if the node is assigning itself (with modifications) to itself.

    For example: x = 1 + x

    :returns: a ``(is_augmented, operator)`` pair; the operator is ``""``
        when the assignment is not augmented.
    """
    if not isinstance(node.value, nodes.BinOp):
        return False, ""

    binop = node.value
    target = node.targets[0]

    if not isinstance(target, (nodes.AssignName, nodes.AssignAttr, nodes.Subscript)):
        return False, ""

    # We don't want to catch x = "1" + x or x = "%s" % x
    if isinstance(binop.left, nodes.Const) and isinstance(
        binop.left.value, (str, bytes)
    ):
        return False, ""

    # This could probably be improved but for now we disregard all assignments from calls
    if isinstance(binop.left, nodes.Call) or isinstance(binop.right, nodes.Call):
        return False, ""

    # Target on the left side (e.g. x = x + 1): always augmented.
    if _is_target_name_in_binop_side(target, binop.left):
        return True, binop.op
    if (
        # Unless an operator is commutative, we should not raise (i.e. x = 3/x)
        binop.op in COMMUTATIVE_OPERATORS
        and _is_target_name_in_binop_side(target, binop.right)
    ):
        # Only report when the other operand is a constant int, so the
        # suggested augmented form is clearly equivalent.
        inferred_left = safe_infer(binop.left)
        if isinstance(inferred_left, nodes.Const) and isinstance(
            inferred_left.value, int
        ):
            return True, binop.op
        return False, ""
    return False, ""
+
+
+def _qualified_name_parts(qualified_module_name: str) -> list[str]:
     """Split the names of the given module into subparts.

     For example,
@@ -745,17 +2152,85 @@ def _qualified_name_parts(qualified_module_name: str) ->list[str]:
     returns
         ['pylint', 'pylint.checkers', 'pylint.checkers.ImportsChecker']
     """
-    pass
+    names = qualified_module_name.split(".")
+    return [".".join(names[0 : i + 1]) for i in range(len(names))]


-def is_terminating_func(node: nodes.Call) ->bool:
def is_module_ignored(
    qualified_module_name: str, ignored_modules: Iterable[str]
) -> bool:
    """Check whether a module, or any of its parent packages, is ignored.

    Entries in ``ignored_modules`` may be exact module names or
    ``fnmatch``-style patterns.
    """
    ignore_set = set(ignored_modules)
    for prefix in _qualified_name_parts(qualified_module_name):
        # Exact match first (cheap), then pattern matches.
        if prefix in ignore_set:
            return True
        if any(fnmatch.fnmatch(prefix, pattern) for pattern in ignore_set):
            return True
    return False
+
+
def is_singleton_const(node: nodes.NodeNG) -> bool:
    """Return True if node is a constant holding one of the singleton values."""
    if not isinstance(node, nodes.Const):
        return False
    return any(node.value is singleton for singleton in SINGLETON_VALUES)
+
+
def is_terminating_func(node: nodes.Call) -> bool:
    """Detect call to exit(), quit(), os._exit(), sys.exit(), or
    functions annotated with `typing.NoReturn` or `typing.Never`.
    """
    # Only name/attribute callees can be matched; a call inside a lambda is
    # never considered terminating for the enclosing scope.
    if (
        not isinstance(node.func, nodes.Attribute)
        and not (isinstance(node.func, nodes.Name))
        or isinstance(node.parent, nodes.Lambda)
    ):
        return False

    try:
        for inferred in node.func.infer():
            # Known terminating builtins/functions by qualified name.
            if (
                hasattr(inferred, "qname")
                and inferred.qname() in TERMINATING_FUNCS_QNAMES
            ):
                return True
            # Unwrap to get the actual function node object
            if isinstance(inferred, astroid.BoundMethod) and isinstance(
                inferred._proxied, astroid.UnboundMethod
            ):
                inferred = inferred._proxied._proxied
            # Functions whose return annotation infers to NoReturn/Never.
            if (
                isinstance(inferred, nodes.FunctionDef)
                and isinstance(inferred.returns, nodes.Name)
                and (inferred_func := safe_infer(inferred.returns))
                and hasattr(inferred_func, "qname")
                and inferred_func.qname()
                in (
                    *TYPING_NEVER,
                    *TYPING_NORETURN,
                    # In Python 3.7 - 3.8, NoReturn is alias of '_SpecialForm'
                    # "typing._SpecialForm",
                    # But 'typing.Any' also inherits _SpecialForm
                    # See #9751
                )
            ):
                return True
    except (StopIteration, astroid.InferenceError):
        pass

    return False
+
+
def is_class_attr(name: str, klass: nodes.ClassDef) -> bool:
    """Return True if ``name`` resolves to an attribute on ``klass``."""
    try:
        klass.getattr(name)
    except astroid.NotFoundError:
        return False
    return True
+
+
# Maps each comparison operator to its logical negation. Built once at module
# load so the lookup table is not recreated on every call.
_INVERSE_COMPARATORS = {
    "==": "!=",
    "!=": "==",
    "<": ">=",
    ">": "<=",
    "<=": ">",
    ">=": "<",
    "in": "not in",
    "not in": "in",
    "is": "is not",
    "is not": "is",
}


def get_inverse_comparator(op: str) -> str:
    """Returns the inverse comparator given a comparator.

    E.g. when given "==", returns "!="

    :param op: the comparator to invert.

    :returns: The inverse of the comparator in string format
    :raises KeyError: if input is not recognized as a comparator
    """
    return _INVERSE_COMPARATORS[op]
+
+
def not_condition_as_string(
    test_node: nodes.Compare | nodes.Name | nodes.UnaryOp | nodes.BoolOp | nodes.BinOp,
) -> str:
    """Return the logical negation of ``test_node`` rendered as source text."""
    # Default: prefix the expression with `not`.
    msg = f"not {test_node.as_string()}"
    if isinstance(test_node, nodes.UnaryOp):
        # `not (not x)` simplifies to the operand itself.
        msg = test_node.operand.as_string()
    elif isinstance(test_node, nodes.BoolOp):
        # Parenthesize and/or chains so `not` binds to the whole expression.
        msg = f"not ({test_node.as_string()})"
    elif isinstance(test_node, nodes.Compare):
        # Invert the (single) comparison operator instead of prefixing `not`.
        lhs = test_node.left
        ops, rhs = test_node.ops[0]
        # Expressions that bind more loosely than a comparison must be
        # parenthesized to keep the rendered text unambiguous.
        lower_priority_expressions = (
            nodes.Lambda,
            nodes.UnaryOp,
            nodes.BoolOp,
            nodes.IfExp,
            nodes.NamedExpr,
        )
        lhs = (
            f"({lhs.as_string()})"
            if isinstance(lhs, lower_priority_expressions)
            else lhs.as_string()
        )
        rhs = (
            f"({rhs.as_string()})"
            if isinstance(rhs, lower_priority_expressions)
            else rhs.as_string()
        )
        msg = f"{lhs} {get_inverse_comparator(ops)} {rhs}"
    return msg


@lru_cache(maxsize=1000)
def overridden_method(
    klass: nodes.LocalsDictNodeNG, name: str | None
) -> nodes.FunctionDef | None:
    """Get overridden method if any.

    :param klass: the class whose ancestors are searched.
    :param name: the method name to look for.
    :returns: the ancestor's FunctionDef being overridden, or None.
    """
    try:
        # First ancestor that defines `name` in its locals.
        parent = next(klass.local_attr_ancestors(name))
    except (StopIteration, KeyError):
        return None
    try:
        meth_node = parent[name]
    except KeyError:  # pragma: no cover
        # We have found an ancestor defining <name> but it's not in the local
        # dictionary. This may happen with astroid built from living objects.
        return None
    if isinstance(meth_node, nodes.FunctionDef):
        return meth_node
    # Defined in the ancestor but not as a function (e.g. a plain attribute).
    return None  # pragma: no cover
+
+
def clear_lru_caches() -> None:
    """Clear caches holding references to AST nodes."""
    # Every lru-cached helper in this module that can keep astroid nodes
    # alive between runs.
    node_caches: tuple[_lru_cache_wrapper[Any], ...] = (
        class_is_abstract,
        in_for_else_branch,
        infer_all,
        is_overload_stub,
        overridden_method,
        unimplemented_abstract_methods,
        safe_infer,
    )
    for cached_fn in node_caches:
        cached_fn.cache_clear()
+
+
def is_enum_member(node: nodes.AssignName) -> bool:
    """Return `True` if `node` is an Enum member (is an item of the
    `__members__` container).
    """
    frame = node.frame()
    # Must be assigned inside an Enum subclass, excluding the `enum` module's
    # own classes.
    if (
        not isinstance(frame, nodes.ClassDef)
        or not frame.is_subtype_of("enum.Enum")
        or frame.root().qname() == "enum"
    ):
        return False

    members = frame.locals.get("__members__")
    # A dataclass is one known case where `members` can be `None`.
    if members is None:
        return False
    # Membership check against the names listed in `__members__`.
    return node.name in [name_obj.name for value, name_obj in members[0].items]
diff --git a/pylint/checkers/variables.py b/pylint/checkers/variables.py
index 822aebe55..495051f31 100644
--- a/pylint/checkers/variables.py
+++ b/pylint/checkers/variables.py
@@ -1,5 +1,11 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Variables checkers for Python code."""
+
 from __future__ import annotations
+
 import collections
 import copy
 import itertools
@@ -11,39 +17,112 @@ from collections.abc import Generator, Iterable, Iterator
 from enum import Enum
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, NamedTuple
+
 import astroid
 import astroid.exceptions
 from astroid import bases, extract_node, nodes, util
 from astroid.nodes import _base_nodes
 from astroid.typing import InferenceResult
+
 from pylint.checkers import BaseChecker, utils
-from pylint.checkers.utils import in_type_checking_block, is_module_ignored, is_postponed_evaluation_enabled, is_sys_guard, overridden_method
+from pylint.checkers.utils import (
+    in_type_checking_block,
+    is_module_ignored,
+    is_postponed_evaluation_enabled,
+    is_sys_guard,
+    overridden_method,
+)
 from pylint.constants import PY39_PLUS, TYPING_NEVER, TYPING_NORETURN
 from pylint.interfaces import CONTROL_FLOW, HIGH, INFERENCE, INFERENCE_FAILURE
 from pylint.typing import MessageDefinitionTuple
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-SPECIAL_OBJ = re.compile('^_{2}[a-z]+_{2}$')
-FUTURE = '__future__'
-IGNORED_ARGUMENT_NAMES = re.compile('_.*|^ignored_|^unused_')
-METACLASS_NAME_TRANSFORMS = {'_py_abc': 'abc'}
-BUILTIN_RANGE = 'builtins.range'
-TYPING_MODULE = 'typing'
-TYPING_NAMES = frozenset({'Any', 'Callable', 'ClassVar', 'Generic',
-    'Optional', 'Tuple', 'Type', 'TypeVar', 'Union', 'AbstractSet',
-    'ByteString', 'Container', 'ContextManager', 'Hashable', 'ItemsView',
-    'Iterable', 'Iterator', 'KeysView', 'Mapping', 'MappingView',
-    'MutableMapping', 'MutableSequence', 'MutableSet', 'Sequence', 'Sized',
-    'ValuesView', 'Awaitable', 'AsyncIterator', 'AsyncIterable',
-    'Coroutine', 'Collection', 'AsyncGenerator', 'AsyncContextManager',
-    'Reversible', 'SupportsAbs', 'SupportsBytes', 'SupportsComplex',
-    'SupportsFloat', 'SupportsInt', 'SupportsRound', 'Counter', 'Deque',
-    'Dict', 'DefaultDict', 'List', 'Set', 'FrozenSet', 'NamedTuple',
-    'Generator', 'AnyStr', 'Text', 'Pattern', 'BinaryIO'})
-DICT_TYPES = (astroid.objects.DictValues, astroid.objects.DictKeys, astroid
-    .objects.DictItems, astroid.nodes.node_classes.Dict)
-NODES_WITH_VALUE_ATTR = (nodes.Assign, nodes.AnnAssign, nodes.AugAssign,
-    nodes.Expr, nodes.Return, nodes.Match, nodes.TypeAlias)
+
+SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
+FUTURE = "__future__"
+# regexp for ignored argument name
+IGNORED_ARGUMENT_NAMES = re.compile("_.*|^ignored_|^unused_")
+# In Python 3.7 abc has a Python implementation which is preferred
+# by astroid. Unfortunately this also messes up our explicit checks
+# for `abc`
+METACLASS_NAME_TRANSFORMS = {"_py_abc": "abc"}
+BUILTIN_RANGE = "builtins.range"
+TYPING_MODULE = "typing"
+TYPING_NAMES = frozenset(
+    {
+        "Any",
+        "Callable",
+        "ClassVar",
+        "Generic",
+        "Optional",
+        "Tuple",
+        "Type",
+        "TypeVar",
+        "Union",
+        "AbstractSet",
+        "ByteString",
+        "Container",
+        "ContextManager",
+        "Hashable",
+        "ItemsView",
+        "Iterable",
+        "Iterator",
+        "KeysView",
+        "Mapping",
+        "MappingView",
+        "MutableMapping",
+        "MutableSequence",
+        "MutableSet",
+        "Sequence",
+        "Sized",
+        "ValuesView",
+        "Awaitable",
+        "AsyncIterator",
+        "AsyncIterable",
+        "Coroutine",
+        "Collection",
+        "AsyncGenerator",
+        "AsyncContextManager",
+        "Reversible",
+        "SupportsAbs",
+        "SupportsBytes",
+        "SupportsComplex",
+        "SupportsFloat",
+        "SupportsInt",
+        "SupportsRound",
+        "Counter",
+        "Deque",
+        "Dict",
+        "DefaultDict",
+        "List",
+        "Set",
+        "FrozenSet",
+        "NamedTuple",
+        "Generator",
+        "AnyStr",
+        "Text",
+        "Pattern",
+        "BinaryIO",
+    }
+)
+
+DICT_TYPES = (
+    astroid.objects.DictValues,
+    astroid.objects.DictKeys,
+    astroid.objects.DictItems,
+    astroid.nodes.node_classes.Dict,
+)
+
+NODES_WITH_VALUE_ATTR = (
+    nodes.Assign,
+    nodes.AnnAssign,
+    nodes.AugAssign,
+    nodes.Expr,
+    nodes.Return,
+    nodes.Match,
+    nodes.TypeAlias,
+)


 class VariableVisitConsumerAction(Enum):
@@ -53,25 +132,50 @@ class VariableVisitConsumerAction(Enum):
     Continue -> continue loop to next consumer
     Return -> return and thereby break the loop
     """
+
     CONTINUE = 0
     RETURN = 1


-def _is_from_future_import(stmt: nodes.ImportFrom, name: str) ->(bool | None):
+def _is_from_future_import(stmt: nodes.ImportFrom, name: str) -> bool | None:
     """Check if the name is a future import from another module."""
-    pass
+    try:
+        module = stmt.do_import_module(stmt.modname)
+    except astroid.AstroidBuildingError:
+        return None

+    for local_node in module.locals.get(name, []):
+        if isinstance(local_node, nodes.ImportFrom) and local_node.modname == FUTURE:
+            return True
+    return None

-def _get_unpacking_extra_info(node: nodes.Assign, inferred: InferenceResult
-    ) ->str:
+
+def _get_unpacking_extra_info(node: nodes.Assign, inferred: InferenceResult) -> str:
     """Return extra information to add to the message for unpacking-non-sequence
     and unbalanced-tuple/dict-unpacking errors.
     """
-    pass
-
-
-def _detect_global_scope(node: nodes.Name, frame: nodes.LocalsDictNodeNG,
-    defframe: nodes.LocalsDictNodeNG) ->bool:
+    more = ""
+    if isinstance(inferred, DICT_TYPES):
+        if isinstance(node, nodes.Assign):
+            more = node.value.as_string()
+        elif isinstance(node, nodes.For):
+            more = node.iter.as_string()
+        return more
+
+    inferred_module = inferred.root().name
+    if node.root().name == inferred_module:
+        if node.lineno == inferred.lineno:
+            more = f"'{inferred.as_string()}'"
+        elif inferred.lineno:
+            more = f"defined at line {inferred.lineno}"
+    elif inferred.lineno:
+        more = f"defined at line {inferred.lineno} of {inferred_module}"
+    return more
+
+
+def _detect_global_scope(
+    node: nodes.Name, frame: nodes.LocalsDictNodeNG, defframe: nodes.LocalsDictNodeNG
+) -> bool:
     """Detect that the given frames share a global scope.

     Two frames share a global scope when neither
@@ -94,11 +198,68 @@ def _detect_global_scope(node: nodes.Name, frame: nodes.LocalsDictNodeNG,
                 class B(C): ...
         class C: ...
     """
-    pass
-
-
-def _fix_dot_imports(not_consumed: dict[str, list[nodes.NodeNG]]) ->list[tuple
-    [str, _base_nodes.ImportNode]]:
+    def_scope = scope = None
+    if frame and frame.parent:
+        scope = frame.parent.scope()
+    if defframe and defframe.parent:
+        def_scope = defframe.parent.scope()
+    if (
+        isinstance(frame, nodes.ClassDef)
+        and scope is not def_scope
+        and scope is utils.get_node_first_ancestor_of_type(node, nodes.FunctionDef)
+    ):
+        # If the current node's scope is a class nested under a function,
+        # and the def_scope is something else, then they aren't shared.
+        return False
+    if isinstance(frame, nodes.FunctionDef):
+        # If the parent of the current node is a
+        # function, then it can be under its scope (defined in); or
+        # the `->` part of annotations. The same goes
+        # for annotations of function arguments, they'll have
+        # their parent the Arguments node.
+        if frame.parent_of(defframe):
+            return node.lineno < defframe.lineno  # type: ignore[no-any-return]
+        if not isinstance(node.parent, (nodes.FunctionDef, nodes.Arguments)):
+            return False
+
+    break_scopes = []
+    for current_scope in (scope or frame, def_scope):
+        # Look for parent scopes. If there is anything different
+        # than a module or a class scope, then the frames don't
+        # share a global scope.
+        parent_scope = current_scope
+        while parent_scope:
+            if not isinstance(parent_scope, (nodes.ClassDef, nodes.Module)):
+                break_scopes.append(parent_scope)
+                break
+            if parent_scope.parent:
+                parent_scope = parent_scope.parent.scope()
+            else:
+                break
+    if len(set(break_scopes)) > 1:
+        # Store different scopes than expected.
+        # If the stored scopes are, in fact, the very same, then it means
+        # that the two frames (frame and defframe) share the same scope,
+        # and we could apply our lineno analysis over them.
+        # For instance, this works when they are inside a function, the node
+        # that uses a definition and the definition itself.
+        return False
+    # At this point, we are certain that frame and defframe share a scope
+    # and the definition of the first depends on the second.
+    return frame.lineno < defframe.lineno  # type: ignore[no-any-return]
+
+
+def _infer_name_module(
+    node: nodes.Import, name: str
+) -> Generator[InferenceResult, None, None]:
+    context = astroid.context.InferenceContext()
+    context.lookupname = name
+    return node.infer(context, asname=False)  # type: ignore[no-any-return]
+
+
+def _fix_dot_imports(
+    not_consumed: dict[str, list[nodes.NodeNG]]
+) -> list[tuple[str, _base_nodes.ImportNode]]:
     """Try to fix imports with multiple dots, by returning a dictionary
     with the import names expanded.

@@ -106,94 +267,268 @@ def _fix_dot_imports(not_consumed: dict[str, list[nodes.NodeNG]]) ->list[tuple
     like 'xml' (when we have both 'xml.etree' and 'xml.sax'), to 'xml.etree'
     and 'xml.sax' respectively.
     """
-    pass
-
-
-def _find_frame_imports(name: str, frame: nodes.LocalsDictNodeNG) ->bool:
+    names: dict[str, _base_nodes.ImportNode] = {}
+    for name, stmts in not_consumed.items():
+        if any(
+            isinstance(stmt, nodes.AssignName)
+            and isinstance(stmt.assign_type(), nodes.AugAssign)
+            for stmt in stmts
+        ):
+            continue
+        for stmt in stmts:
+            if not isinstance(stmt, (nodes.ImportFrom, nodes.Import)):
+                continue
+            for imports in stmt.names:
+                second_name = None
+                import_module_name = imports[0]
+                if import_module_name == "*":
+                    # In case of wildcard imports,
+                    # pick the name from inside the imported module.
+                    second_name = name
+                else:
+                    name_matches_dotted_import = False
+                    if (
+                        import_module_name.startswith(name)
+                        and import_module_name.find(".") > -1
+                    ):
+                        name_matches_dotted_import = True
+
+                    if name_matches_dotted_import or name in imports:
+                        # Most likely something like 'xml.etree',
+                        # which will appear in the .locals as 'xml'.
+                        # Only pick the name if it wasn't consumed.
+                        second_name = import_module_name
+                if second_name and second_name not in names:
+                    names[second_name] = stmt
+    return sorted(names.items(), key=lambda a: a[1].fromlineno)
+
+
+def _find_frame_imports(name: str, frame: nodes.LocalsDictNodeNG) -> bool:
     """Detect imports in the frame, with the required *name*.

     Such imports can be considered assignments if they are not globals.
     Returns True if an import for the given name was found.
     """
-    pass
-
-
-def _assigned_locally(name_node: nodes.Name) ->bool:
+    if name in _flattened_scope_names(frame.nodes_of_class(nodes.Global)):
+        return False
+
+    imports = frame.nodes_of_class((nodes.Import, nodes.ImportFrom))
+    for import_node in imports:
+        for import_name, import_alias in import_node.names:
+            # If the import uses an alias, check only that.
+            # Otherwise, check only the import name.
+            if import_alias:
+                if import_alias == name:
+                    return True
+            elif import_name and import_name == name:
+                return True
+    return False
+
+
+def _import_name_is_global(
+    stmt: nodes.Global | _base_nodes.ImportNode, global_names: set[str]
+) -> bool:
+    for import_name, import_alias in stmt.names:
+        # If the import uses an alias, check only that.
+        # Otherwise, check only the import name.
+        if import_alias:
+            if import_alias in global_names:
+                return True
+        elif import_name in global_names:
+            return True
+    return False
+
+
+def _flattened_scope_names(
+    iterator: Iterator[nodes.Global | nodes.Nonlocal],
+) -> set[str]:
+    values = (set(stmt.names) for stmt in iterator)
+    return set(itertools.chain.from_iterable(values))
+
+
+def _assigned_locally(name_node: nodes.Name) -> bool:
     """Checks if name_node has corresponding assign statement in same scope."""
-    pass
-
-
-MSGS: dict[str, MessageDefinitionTuple] = {'E0601': (
-    'Using variable %r before assignment', 'used-before-assignment',
-    'Emitted when a local variable is accessed before its assignment took place. Assignments in try blocks are assumed not to have occurred when evaluating associated except/finally blocks. Assignments in except blocks are assumed not to have occurred when evaluating statements outside the block, except when the associated try block contains a return statement.'
-    ), 'E0602': ('Undefined variable %r', 'undefined-variable',
-    'Used when an undefined variable is accessed.'), 'E0603': (
-    'Undefined variable name %r in __all__', 'undefined-all-variable',
-    'Used when an undefined variable name is referenced in __all__.'),
-    'E0604': ('Invalid object %r in __all__, must contain only strings',
-    'invalid-all-object',
-    'Used when an invalid (non-string) object occurs in __all__.'), 'E0605':
-    ('Invalid format for __all__, must be tuple or list',
-    'invalid-all-format', 'Used when __all__ has an invalid format.'),
-    'E0606': ('Possibly using variable %r before assignment',
-    'possibly-used-before-assignment',
-    'Emitted when a local variable is accessed before its assignment took place in both branches of an if/else switch.'
-    ), 'E0611': ('No name %r in module %r', 'no-name-in-module',
-    'Used when a name cannot be found in a module.'), 'W0601': (
-    'Global variable %r undefined at the module level',
-    'global-variable-undefined',
-    'Used when a variable is defined through the "global" statement but the variable is not defined in the module scope.'
-    ), 'W0602': ('Using global for %r but no assignment is done',
-    'global-variable-not-assigned',
-    "When a variable defined in the global scope is modified in an inner scope, the 'global' keyword is required in the inner scope only if there is an assignment operation done in the inner scope."
-    ), 'W0603': ('Using the global statement', 'global-statement',
-    'Used when you use the "global" statement to update a global variable. Pylint discourages its usage. That doesn\'t mean you cannot use it!'
-    ), 'W0604': ('Using the global statement at the module level',
-    'global-at-module-level',
-    'Used when you use the "global" statement at the module level since it has no effect.'
-    ), 'W0611': ('Unused %s', 'unused-import',
-    'Used when an imported module or variable is not used.'), 'W0612': (
-    'Unused variable %r', 'unused-variable',
-    'Used when a variable is defined but not used.'), 'W0613': (
-    'Unused argument %r', 'unused-argument',
-    'Used when a function or method argument is not used.'), 'W0614': (
-    'Unused import(s) %s from wildcard import of %s',
-    'unused-wildcard-import',
-    "Used when an imported module or variable is not used from a `'from X import *'` style import."
-    ), 'W0621': ('Redefining name %r from outer scope (line %s)',
-    'redefined-outer-name',
-    "Used when a variable's name hides a name defined in an outer scope or except handler."
-    ), 'W0622': ('Redefining built-in %r', 'redefined-builtin',
-    'Used when a variable or function override a built-in.'), 'W0631': (
-    'Using possibly undefined loop variable %r', 'undefined-loop-variable',
-    'Used when a loop variable (i.e. defined by a for loop or a list comprehension or a generator expression) is used outside the loop.'
-    ), 'W0632': (
-    'Possible unbalanced tuple unpacking with sequence %s: left side has %d label%s, right side has %d value%s'
-    , 'unbalanced-tuple-unpacking',
-    'Used when there is an unbalanced tuple unpacking in assignment', {
-    'old_names': [('E0632', 'old-unbalanced-tuple-unpacking')]}), 'E0633':
-    ('Attempting to unpack a non-sequence%s', 'unpacking-non-sequence',
-    'Used when something which is not a sequence is used in an unpack assignment'
-    , {'old_names': [('W0633', 'old-unpacking-non-sequence')]}), 'W0640': (
-    'Cell variable %s defined in loop', 'cell-var-from-loop',
-    'A variable used in a closure is defined in a loop. This will result in all closures using the same value for the closed-over variable.'
-    ), 'W0641': ('Possibly unused variable %r', 'possibly-unused-variable',
-    'Used when a variable is defined but might not be used. The possibility comes from the fact that locals() might be used, which could consume or not the said variable'
-    ), 'W0642': ('Invalid assignment to %s in method',
-    'self-cls-assignment',
-    'Invalid assignment to self or cls in instance or class method respectively.'
-    ), 'E0643': ('Invalid index for iterable length',
-    'potential-index-error',
-    'Emitted when an index used on an iterable goes beyond the length of that iterable.'
-    ), 'W0644': (
-    'Possible unbalanced dict unpacking with %s: left side has %d label%s, right side has %d value%s'
-    , 'unbalanced-dict-unpacking',
-    'Used when there is an unbalanced dict unpacking in assignment or for loop'
-    )}
+    name_node_scope = name_node.scope()
+    assign_stmts = name_node_scope.nodes_of_class(nodes.AssignName)
+    return any(a.name == name_node.name for a in assign_stmts) or _find_frame_imports(
+        name_node.name, name_node_scope
+    )
+
+
+def _has_locals_call_after_node(stmt: nodes.NodeNG, scope: nodes.FunctionDef) -> bool:
+    skip_nodes = (
+        nodes.FunctionDef,
+        nodes.ClassDef,
+        nodes.Import,
+        nodes.ImportFrom,
+    )
+    for call in scope.nodes_of_class(nodes.Call, skip_klass=skip_nodes):
+        inferred = utils.safe_infer(call.func)
+        if (
+            utils.is_builtin_object(inferred)
+            and getattr(inferred, "name", None) == "locals"
+        ):
+            if stmt.lineno < call.lineno:
+                return True
+    return False
+
+
+MSGS: dict[str, MessageDefinitionTuple] = {
+    "E0601": (
+        "Using variable %r before assignment",
+        "used-before-assignment",
+        "Emitted when a local variable is accessed before its assignment took place. "
+        "Assignments in try blocks are assumed not to have occurred when evaluating "
+        "associated except/finally blocks. Assignments in except blocks are assumed "
+        "not to have occurred when evaluating statements outside the block, except "
+        "when the associated try block contains a return statement.",
+    ),
+    "E0602": (
+        "Undefined variable %r",
+        "undefined-variable",
+        "Used when an undefined variable is accessed.",
+    ),
+    "E0603": (
+        "Undefined variable name %r in __all__",
+        "undefined-all-variable",
+        "Used when an undefined variable name is referenced in __all__.",
+    ),
+    "E0604": (
+        "Invalid object %r in __all__, must contain only strings",
+        "invalid-all-object",
+        "Used when an invalid (non-string) object occurs in __all__.",
+    ),
+    "E0605": (
+        "Invalid format for __all__, must be tuple or list",
+        "invalid-all-format",
+        "Used when __all__ has an invalid format.",
+    ),
+    "E0606": (
+        "Possibly using variable %r before assignment",
+        "possibly-used-before-assignment",
+        "Emitted when a local variable is accessed before its assignment took place "
+        "in both branches of an if/else switch.",
+    ),
+    "E0611": (
+        "No name %r in module %r",
+        "no-name-in-module",
+        "Used when a name cannot be found in a module.",
+    ),
+    "W0601": (
+        "Global variable %r undefined at the module level",
+        "global-variable-undefined",
+        'Used when a variable is defined through the "global" statement '
+        "but the variable is not defined in the module scope.",
+    ),
+    "W0602": (
+        "Using global for %r but no assignment is done",
+        "global-variable-not-assigned",
+        "When a variable defined in the global scope is modified in an inner scope, "
+        "the 'global' keyword is required in the inner scope only if there is an "
+        "assignment operation done in the inner scope.",
+    ),
+    "W0603": (
+        "Using the global statement",  # W0121
+        "global-statement",
+        'Used when you use the "global" statement to update a global '
+        "variable. Pylint discourages its usage. That doesn't mean you cannot "
+        "use it!",
+    ),
+    "W0604": (
+        "Using the global statement at the module level",  # W0103
+        "global-at-module-level",
+        'Used when you use the "global" statement at the module level '
+        "since it has no effect.",
+    ),
+    "W0611": (
+        "Unused %s",
+        "unused-import",
+        "Used when an imported module or variable is not used.",
+    ),
+    "W0612": (
+        "Unused variable %r",
+        "unused-variable",
+        "Used when a variable is defined but not used.",
+    ),
+    "W0613": (
+        "Unused argument %r",
+        "unused-argument",
+        "Used when a function or method argument is not used.",
+    ),
+    "W0614": (
+        "Unused import(s) %s from wildcard import of %s",
+        "unused-wildcard-import",
+        "Used when an imported module or variable is not used from a "
+        "`'from X import *'` style import.",
+    ),
+    "W0621": (
+        "Redefining name %r from outer scope (line %s)",
+        "redefined-outer-name",
+        "Used when a variable's name hides a name defined in an outer scope or except handler.",
+    ),
+    "W0622": (
+        "Redefining built-in %r",
+        "redefined-builtin",
+        "Used when a variable or function override a built-in.",
+    ),
+    "W0631": (
+        "Using possibly undefined loop variable %r",
+        "undefined-loop-variable",
+        "Used when a loop variable (i.e. defined by a for loop or "
+        "a list comprehension or a generator expression) is used outside "
+        "the loop.",
+    ),
+    "W0632": (
+        "Possible unbalanced tuple unpacking with sequence %s: left side has %d "
+        "label%s, right side has %d value%s",
+        "unbalanced-tuple-unpacking",
+        "Used when there is an unbalanced tuple unpacking in assignment",
+        {"old_names": [("E0632", "old-unbalanced-tuple-unpacking")]},
+    ),
+    "E0633": (
+        "Attempting to unpack a non-sequence%s",
+        "unpacking-non-sequence",
+        "Used when something which is not a sequence is used in an unpack assignment",
+        {"old_names": [("W0633", "old-unpacking-non-sequence")]},
+    ),
+    "W0640": (
+        "Cell variable %s defined in loop",
+        "cell-var-from-loop",
+        "A variable used in a closure is defined in a loop. "
+        "This will result in all closures using the same value for "
+        "the closed-over variable.",
+    ),
+    "W0641": (
+        "Possibly unused variable %r",
+        "possibly-unused-variable",
+        "Used when a variable is defined but might not be used. "
+        "The possibility comes from the fact that locals() might be used, "
+        "which could consume or not the said variable",
+    ),
+    "W0642": (
+        "Invalid assignment to %s in method",
+        "self-cls-assignment",
+        "Invalid assignment to self or cls in instance or class method "
+        "respectively.",
+    ),
+    "E0643": (
+        "Invalid index for iterable length",
+        "potential-index-error",
+        "Emitted when an index used on an iterable goes beyond the length of that "
+        "iterable.",
+    ),
+    "W0644": (
+        "Possible unbalanced dict unpacking with %s: "
+        "left side has %d label%s, right side has %d value%s",
+        "unbalanced-dict-unpacking",
+        "Used when there is an unbalanced dict unpacking in assignment or for loop",
+    ),
+}


 class ScopeConsumer(NamedTuple):
     """Store nodes and their consumption states."""
+
     to_consume: dict[str, list[nodes.NodeNG]]
     consumed: dict[str, list[nodes.NodeNG]]
     consumed_uncertain: defaultdict[str, list[nodes.NodeNG]]
@@ -203,22 +538,23 @@ class ScopeConsumer(NamedTuple):
 class NamesConsumer:
     """A simple class to handle consumed, to consume and scope type info of node locals."""

-    def __init__(self, node: nodes.NodeNG, scope_type: str) ->None:
-        self._atomic = ScopeConsumer(copy.copy(node.locals), {},
-            collections.defaultdict(list), scope_type)
+    def __init__(self, node: nodes.NodeNG, scope_type: str) -> None:
+        self._atomic = ScopeConsumer(
+            copy.copy(node.locals), {}, collections.defaultdict(list), scope_type
+        )
         self.node = node
         self.names_under_always_false_test: set[str] = set()
         self.names_defined_under_one_branch_only: set[str] = set()

-    def __repr__(self) ->str:
-        _to_consumes = [f'{k}->{v}' for k, v in self._atomic.to_consume.items()
-            ]
-        _consumed = [f'{k}->{v}' for k, v in self._atomic.consumed.items()]
-        _consumed_uncertain = [f'{k}->{v}' for k, v in self._atomic.
-            consumed_uncertain.items()]
-        to_consumes = ', '.join(_to_consumes)
-        consumed = ', '.join(_consumed)
-        consumed_uncertain = ', '.join(_consumed_uncertain)
+    def __repr__(self) -> str:
+        _to_consumes = [f"{k}->{v}" for k, v in self._atomic.to_consume.items()]
+        _consumed = [f"{k}->{v}" for k, v in self._atomic.consumed.items()]
+        _consumed_uncertain = [
+            f"{k}->{v}" for k, v in self._atomic.consumed_uncertain.items()
+        ]
+        to_consumes = ", ".join(_to_consumes)
+        consumed = ", ".join(_consumed)
+        consumed_uncertain = ", ".join(_consumed_uncertain)
         return f"""
 to_consume : {to_consumes}
 consumed : {consumed}
@@ -226,11 +562,19 @@ consumed_uncertain: {consumed_uncertain}
 scope_type : {self._atomic.scope_type}
 """

-    def __iter__(self) ->Iterator[Any]:
+    def __iter__(self) -> Iterator[Any]:
         return iter(self._atomic)

     @property
-    def consumed_uncertain(self) ->defaultdict[str, list[nodes.NodeNG]]:
+    def to_consume(self) -> dict[str, list[nodes.NodeNG]]:
+        return self._atomic.to_consume
+
+    @property
+    def consumed(self) -> dict[str, list[nodes.NodeNG]]:
+        return self._atomic.consumed
+
+    @property
+    def consumed_uncertain(self) -> defaultdict[str, list[nodes.NodeNG]]:
         """Retrieves nodes filtered out by get_next_to_consume() that may not
         have executed.

@@ -239,75 +583,439 @@ scope_type : {self._atomic.scope_type}
         blocks). Checkers that want to treat the statements as executed
         (e.g. for unused-variable) may need to add them back.
         """
-        pass
+        return self._atomic.consumed_uncertain

-    def mark_as_consumed(self, name: str, consumed_nodes: list[nodes.NodeNG]
-        ) ->None:
    @property
    def scope_type(self) -> str:
        """The scope type recorded for this consumer (as passed to ``__init__``)."""
        return self._atomic.scope_type
+
+    def mark_as_consumed(self, name: str, consumed_nodes: list[nodes.NodeNG]) -> None:
         """Mark the given nodes as consumed for the name.

         If all of the nodes for the name were consumed, delete the name from
         the to_consume dictionary
         """
-        pass
+        unconsumed = [n for n in self.to_consume[name] if n not in set(consumed_nodes)]
+        self.consumed[name] = consumed_nodes
+
+        if unconsumed:
+            self.to_consume[name] = unconsumed
+        else:
+            del self.to_consume[name]

-    def get_next_to_consume(self, node: nodes.Name) ->(list[nodes.NodeNG] |
-        None):
    def get_next_to_consume(self, node: nodes.Name) -> list[nodes.NodeNG] | None:
        """Return a list of the nodes that define `node` from this scope.

        If it is uncertain whether a node will be consumed, such as for statements in
        except blocks, add it to self.consumed_uncertain instead of returning it.
        Return None to indicate a special case that needs to be handled by the caller.
        """
        name = node.name
        parent_node = node.parent
        found_nodes = self.to_consume.get(name)
        node_statement = node.statement()
        # `x = x + 1`-style: the Name on the right-hand side belongs to the very
        # assignment that (re)defines it — signal the special case with None.
        if (
            found_nodes
            and isinstance(parent_node, nodes.Assign)
            and parent_node == found_nodes[0].parent
        ):
            lhs = found_nodes[0].parent.targets[0]
            if (
                isinstance(lhs, nodes.AssignName) and lhs.name == name
            ):  # this name is defined in this very statement
                found_nodes = None

        # `for x in x:` — the iterable use cannot be satisfied by the loop target.
        if (
            found_nodes
            and isinstance(parent_node, nodes.For)
            and parent_node.iter == node
            and parent_node.target in found_nodes
        ):
            found_nodes = None

        # Before filtering, check that this node's name is not a nonlocal
        if any(
            isinstance(child, nodes.Nonlocal) and node.name in child.names
            for child in node.frame().get_children()
        ):
            return found_nodes

        # And no comprehension is under the node's frame
        if VariablesChecker._comprehension_between_frame_and_node(node):
            return found_nodes

        # Filter out assignments in ExceptHandlers that node is not contained in
        if found_nodes:
            found_nodes = [
                n
                for n in found_nodes
                if not isinstance(n.statement(), nodes.ExceptHandler)
                or n.statement().parent_of(node)
            ]

        # Filter out assignments guarded by always false conditions
        if found_nodes:
            uncertain_nodes = self._uncertain_nodes_if_tests(found_nodes, node)
            self.consumed_uncertain[node.name] += uncertain_nodes
            uncertain_nodes_set = set(uncertain_nodes)
            found_nodes = [n for n in found_nodes if n not in uncertain_nodes_set]

        # Filter out assignments in an Except clause that the node is not
        # contained in, assuming they may fail
        if found_nodes:
            uncertain_nodes = self._uncertain_nodes_in_except_blocks(
                found_nodes, node, node_statement
            )
            self.consumed_uncertain[node.name] += uncertain_nodes
            uncertain_nodes_set = set(uncertain_nodes)
            found_nodes = [n for n in found_nodes if n not in uncertain_nodes_set]

        # If this node is in a Finally block of a Try/Finally,
        # filter out assignments in the try portion, assuming they may fail
        if found_nodes:
            uncertain_nodes = (
                self._uncertain_nodes_in_try_blocks_when_evaluating_finally_blocks(
                    found_nodes, node_statement, name
                )
            )
            self.consumed_uncertain[node.name] += uncertain_nodes
            uncertain_nodes_set = set(uncertain_nodes)
            found_nodes = [n for n in found_nodes if n not in uncertain_nodes_set]

        # If this node is in an ExceptHandler,
        # filter out assignments in the try portion, assuming they may fail
        if found_nodes:
            uncertain_nodes = (
                self._uncertain_nodes_in_try_blocks_when_evaluating_except_blocks(
                    found_nodes, node_statement
                )
            )
            self.consumed_uncertain[node.name] += uncertain_nodes
            uncertain_nodes_set = set(uncertain_nodes)
            found_nodes = [n for n in found_nodes if n not in uncertain_nodes_set]

        return found_nodes
+
    def _inferred_to_define_name_raise_or_return(
        self, name: str, node: nodes.NodeNG
    ) -> bool:
        """Return True if there is a path under this `if_node`
        that is inferred to define `name`, raise, or return.
        """
        # Handle try and with
        if isinstance(node, nodes.Try):
            # Allow either a path through try/else/finally OR a path through ALL except handlers
            try_except_node = node
            if node.finalbody:
                # NOTE(review): nodes_of_class() yields the root node first when it
                # matches, so this next() resolves back to `node` itself — confirm
                # this is the intended way to pick the try/except of a try/finally.
                try_except_node = next(
                    (child for child in node.nodes_of_class(nodes.Try)),
                    None,
                )
            handlers = try_except_node.handlers if try_except_node else []
            return NamesConsumer._defines_name_raises_or_returns_recursive(
                name, node
            ) or all(
                NamesConsumer._defines_name_raises_or_returns_recursive(name, handler)
                for handler in handlers
            )

        if isinstance(node, (nodes.With, nodes.For, nodes.While)):
            return NamesConsumer._defines_name_raises_or_returns_recursive(name, node)

        if not isinstance(node, nodes.If):
            return False

        # Be permissive if there is a break or a continue
        if any(node.nodes_of_class(nodes.Break, nodes.Continue)):
            return True

        # Is there an assignment in this node itself, e.g. in named expression?
        if NamesConsumer._defines_name_raises_or_returns(name, node):
            return True

        test = node.test.value if isinstance(node.test, nodes.NamedExpr) else node.test
        all_inferred = utils.infer_all(test)
        only_search_if = False
        only_search_else = True

        # Inspect the inferred value(s) of the test: constant-true values restrict
        # interest to the if-branch, constant-false values to the else-branch.
        # NOTE(review): only_search_if is computed but never read below.
        for inferred in all_inferred:
            if not isinstance(inferred, nodes.Const):
                only_search_else = False
                continue
            val = inferred.value
            only_search_if = only_search_if or (val != NotImplemented and val)
            only_search_else = only_search_else and not val

        # Only search else branch when test condition is inferred to be false
        if all_inferred and only_search_else:
            self.names_under_always_false_test.add(name)
            return self._branch_handles_name(name, node.orelse)
        # Search both if and else branches
        if_branch_handles = self._branch_handles_name(name, node.body)
        else_branch_handles = self._branch_handles_name(name, node.orelse)
        # Remember names handled by exactly one of the two branches.
        if if_branch_handles ^ else_branch_handles:
            self.names_defined_under_one_branch_only.add(name)
        elif name in self.names_defined_under_one_branch_only:
            self.names_defined_under_one_branch_only.remove(name)
        return if_branch_handles and else_branch_handles
+
+    def _branch_handles_name(self, name: str, body: Iterable[nodes.NodeNG]) -> bool:
+        return any(
+            NamesConsumer._defines_name_raises_or_returns(name, if_body_stmt)
+            or isinstance(
+                if_body_stmt,
+                (
+                    nodes.If,
+                    nodes.Try,
+                    nodes.With,
+                    nodes.For,
+                    nodes.While,
+                ),
+            )
+            and self._inferred_to_define_name_raise_or_return(name, if_body_stmt)
+            for if_body_stmt in body
+        )
+
    def _uncertain_nodes_if_tests(
        self, found_nodes: list[nodes.NodeNG], node: nodes.NodeNG
    ) -> list[nodes.NodeNG]:
        """Identify nodes of uncertain execution because they are defined under if
        tests.

        Don't identify a node if there is a path that is inferred to
        define the name, raise, or return (e.g. any executed if/elif/else branch).
        """
        uncertain_nodes = []
        for other_node in found_nodes:
            # Determine which name the candidate binds; only assignments and
            # imports are considered here.
            if isinstance(other_node, nodes.AssignName):
                name = other_node.name
            elif isinstance(other_node, (nodes.Import, nodes.ImportFrom)):
                name = node.name
            else:
                continue

            # All `if` ancestors of the candidate that do not also enclose the use.
            all_if = [
                n
                for n in other_node.node_ancestors()
                if isinstance(n, nodes.If) and not n.parent_of(node)
            ]
            if not all_if:
                continue

            # node_ancestors() walks outward, so all_if[0] is the innermost `if`.
            closest_if = all_if[0]
            if (
                isinstance(node, nodes.AssignName)
                and node.frame() is not closest_if.frame()
            ):
                continue
            if closest_if.parent_of(node):
                continue

            outer_if = all_if[-1]
            # The use site is guarded by an equivalent test: assume it executes.
            if NamesConsumer._node_guarded_by_same_test(node, outer_if):
                continue

            # Name defined in the if/else control flow
            if self._inferred_to_define_name_raise_or_return(name, outer_if):
                continue

            uncertain_nodes.append(other_node)

        return uncertain_nodes

     @staticmethod
-    def _node_guarded_by_same_test(node: nodes.NodeNG, other_if: nodes.If
-        ) ->bool:
+    def _node_guarded_by_same_test(node: nodes.NodeNG, other_if: nodes.If) -> bool:
         """Identify if `node` is guarded by an equivalent test as `other_if`.

         Two tests are equivalent if their string representations are identical
         or if their inferred values consist only of constants and those constants
         are identical, and the if test guarding `node` is not a Name.
         """
-        pass
+        other_if_test_as_string = other_if.test.as_string()
+        other_if_test_all_inferred = utils.infer_all(other_if.test)
+        for ancestor in node.node_ancestors():
+            if not isinstance(ancestor, nodes.If):
+                continue
+            if ancestor.test.as_string() == other_if_test_as_string:
+                return True
+            if isinstance(ancestor.test, nodes.Name):
+                continue
+            all_inferred = utils.infer_all(ancestor.test)
+            if len(all_inferred) == len(other_if_test_all_inferred):
+                if any(
+                    not isinstance(test, nodes.Const)
+                    for test in (*all_inferred, *other_if_test_all_inferred)
+                ):
+                    continue
+                if {test.value for test in all_inferred} != {
+                    test.value for test in other_if_test_all_inferred
+                }:
+                    continue
+                return True
+
+        return False

    @staticmethod
    def _uncertain_nodes_in_except_blocks(
        found_nodes: list[nodes.NodeNG],
        node: nodes.NodeNG,
        node_statement: _base_nodes.Statement,
    ) -> list[nodes.NodeNG]:
        """Return any nodes in ``found_nodes`` that should be treated as uncertain
        because they are in an except block.
        """
        uncertain_nodes = []
        for other_node in found_nodes:
            other_node_statement = other_node.statement()
            # Only testing for statements in the except block of Try
            closest_except_handler = utils.get_node_first_ancestor_of_type(
                other_node_statement, nodes.ExceptHandler
            )
            if not closest_except_handler:
                continue
            # If the other node is in the same scope as this node, assume it executes
            if closest_except_handler.parent_of(node):
                continue
            closest_try_except: nodes.Try = closest_except_handler.parent
            # If the try or else blocks return, assume the except blocks execute.
            try_block_returns = any(
                isinstance(try_statement, nodes.Return)
                for try_statement in closest_try_except.body
            )
            else_block_returns = any(
                isinstance(else_statement, nodes.Return)
                for else_statement in closest_try_except.orelse
            )
            # A terminating call (e.g. sys.exit()) in the else block also
            # prevents fall-through.
            else_block_exits = any(
                isinstance(else_statement, nodes.Expr)
                and isinstance(else_statement.value, nodes.Call)
                and utils.is_terminating_func(else_statement.value)
                for else_statement in closest_try_except.orelse
            )
            else_block_continues = any(
                isinstance(else_statement, nodes.Continue)
                for else_statement in closest_try_except.orelse
            )
            # else-block `continue` inside the loop that contains the use site:
            # reaching the use site implies the except handler ran.
            if (
                else_block_continues
                and isinstance(node_statement.parent, (nodes.For, nodes.While))
                and closest_try_except.parent.parent_of(node_statement)
            ):
                continue

            if try_block_returns or else_block_returns or else_block_exits:
                # Exception: if this node is in the final block of the other_node_statement,
                # it will execute before returning. Assume the except statements are uncertain.
                if (
                    isinstance(node_statement.parent, nodes.Try)
                    and node_statement in node_statement.parent.finalbody
                    and closest_try_except.parent.parent_of(node_statement)
                ):
                    # NOTE(review): after appending here, control falls through to
                    # the loop-finishes check below and may append the same node a
                    # second time — confirm duplicates are harmless downstream.
                    uncertain_nodes.append(other_node)
                # Or the node_statement is in the else block of the relevant Try
                elif (
                    isinstance(node_statement.parent, nodes.Try)
                    and node_statement in node_statement.parent.orelse
                    and closest_try_except.parent.parent_of(node_statement)
                ):
                    uncertain_nodes.append(other_node)
                # Assume the except blocks execute, so long as each handler
                # defines the name, raises, or returns.
                elif all(
                    NamesConsumer._defines_name_raises_or_returns_recursive(
                        node.name, handler
                    )
                    for handler in closest_try_except.handlers
                ):
                    continue

            if NamesConsumer._check_loop_finishes_via_except(node, closest_try_except):
                continue

            # Passed all tests for uncertain execution
            uncertain_nodes.append(other_node)
        return uncertain_nodes
+
    @staticmethod
    def _defines_name_raises_or_returns(name: str, node: nodes.NodeNG) -> bool:
        """Return True if `node` directly binds `name`, or is a statement type
        treated as terminating/guarding its branch (raise, assert, return,
        continue, or a call to a terminating function).
        """
        if isinstance(node, (nodes.Raise, nodes.Assert, nodes.Return, nodes.Continue)):
            return True
        if isinstance(node, nodes.Expr) and isinstance(node.value, nodes.Call):
            # Calls that never return normally, e.g. sys.exit().
            if utils.is_terminating_func(node.value):
                return True
            # A bare call to `assert_never(...)` (matched by name only) marks
            # the branch as unreachable.
            if (
                isinstance(node.value.func, nodes.Name)
                and node.value.func.name == "assert_never"
            ):
                return True
        # Annotated assignment with a value: `name: T = ...`
        if (
            isinstance(node, nodes.AnnAssign)
            and node.value
            and isinstance(node.target, nodes.AssignName)
            and node.target.name == name
        ):
            return True
        # Plain assignment; unpack tuple/list targets and starred elements.
        if isinstance(node, nodes.Assign):
            for target in node.targets:
                for elt in utils.get_all_elements(target):
                    if isinstance(elt, nodes.Starred):
                        elt = elt.value
                    if isinstance(elt, nodes.AssignName) and elt.name == name:
                        return True
        # A walrus (named expression) anywhere inside the `if` statement.
        if isinstance(node, nodes.If):
            if any(
                child_named_expr.target.name == name
                for child_named_expr in node.nodes_of_class(nodes.NamedExpr)
            ):
                return True
        # `import name` or `from x import ... as name`.
        if isinstance(node, (nodes.Import, nodes.ImportFrom)) and any(
            (node_name[1] and node_name[1] == name) or (node_name[0] == name)
            for node_name in node.names
        ):
            return True
        # `with ... as name:`
        if isinstance(node, nodes.With) and any(
            isinstance(item[1], nodes.AssignName) and item[1].name == name
            for item in node.items
        ):
            return True
        # `def name(...)` or `class name:`
        if isinstance(node, (nodes.ClassDef, nodes.FunctionDef)) and node.name == name:
            return True
        # `except ... as name:`
        if (
            isinstance(node, nodes.ExceptHandler)
            and node.name
            and node.name.name == name
        ):
            return True
        return False

    @staticmethod
    def _defines_name_raises_or_returns_recursive(
        name: str, node: nodes.NodeNG
    ) -> bool:
        """Return True if some child of `node` defines the name `name`,
        raises, or returns.
        """
        for stmt in node.get_children():
            if NamesConsumer._defines_name_raises_or_returns(name, stmt):
                return True
            # Look one level deeper into if/with bodies.
            if isinstance(stmt, (nodes.If, nodes.With)):
                if any(
                    NamesConsumer._defines_name_raises_or_returns(name, nested_stmt)
                    for nested_stmt in stmt.get_children()
                ):
                    return True
            # Recurse into try statements that have no finally clause.
            if (
                isinstance(stmt, nodes.Try)
                and not stmt.finalbody
                and NamesConsumer._defines_name_raises_or_returns_recursive(name, stmt)
            ):
                return True
        return False

    @staticmethod
    def _check_loop_finishes_via_except(
        node: nodes.NodeNG, other_node_try_except: nodes.Try
    ) -> bool:
        """Check for a specific control flow scenario.

        Described in https://github.com/pylint-dev/pylint/issues/5683.

        Returns True for a scenario like::

            for _ in range(3):
                try:
                    ...
                except ValueError:
                    name = ...  # assignment under consideration
                else:
                    break
            else:
                print(name)  # `node`, guarded by the loop's else

        The loop's ``else`` clause (where ``node`` lives) runs only when no
        ``break`` executed; since the try/except's own ``else`` breaks, reaching
        ``node`` implies the except handler ran, so the name is defined.
        """
        if not other_node_try_except.orelse:
            return False
        closest_loop: None | (nodes.For | nodes.While) = (
            utils.get_node_first_ancestor_of_type(node, (nodes.For, nodes.While))
        )
        if closest_loop is None:
            return False
        if not any(
            else_statement is node or else_statement.parent_of(node)
            for else_statement in closest_loop.orelse
        ):
            # `node` not guarded by `else`
            return False
        for inner_else_statement in other_node_try_except.orelse:
            if isinstance(inner_else_statement, nodes.Break):
                break_stmt = inner_else_statement
                break
        else:
            # No break statement
            return False

        def _try_in_loop_body(
            other_node_try_except: nodes.Try, loop: nodes.For | nodes.While
        ) -> bool:
            """Return True if `other_node_try_except` is a descendant of `loop`."""
            return any(
                loop_body_statement is other_node_try_except
                or loop_body_statement.parent_of(other_node_try_except)
                for loop_body_statement in loop.body
            )

        if not _try_in_loop_body(other_node_try_except, closest_loop):
            for ancestor in closest_loop.node_ancestors():
                if isinstance(ancestor, (nodes.For, nodes.While)):
                    if _try_in_loop_body(other_node_try_except, ancestor):
                        break
            else:
                # `other_node_try_except` didn't have a shared ancestor loop
                return False

        for loop_stmt in closest_loop.body:
            if NamesConsumer._recursive_search_for_continue_before_break(
                loop_stmt, break_stmt
            ):
                break
        else:
            # No continue found, so we arrived at our special case!
            return True
        return False

    @staticmethod
    def _recursive_search_for_continue_before_break(
        stmt: _base_nodes.Statement, break_stmt: nodes.Break
    ) -> bool:
        """Return True if any Continue node can be found in descendants of `stmt`
        before encountering `break_stmt`, ignoring any nested loops.
        """
        if stmt is break_stmt:
            return False
        if isinstance(stmt, nodes.Continue):
            return True
        for child in stmt.get_children():
            # NOTE(review): this guards on `stmt`, not `child` — once the current
            # statement is itself a loop, all of its children are skipped. It reads
            # as if `child` was intended (to skip only nested loops); confirm the
            # intended contract before changing.
            if isinstance(stmt, (nodes.For, nodes.While)):
                continue
            if NamesConsumer._recursive_search_for_continue_before_break(
                child, break_stmt
            ):
                return True
        return False

    @staticmethod
    def _uncertain_nodes_in_try_blocks_when_evaluating_except_blocks(
        found_nodes: list[nodes.NodeNG], node_statement: _base_nodes.Statement
    ) -> list[nodes.NodeNG]:
        """Return any nodes in ``found_nodes`` that should be treated as uncertain.

        Nodes are uncertain when they are in a try block and the ``node_statement``
        being evaluated is in one of its except handlers.
        """
        uncertain_nodes: list[nodes.NodeNG] = []
        # Only relevant when the use site itself sits inside an except handler.
        closest_except_handler = utils.get_node_first_ancestor_of_type(
            node_statement, nodes.ExceptHandler
        )
        if closest_except_handler is None:
            return uncertain_nodes
        for other_node in found_nodes:
            other_node_statement = other_node.statement()
            # If the other statement is the except handler guarding `node`, it executes
            if other_node_statement is closest_except_handler:
                continue
            # Ensure other_node is in a try block
            (
                other_node_try_ancestor,
                other_node_try_ancestor_visited_child,
            ) = utils.get_node_first_ancestor_of_type_and_its_child(
                other_node_statement, nodes.Try
            )
            if other_node_try_ancestor is None:
                continue
            if (
                other_node_try_ancestor_visited_child
                not in other_node_try_ancestor.body
            ):
                continue
            # Make sure nesting is correct -- there should be at least one
            # except handler that is a sibling attached to the try ancestor,
            # or is an ancestor of the try ancestor.
            if not any(
                closest_except_handler in other_node_try_ancestor.handlers
                or other_node_try_ancestor_except_handler
                in closest_except_handler.node_ancestors()
                for other_node_try_ancestor_except_handler in other_node_try_ancestor.handlers
            ):
                continue
            # Passed all tests for uncertain execution
            uncertain_nodes.append(other_node)
        return uncertain_nodes

    @staticmethod
    def _uncertain_nodes_in_try_blocks_when_evaluating_finally_blocks(
        found_nodes: list[nodes.NodeNG],
        node_statement: _base_nodes.Statement,
        name: str,
    ) -> list[nodes.NodeNG]:
        """Return any nodes in ``found_nodes`` that should be treated as uncertain.

        Nodes are uncertain when they sit in the try body of a try/finally while
        the ``node_statement`` being evaluated is in the finally body: the try
        body may have raised before binding ``name``.
        """
        uncertain_nodes: list[nodes.NodeNG] = []
        # The use site must be inside the finalbody of its closest Try ancestor.
        (
            closest_try_finally_ancestor,
            child_of_closest_try_finally_ancestor,
        ) = utils.get_node_first_ancestor_of_type_and_its_child(
            node_statement, nodes.Try
        )
        if closest_try_finally_ancestor is None:
            return uncertain_nodes
        if (
            child_of_closest_try_finally_ancestor
            not in closest_try_finally_ancestor.finalbody
        ):
            return uncertain_nodes
        for other_node in found_nodes:
            other_node_statement = other_node.statement()
            (
                other_node_try_finally_ancestor,
                child_of_other_node_try_finally_ancestor,
            ) = utils.get_node_first_ancestor_of_type_and_its_child(
                other_node_statement, nodes.Try
            )
            if other_node_try_finally_ancestor is None:
                continue
            # other_node needs to descend from the try of a try/finally.
            if (
                child_of_other_node_try_finally_ancestor
                not in other_node_try_finally_ancestor.body
            ):
                continue
            # If the two try/finally ancestors are not the same, then
            # node_statement's closest try/finally ancestor needs to be in
            # the final body of other_node's try/finally ancestor, or
            # descend from one of the statements in that final body.
            if (
                other_node_try_finally_ancestor is not closest_try_finally_ancestor
                and not any(
                    other_node_final_statement is closest_try_finally_ancestor
                    or other_node_final_statement.parent_of(
                        closest_try_finally_ancestor
                    )
                    for other_node_final_statement in other_node_try_finally_ancestor.finalbody
                )
            ):
                continue
            # Is the name defined in all exception clauses?
            if other_node_try_finally_ancestor.handlers and all(
                NamesConsumer._defines_name_raises_or_returns_recursive(name, handler)
                for handler in other_node_try_finally_ancestor.handlers
            ):
                continue
            # Passed all tests for uncertain execution
            uncertain_nodes.append(other_node)
        return uncertain_nodes
+
+
+# pylint: disable=too-many-public-methods
 class VariablesChecker(BaseChecker):
     """BaseChecker for variables.

@@ -360,221 +1227,1305 @@ class VariablesChecker(BaseChecker):
     * __all__ consistency
     * self/cls assignment
     """
-    name = 'variables'
+
+    name = "variables"
     msgs = MSGS
-    options = ('init-import', {'default': False, 'type': 'yn', 'metavar':
-        '<y or n>', 'help':
-        'Tells whether we should check for unused import in __init__ files.'}
-        ), ('dummy-variables-rgx', {'default':
-        '_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_',
-        'type': 'regexp', 'metavar': '<regexp>', 'help':
-        'A regular expression matching the name of dummy variables (i.e. expected to not be used).'
-        }), ('additional-builtins', {'default': (), 'type': 'csv',
-        'metavar': '<comma separated list>', 'help':
-        'List of additional names supposed to be defined in builtins. Remember that you should avoid defining new builtins when possible.'
-        }), ('callbacks', {'default': ('cb_', '_cb'), 'type': 'csv',
-        'metavar': '<callbacks>', 'help':
-        'List of strings which can identify a callback function by name. A callback name must start or end with one of those strings.'
-        }), ('redefining-builtins-modules', {'default': ('six.moves',
-        'past.builtins', 'future.builtins', 'builtins', 'io'), 'type':
-        'csv', 'metavar': '<comma separated list>', 'help':
-        'List of qualified module names which can have objects that can redefine builtins.'
-        }), ('ignored-argument-names', {'default': IGNORED_ARGUMENT_NAMES,
-        'type': 'regexp', 'metavar': '<regexp>', 'help':
-        'Argument names that match this expression will be ignored.'}), (
-        'allow-global-unused-variables', {'default': True, 'type': 'yn',
-        'metavar': '<y or n>', 'help':
-        'Tells whether unused global variables should be treated as a violation.'
-        }), ('allowed-redefined-builtins', {'default': (), 'type': 'csv',
-        'metavar': '<comma separated list>', 'help':
-        'List of names allowed to shadow builtins'})
-
-    def __init__(self, linter: PyLinter) ->None:
+    options = (
+        (
+            "init-import",
+            {
+                "default": False,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Tells whether we should check for unused import in "
+                "__init__ files.",
+            },
+        ),
+        (
+            "dummy-variables-rgx",
+            {
+                "default": "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_",
+                "type": "regexp",
+                "metavar": "<regexp>",
+                "help": "A regular expression matching the name of dummy "
+                "variables (i.e. expected to not be used).",
+            },
+        ),
+        (
+            "additional-builtins",
+            {
+                "default": (),
+                "type": "csv",
+                "metavar": "<comma separated list>",
+                "help": "List of additional names supposed to be defined in "
+                "builtins. Remember that you should avoid defining new builtins "
+                "when possible.",
+            },
+        ),
+        (
+            "callbacks",
+            {
+                "default": ("cb_", "_cb"),
+                "type": "csv",
+                "metavar": "<callbacks>",
+                "help": "List of strings which can identify a callback "
+                "function by name. A callback name must start or "
+                "end with one of those strings.",
+            },
+        ),
+        (
+            "redefining-builtins-modules",
+            {
+                "default": (
+                    "six.moves",
+                    "past.builtins",
+                    "future.builtins",
+                    "builtins",
+                    "io",
+                ),
+                "type": "csv",
+                "metavar": "<comma separated list>",
+                "help": "List of qualified module names which can have objects "
+                "that can redefine builtins.",
+            },
+        ),
+        (
+            "ignored-argument-names",
+            {
+                "default": IGNORED_ARGUMENT_NAMES,
+                "type": "regexp",
+                "metavar": "<regexp>",
+                "help": "Argument names that match this expression will be ignored.",
+            },
+        ),
+        (
+            "allow-global-unused-variables",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Tells whether unused global variables should be treated as a violation.",
+            },
+        ),
+        (
+            "allowed-redefined-builtins",
+            {
+                "default": (),
+                "type": "csv",
+                "metavar": "<comma separated list>",
+                "help": "List of names allowed to shadow builtins",
+            },
+        ),
+    )
+
+    def __init__(self, linter: PyLinter) -> None:
         super().__init__(linter)
         self._to_consume: list[NamesConsumer] = []
         self._type_annotation_names: list[str] = []
-        self._except_handler_names_queue: list[tuple[nodes.ExceptHandler,
-            nodes.AssignName]] = []
+        self._except_handler_names_queue: list[
+            tuple[nodes.ExceptHandler, nodes.AssignName]
+        ] = []
         """This is a queue, last in first out."""
-        self._evaluated_type_checking_scopes: dict[str, list[nodes.
-            LocalsDictNodeNG]] = {}
+        self._evaluated_type_checking_scopes: dict[
+            str, list[nodes.LocalsDictNodeNG]
+        ] = {}
         self._postponed_evaluation_enabled = False

-    def visit_module(self, node: nodes.Module) ->None:
+    @utils.only_required_for_messages(
+        "unbalanced-dict-unpacking",
+    )
+    def visit_for(self, node: nodes.For) -> None:
+        if not isinstance(node.target, nodes.Tuple):
+            return
+
+        targets = node.target.elts
+
+        inferred = utils.safe_infer(node.iter)
+        if not isinstance(inferred, DICT_TYPES):
+            return
+
+        values = self._nodes_to_unpack(inferred)
+        if not values:
+            # no dict items returned
+            return
+
+        if isinstance(inferred, astroid.objects.DictItems):
+            # dict.items() is a bit special because values will be a tuple
+            # So as long as there are always 2 targets and values each are
+            # a tuple with two items, this will unpack correctly.
+            # Example: `for key, val in {1: 2, 3: 4}.items()`
+            if len(targets) == 2 and all(len(x.elts) == 2 for x in values):
+                return
+
+            # Starred nodes indicate ambiguous unpacking
+            # if `dict.items()` is used so we won't flag them.
+            if any(isinstance(target, nodes.Starred) for target in targets):
+                return
+
+        if isinstance(inferred, nodes.Dict):
+            if isinstance(node.iter, nodes.Name):
+                # If this is a case of 'dict-items-missing-iter', we don't want to
+                # report it as an 'unbalanced-dict-unpacking' as well
+                # TODO (performance), merging both checks would streamline this
+                if len(targets) == 2:
+                    return
+
+        else:
+            is_starred_targets = any(
+                isinstance(target, nodes.Starred) for target in targets
+            )
+            for value in values:
+                value_length = self._get_value_length(value)
+                is_valid_star_unpack = is_starred_targets and value_length >= len(
+                    targets
+                )
+                if len(targets) != value_length and not is_valid_star_unpack:
+                    details = _get_unpacking_extra_info(node, inferred)
+                    self._report_unbalanced_unpacking(
+                        node, inferred, targets, value_length, details
+                    )
+                    break
+
+    def leave_for(self, node: nodes.For) -> None:
+        self._store_type_annotation_names(node)
+
+    def visit_module(self, node: nodes.Module) -> None:
         """Visit module : update consumption analysis variable
         checks globals doesn't overrides builtins.
         """
-        pass
-
-    @utils.only_required_for_messages('unused-import',
-        'unused-wildcard-import', 'redefined-builtin',
-        'undefined-all-variable', 'invalid-all-object',
-        'invalid-all-format', 'unused-variable', 'undefined-variable')
-    def leave_module(self, node: nodes.Module) ->None:
+        self._to_consume = [NamesConsumer(node, "module")]
+        self._postponed_evaluation_enabled = is_postponed_evaluation_enabled(node)
+
+        for name, stmts in node.locals.items():
+            if utils.is_builtin(name):
+                if self._should_ignore_redefined_builtin(stmts[0]) or name == "__doc__":
+                    continue
+                self.add_message("redefined-builtin", args=name, node=stmts[0])
+
+    @utils.only_required_for_messages(
+        "unused-import",
+        "unused-wildcard-import",
+        "redefined-builtin",
+        "undefined-all-variable",
+        "invalid-all-object",
+        "invalid-all-format",
+        "unused-variable",
+        "undefined-variable",
+    )
+    def leave_module(self, node: nodes.Module) -> None:
         """Leave module: check globals."""
-        pass
+        assert len(self._to_consume) == 1
+
+        self._check_metaclasses(node)
+        not_consumed = self._to_consume.pop().to_consume
+        # attempt to check for __all__ if defined
+        if "__all__" in node.locals:
+            self._check_all(node, not_consumed)
+
+        # check for unused globals
+        self._check_globals(not_consumed)
+
+        # don't check unused imports in __init__ files
+        if not self.linter.config.init_import and node.package:
+            return

-    def visit_classdef(self, node: nodes.ClassDef) ->None:
+        self._check_imports(not_consumed)
+        self._type_annotation_names = []
+
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
         """Visit class: update consumption analysis variable."""
-        pass
+        self._to_consume.append(NamesConsumer(node, "class"))

-    def leave_classdef(self, node: nodes.ClassDef) ->None:
+    def leave_classdef(self, node: nodes.ClassDef) -> None:
         """Leave class: update consumption analysis variable."""
-        pass
-
-    def visit_lambda(self, node: nodes.Lambda) ->None:
+        # Check for hidden ancestor names
+        # e.g. "six" in: Class X(six.with_metaclass(ABCMeta, object)):
+        for name_node in node.nodes_of_class(nodes.Name):
+            if (
+                isinstance(name_node.parent, nodes.Call)
+                and isinstance(name_node.parent.func, nodes.Attribute)
+                and isinstance(name_node.parent.func.expr, nodes.Name)
+            ):
+                hidden_name_node = name_node.parent.func.expr
+                for consumer in self._to_consume:
+                    if hidden_name_node.name in consumer.to_consume:
+                        consumer.mark_as_consumed(
+                            hidden_name_node.name,
+                            consumer.to_consume[hidden_name_node.name],
+                        )
+                        break
+        self._to_consume.pop()
+
+    def visit_lambda(self, node: nodes.Lambda) -> None:
         """Visit lambda: update consumption analysis variable."""
-        pass
+        self._to_consume.append(NamesConsumer(node, "lambda"))

-    def leave_lambda(self, _: nodes.Lambda) ->None:
+    def leave_lambda(self, _: nodes.Lambda) -> None:
         """Leave lambda: update consumption analysis variable."""
-        pass
+        # do not check for not used locals here
+        self._to_consume.pop()

-    def visit_generatorexp(self, node: nodes.GeneratorExp) ->None:
+    def visit_generatorexp(self, node: nodes.GeneratorExp) -> None:
         """Visit genexpr: update consumption analysis variable."""
-        pass
+        self._to_consume.append(NamesConsumer(node, "comprehension"))

-    def leave_generatorexp(self, _: nodes.GeneratorExp) ->None:
+    def leave_generatorexp(self, _: nodes.GeneratorExp) -> None:
         """Leave genexpr: update consumption analysis variable."""
-        pass
+        # do not check for not used locals here
+        self._to_consume.pop()

-    def visit_dictcomp(self, node: nodes.DictComp) ->None:
+    def visit_dictcomp(self, node: nodes.DictComp) -> None:
         """Visit dictcomp: update consumption analysis variable."""
-        pass
+        self._to_consume.append(NamesConsumer(node, "comprehension"))

-    def leave_dictcomp(self, _: nodes.DictComp) ->None:
+    def leave_dictcomp(self, _: nodes.DictComp) -> None:
         """Leave dictcomp: update consumption analysis variable."""
-        pass
+        # do not check for not used locals here
+        self._to_consume.pop()

-    def visit_setcomp(self, node: nodes.SetComp) ->None:
+    def visit_setcomp(self, node: nodes.SetComp) -> None:
         """Visit setcomp: update consumption analysis variable."""
-        pass
+        self._to_consume.append(NamesConsumer(node, "comprehension"))

-    def leave_setcomp(self, _: nodes.SetComp) ->None:
+    def leave_setcomp(self, _: nodes.SetComp) -> None:
         """Leave setcomp: update consumption analysis variable."""
-        pass
+        # do not check for not used locals here
+        self._to_consume.pop()

-    def visit_functiondef(self, node: nodes.FunctionDef) ->None:
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
         """Visit function: update consumption analysis variable and check locals."""
-        pass
-
-    def leave_functiondef(self, node: nodes.FunctionDef) ->None:
+        self._to_consume.append(NamesConsumer(node, "function"))
+        if not (
+            self.linter.is_message_enabled("redefined-outer-name")
+            or self.linter.is_message_enabled("redefined-builtin")
+        ):
+            return
+        globs = node.root().globals
+        for name, stmt in node.items():
+            if name in globs and not isinstance(stmt, nodes.Global):
+                definition = globs[name][0]
+                if (
+                    isinstance(definition, nodes.ImportFrom)
+                    and definition.modname == FUTURE
+                ):
+                    # It is a __future__ directive, not a symbol.
+                    continue
+
+                # Do not take into account redefined names for the purpose
+                # of type checking:
+                if any(
+                    in_type_checking_block(definition) for definition in globs[name]
+                ):
+                    continue
+
+                # Suppress emitting the message if the outer name is in the
+                # scope of an exception assignment.
+                # For example: the `e` in `except ValueError as e`
+                global_node = globs[name][0]
+                if isinstance(global_node, nodes.AssignName) and isinstance(
+                    global_node.parent, nodes.ExceptHandler
+                ):
+                    continue
+
+                line = definition.fromlineno
+                if not self._is_name_ignored(stmt, name):
+                    self.add_message(
+                        "redefined-outer-name", args=(name, line), node=stmt
+                    )
+
+            elif (
+                utils.is_builtin(name)
+                and not self._allowed_redefined_builtin(name)
+                and not self._should_ignore_redefined_builtin(stmt)
+            ):
+                # do not print Redefining builtin for additional builtins
+                self.add_message("redefined-builtin", args=name, node=stmt)
+
+    def leave_functiondef(self, node: nodes.FunctionDef) -> None:
         """Leave function: check function's locals are consumed."""
-        pass
+        self._check_metaclasses(node)
+
+        if node.type_comment_returns:
+            self._store_type_annotation_node(node.type_comment_returns)
+        if node.type_comment_args:
+            for argument_annotation in node.type_comment_args:
+                self._store_type_annotation_node(argument_annotation)
+
+        not_consumed = self._to_consume.pop().to_consume
+        if not (
+            self.linter.is_message_enabled("unused-variable")
+            or self.linter.is_message_enabled("possibly-unused-variable")
+            or self.linter.is_message_enabled("unused-argument")
+        ):
+            return
+
+        # Don't check arguments of function which are only raising an exception.
+        if utils.is_error(node):
+            return
+
+        # Don't check arguments of abstract methods or within an interface.
+        is_method = node.is_method()
+        if is_method and node.is_abstract():
+            return
+
+        global_names = _flattened_scope_names(node.nodes_of_class(nodes.Global))
+        nonlocal_names = _flattened_scope_names(node.nodes_of_class(nodes.Nonlocal))
+        comprehension_target_names: set[str] = set()
+
+        for comprehension_scope in node.nodes_of_class(nodes.ComprehensionScope):
+            for generator in comprehension_scope.generators:
+                for name in utils.find_assigned_names_recursive(generator.target):
+                    comprehension_target_names.add(name)
+
+        for name, stmts in not_consumed.items():
+            self._check_is_unused(
+                name,
+                node,
+                stmts[0],
+                global_names,
+                nonlocal_names,
+                comprehension_target_names,
+            )
+
     visit_asyncfunctiondef = visit_functiondef
     leave_asyncfunctiondef = leave_functiondef

-    @utils.only_required_for_messages('global-variable-undefined',
-        'global-variable-not-assigned', 'global-statement',
-        'global-at-module-level', 'redefined-builtin')
-    def visit_global(self, node: nodes.Global) ->None:
+    @utils.only_required_for_messages(
+        "global-variable-undefined",
+        "global-variable-not-assigned",
+        "global-statement",
+        "global-at-module-level",
+        "redefined-builtin",
+    )
+    def visit_global(self, node: nodes.Global) -> None:
         """Check names imported exists in the global scope."""
-        pass
-
-    def visit_name(self, node: (nodes.Name | nodes.AssignName | nodes.DelName)
-        ) ->None:
+        frame = node.frame()
+        if isinstance(frame, nodes.Module):
+            self.add_message("global-at-module-level", node=node, confidence=HIGH)
+            return
+
+        module = frame.root()
+        default_message = True
+        locals_ = node.scope().locals
+        for name in node.names:
+            try:
+                assign_nodes = module.getattr(name)
+            except astroid.NotFoundError:
+                # unassigned global, skip
+                assign_nodes = []
+
+            not_defined_locally_by_import = not any(
+                isinstance(local, (nodes.Import, nodes.ImportFrom))
+                for local in locals_.get(name, ())
+            )
+            if (
+                not utils.is_reassigned_after_current(node, name)
+                and not utils.is_deleted_after_current(node, name)
+                and not_defined_locally_by_import
+            ):
+                self.add_message(
+                    "global-variable-not-assigned",
+                    args=name,
+                    node=node,
+                    confidence=HIGH,
+                )
+                default_message = False
+                continue
+
+            for anode in assign_nodes:
+                if (
+                    isinstance(anode, nodes.AssignName)
+                    and anode.name in module.special_attributes
+                ):
+                    self.add_message("redefined-builtin", args=name, node=node)
+                    break
+                if anode.frame() is module:
+                    # module level assignment
+                    break
+                if (
+                    isinstance(anode, (nodes.ClassDef, nodes.FunctionDef))
+                    and anode.parent is module
+                ):
+                    # module level function assignment
+                    break
+            else:
+                if not_defined_locally_by_import:
+                    # global undefined at the module scope
+                    self.add_message(
+                        "global-variable-undefined",
+                        args=name,
+                        node=node,
+                        confidence=HIGH,
+                    )
+                    default_message = False
+
+        if default_message:
+            self.add_message("global-statement", node=node, confidence=HIGH)
+
+    def visit_assignname(self, node: nodes.AssignName) -> None:
+        if isinstance(node.assign_type(), nodes.AugAssign):
+            self.visit_name(node)
+
+    def visit_delname(self, node: nodes.DelName) -> None:
+        self.visit_name(node)
+
+    def visit_name(self, node: nodes.Name | nodes.AssignName | nodes.DelName) -> None:
         """Don't add the 'utils.only_required_for_messages' decorator here!

         It's important that all 'Name' nodes are visited, otherwise the
         'NamesConsumers' won't be correct.
         """
-        pass
+        stmt = node.statement()
+        if stmt.fromlineno is None:
+            # name node from an astroid built from live code, skip
+            assert not stmt.root().file.endswith(".py")
+            return
+
+        self._undefined_and_used_before_checker(node, stmt)
+        self._loopvar_name(node)
+
+    @utils.only_required_for_messages("redefined-outer-name")
+    def visit_excepthandler(self, node: nodes.ExceptHandler) -> None:
+        if not node.name or not isinstance(node.name, nodes.AssignName):
+            return
+
+        for outer_except, outer_except_assign_name in self._except_handler_names_queue:
+            if node.name.name == outer_except_assign_name.name:
+                self.add_message(
+                    "redefined-outer-name",
+                    args=(outer_except_assign_name.name, outer_except.fromlineno),
+                    node=node,
+                )
+                break

-    def _should_node_be_skipped(self, node: nodes.Name, consumer:
-        NamesConsumer, is_start_index: bool) ->bool:
+        self._except_handler_names_queue.append((node, node.name))
+
+    @utils.only_required_for_messages("redefined-outer-name")
+    def leave_excepthandler(self, node: nodes.ExceptHandler) -> None:
+        if not node.name or not isinstance(node.name, nodes.AssignName):
+            return
+        self._except_handler_names_queue.pop()
+
+    def _undefined_and_used_before_checker(
+        self, node: nodes.Name, stmt: nodes.NodeNG
+    ) -> None:
+        frame = stmt.scope()
+        start_index = len(self._to_consume) - 1
+
+        # iterates through parent scopes, from the inner to the outer
+        base_scope_type = self._to_consume[start_index].scope_type
+
+        for i in range(start_index, -1, -1):
+            current_consumer = self._to_consume[i]
+
+            # Certain nodes shouldn't be checked as they get checked another time
+            if self._should_node_be_skipped(node, current_consumer, i == start_index):
+                continue
+
+            action, nodes_to_consume = self._check_consumer(
+                node, stmt, frame, current_consumer, base_scope_type
+            )
+            if nodes_to_consume:
+                # Any nodes added to consumed_uncertain by get_next_to_consume()
+                # should be added back so that they are marked as used.
+                # They will have already had a chance to emit used-before-assignment.
+                # We check here instead of before every single return in _check_consumer()
+                nodes_to_consume += current_consumer.consumed_uncertain[node.name]
+                current_consumer.mark_as_consumed(node.name, nodes_to_consume)
+            if action is VariableVisitConsumerAction.CONTINUE:
+                continue
+            if action is VariableVisitConsumerAction.RETURN:
+                return
+
+        # we have not found the name, if it isn't a builtin, that's an
+        # undefined name !
+        if not (
+            node.name in nodes.Module.scope_attrs
+            or utils.is_builtin(node.name)
+            or node.name in self.linter.config.additional_builtins
+            or (
+                node.name == "__class__"
+                and any(
+                    i.is_method()
+                    for i in node.node_ancestors()
+                    if isinstance(i, nodes.FunctionDef)
+                )
+            )
+        ) and not utils.node_ignores_exception(node, NameError):
+            self.add_message("undefined-variable", args=node.name, node=node)
+
+    def _should_node_be_skipped(
+        self, node: nodes.Name, consumer: NamesConsumer, is_start_index: bool
+    ) -> bool:
         """Tests a consumer and node for various conditions in which the node shouldn't
         be checked for the undefined-variable and used-before-assignment checks.
         """
-        pass
-
-    def _check_consumer(self, node: nodes.Name, stmt: nodes.NodeNG, frame:
-        nodes.LocalsDictNodeNG, current_consumer: NamesConsumer,
-        base_scope_type: str) ->tuple[VariableVisitConsumerAction, list[
-        nodes.NodeNG] | None]:
+        if consumer.scope_type == "class":
+            # The list of base classes in the class definition is not part
+            # of the class body.
+            # If the current scope is a class scope but it's not the inner
+            # scope, ignore it. This prevents accessing this scope instead of
+            # the globals one in function members when there are some common
+            # names.
+            if utils.is_ancestor_name(consumer.node, node) or (
+                not is_start_index and self._ignore_class_scope(node)
+            ):
+                return True
+
+            # Ignore inner class scope for keywords in class definition
+            if isinstance(node.parent, nodes.Keyword) and isinstance(
+                node.parent.parent, nodes.ClassDef
+            ):
+                return True
+
+        elif consumer.scope_type == "function" and self._defined_in_function_definition(
+            node, consumer.node
+        ):
+            if any(node.name == param.name.name for param in consumer.node.type_params):
+                return False
+
+            # If the name node is used as a function default argument's value or as
+            # a decorator, then start from the parent frame of the function instead
+            # of the function frame - and thus open an inner class scope
+            return True
+
+        elif consumer.scope_type == "lambda" and utils.is_default_argument(
+            node, consumer.node
+        ):
+            return True
+
+        return False
+
+    # pylint: disable = too-many-return-statements, too-many-branches
+    def _check_consumer(
+        self,
+        node: nodes.Name,
+        stmt: nodes.NodeNG,
+        frame: nodes.LocalsDictNodeNG,
+        current_consumer: NamesConsumer,
+        base_scope_type: str,
+    ) -> tuple[VariableVisitConsumerAction, list[nodes.NodeNG] | None]:
         """Checks a consumer for conditions that should trigger messages."""
-        pass
-
-    def _report_unfound_name_definition(self, node: nodes.NodeNG,
-        current_consumer: NamesConsumer) ->None:
+        # If the name has already been consumed, only check it's not a loop
+        # variable used outside the loop.
+        if node.name in current_consumer.consumed:
+            # Avoid the case where there are homonyms inside function scope and
+            # comprehension current scope (avoid bug #1731)
+            if utils.is_func_decorator(current_consumer.node) or not isinstance(
+                node, nodes.ComprehensionScope
+            ):
+                self._check_late_binding_closure(node)
+                return (VariableVisitConsumerAction.RETURN, None)
+
+        found_nodes = current_consumer.get_next_to_consume(node)
+        if found_nodes is None:
+            return (VariableVisitConsumerAction.CONTINUE, None)
+        if not found_nodes:
+            self._report_unfound_name_definition(node, current_consumer)
+            # Mark for consumption any nodes added to consumed_uncertain by
+            # get_next_to_consume() because they might not have executed.
+            nodes_to_consume = current_consumer.consumed_uncertain[node.name]
+            nodes_to_consume = self._filter_type_checking_import_from_consumption(
+                node, nodes_to_consume
+            )
+            return (
+                VariableVisitConsumerAction.RETURN,
+                nodes_to_consume,
+            )
+
+        self._check_late_binding_closure(node)
+
+        defnode = utils.assign_parent(found_nodes[0])
+        defstmt = defnode.statement()
+        defframe = defstmt.frame()
+
+        # The class reuses itself in the class scope.
+        is_recursive_klass: bool = (
+            frame is defframe
+            and defframe.parent_of(node)
+            and isinstance(defframe, nodes.ClassDef)
+            and node.name == defframe.name
+        )
+
+        if (
+            is_recursive_klass
+            and utils.get_node_first_ancestor_of_type(node, nodes.Lambda)
+            and (
+                not utils.is_default_argument(node)
+                or node.scope().parent.scope() is not defframe
+            )
+        ):
+            # Self-referential class references are fine in lambda's --
+            # As long as they are not part of the default argument directly
+            # under the scope of the parent self-referring class.
+            # Example of valid default argument:
+            # class MyName3:
+            #     myattr = 1
+            #     mylambda3 = lambda: lambda a=MyName3: a
+            # Example of invalid default argument:
+            # class MyName4:
+            #     myattr = 1
+            #     mylambda4 = lambda a=MyName4: lambda: a
+
+            # If the above conditional is True,
+            # there is no possibility of undefined-variable
+            # Also do not consume class name
+            # (since consuming blocks subsequent checks)
+            # -- quit
+            return (VariableVisitConsumerAction.RETURN, None)
+
+        (
+            maybe_before_assign,
+            annotation_return,
+            use_outer_definition,
+        ) = self._is_variable_violation(
+            node,
+            defnode,
+            stmt,
+            defstmt,
+            frame,
+            defframe,
+            base_scope_type,
+            is_recursive_klass,
+        )
+
+        if use_outer_definition:
+            return (VariableVisitConsumerAction.CONTINUE, None)
+
+        if (
+            maybe_before_assign
+            and not utils.is_defined_before(node)
+            and not astroid.are_exclusive(stmt, defstmt, ("NameError",))
+        ):
+            # Used and defined in the same place, e.g `x += 1` and `del x`
+            defined_by_stmt = defstmt is stmt and isinstance(
+                node, (nodes.DelName, nodes.AssignName)
+            )
+            if (
+                is_recursive_klass
+                or defined_by_stmt
+                or annotation_return
+                or isinstance(defstmt, nodes.Delete)
+            ):
+                if not utils.node_ignores_exception(node, NameError):
+                    # Handle postponed evaluation of annotations
+                    if not (
+                        self._postponed_evaluation_enabled
+                        and isinstance(
+                            stmt,
+                            (
+                                nodes.AnnAssign,
+                                nodes.FunctionDef,
+                                nodes.Arguments,
+                            ),
+                        )
+                        and node.name in node.root().locals
+                    ):
+                        if defined_by_stmt:
+                            return (VariableVisitConsumerAction.CONTINUE, [node])
+                        return (VariableVisitConsumerAction.CONTINUE, None)
+
+            elif base_scope_type != "lambda":
+                # E0601 may *not* occurs in lambda scope.
+
+                # Skip postponed evaluation of annotations
+                # and unevaluated annotations inside a function body
+                if not (
+                    self._postponed_evaluation_enabled
+                    and isinstance(stmt, (nodes.AnnAssign, nodes.FunctionDef))
+                ) and not (
+                    isinstance(stmt, nodes.AnnAssign)
+                    and utils.get_node_first_ancestor_of_type(stmt, nodes.FunctionDef)
+                ):
+                    self.add_message(
+                        "used-before-assignment",
+                        args=node.name,
+                        node=node,
+                        confidence=HIGH,
+                    )
+                    return (VariableVisitConsumerAction.RETURN, found_nodes)
+
+            elif base_scope_type == "lambda":
+                # E0601 can occur in class-level scope in lambdas, as in
+                # the following example:
+                #   class A:
+                #      x = lambda attr: f + attr
+                #      f = 42
+                # We check lineno because doing the following is fine:
+                #   class A:
+                #      x = 42
+                #      y = lambda attr: x + attr
+                if (
+                    isinstance(frame, nodes.ClassDef)
+                    and node.name in frame.locals
+                    and stmt.fromlineno <= defstmt.fromlineno
+                ):
+                    self.add_message(
+                        "used-before-assignment",
+                        args=node.name,
+                        node=node,
+                        confidence=HIGH,
+                    )
+
+        elif not self._is_builtin(node.name) and self._is_only_type_assignment(
+            node, defstmt
+        ):
+            if node.scope().locals.get(node.name):
+                self.add_message(
+                    "used-before-assignment", args=node.name, node=node, confidence=HIGH
+                )
+            else:
+                self.add_message(
+                    "undefined-variable", args=node.name, node=node, confidence=HIGH
+                )
+            return (VariableVisitConsumerAction.RETURN, found_nodes)
+
+        elif isinstance(defstmt, nodes.ClassDef):
+            return self._is_first_level_self_reference(node, defstmt, found_nodes)
+
+        elif isinstance(defnode, nodes.NamedExpr):
+            if isinstance(defnode.parent, nodes.IfExp):
+                if self._is_never_evaluated(defnode, defnode.parent):
+                    self.add_message(
+                        "undefined-variable",
+                        args=node.name,
+                        node=node,
+                        confidence=INFERENCE,
+                    )
+                    return (VariableVisitConsumerAction.RETURN, found_nodes)
+
+        return (VariableVisitConsumerAction.RETURN, found_nodes)
+
    def _report_unfound_name_definition(
        self, node: nodes.NodeNG, current_consumer: NamesConsumer
    ) -> None:
        """Reports used-before-assignment when all name definition nodes
        get filtered out by NamesConsumer.

        Emits either ``used-before-assignment`` or
        ``possibly-used-before-assignment`` with a confidence level derived
        from what the consumer knows about the name.
        """
        # Under postponed evaluation (PEP 563), forward references inside
        # type annotations are legal -- nothing to report.
        if (
            self._postponed_evaluation_enabled
            and utils.is_node_in_type_annotation_context(node)
        ):
            return
        # Builtins (including configured additional builtins) are always defined.
        if self._is_builtin(node.name):
            return
        # Variable annotations inside a function body are never evaluated.
        if self._is_variable_annotation_in_function(node):
            return
        # Already reported for this name in this scope via a filtered
        # TYPE_CHECKING import -- avoid duplicate messages.
        if (
            node.name in self._evaluated_type_checking_scopes
            and node.scope() in self._evaluated_type_checking_scopes[node.name]
        ):
            return

        # Lower the confidence when the "definition" was filtered because it
        # sits under an always-false test or is only reachable on some paths.
        confidence = HIGH
        if node.name in current_consumer.names_under_always_false_test:
            confidence = INFERENCE
        elif node.name in current_consumer.consumed_uncertain:
            confidence = CONTROL_FLOW

        if node.name in current_consumer.names_defined_under_one_branch_only:
            msg = "possibly-used-before-assignment"
        else:
            msg = "used-before-assignment"

        self.add_message(
            msg,
            args=node.name,
            node=node,
            confidence=confidence,
        )
+
    def _filter_type_checking_import_from_consumption(
        self, node: nodes.NodeNG, nodes_to_consume: list[nodes.NodeNG]
    ) -> list[nodes.NodeNG]:
        """Do not consume type-checking import node as used-before-assignment
        may invoke in different scopes.

        Returns ``nodes_to_consume`` with the first import found inside a
        ``TYPE_CHECKING`` block (if any) removed, so it can still satisfy
        later uses of the same name.
        """
        type_checking_import = next(
            (
                n
                for n in nodes_to_consume
                if isinstance(n, (nodes.Import, nodes.ImportFrom))
                and in_type_checking_block(n)
            ),
            None,
        )
        # If used-before-assignment reported for usage of type checking import
        # keep track of its scope
        if type_checking_import and not self._is_variable_annotation_in_function(node):
            self._evaluated_type_checking_scopes.setdefault(node.name, []).append(
                node.scope()
            )
        nodes_to_consume = [n for n in nodes_to_consume if n != type_checking_import]
        return nodes_to_consume
+
    @utils.only_required_for_messages("no-name-in-module")
    def visit_import(self, node: nodes.Import) -> None:
        """Check modules attribute accesses.

        For each imported dotted name, resolve the top-level module and
        verify the remaining attribute chain exists on it.
        """
        if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
            # No need to verify this, since ImportError is already
            # handled by the client code.
            return
        # Don't verify import if part of guarded import block
        if in_type_checking_block(node):
            return
        if isinstance(node.parent, nodes.If) and is_sys_guard(node.parent):
            return

        for name, _ in node.names:
            parts = name.split(".")
            try:
                # Resolve only the leading package; the rest is checked
                # as an attribute chain below.
                module = next(_infer_name_module(node, parts[0]))
            except astroid.ResolveError:
                continue
            if not isinstance(module, nodes.Module):
                continue
            self._check_module_attrs(node, module, parts[1:])
+
    @utils.only_required_for_messages("no-name-in-module")
    def visit_importfrom(self, node: nodes.ImportFrom) -> None:
        """Check modules attribute accesses.

        Verifies that the source module path exists and that every
        imported name (other than ``*``) is an attribute of it.
        """
        if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
            # No need to verify this, since ImportError is already
            # handled by the client code.
            return
        # Don't verify import if part of guarded import block
        # I.e. `sys.version_info` or `typing.TYPE_CHECKING`
        if in_type_checking_block(node):
            return
        if isinstance(node.parent, nodes.If) and is_sys_guard(node.parent):
            return

        name_parts = node.modname.split(".")
        try:
            module = node.do_import_module(name_parts[0])
        except astroid.AstroidBuildingError:
            return
        # Walk down the dotted module path; bail out if any part is missing.
        module = self._check_module_attrs(node, module, name_parts[1:])
        if not module:
            return
        for name, _ in node.names:
            if name == "*":
                continue
            self._check_module_attrs(node, module, name.split("."))
+
+    @utils.only_required_for_messages(
+        "unbalanced-tuple-unpacking",
+        "unpacking-non-sequence",
+        "self-cls-assignment",
+        "unbalanced_dict_unpacking",
+    )
+    def visit_assign(self, node: nodes.Assign) -> None:
         """Check unbalanced tuple unpacking for assignments and unpacking
         non-sequences as well as in case self/cls get assigned.
         """
-        pass
+        self._check_self_cls_assign(node)
+        if not isinstance(node.targets[0], (nodes.Tuple, nodes.List)):
+            return
+
+        targets = node.targets[0].itered()

-    def visit_listcomp(self, node: nodes.ListComp) ->None:
+        # Check if we have starred nodes.
+        if any(isinstance(target, nodes.Starred) for target in targets):
+            return
+
+        try:
+            inferred = utils.safe_infer(node.value)
+            if inferred is not None:
+                self._check_unpacking(inferred, node, targets)
+        except astroid.InferenceError:
+            return
+
+    # listcomp have now also their scope
+    def visit_listcomp(self, node: nodes.ListComp) -> None:
         """Visit listcomp: update consumption analysis variable."""
-        pass
+        self._to_consume.append(NamesConsumer(node, "comprehension"))

-    def leave_listcomp(self, _: nodes.ListComp) ->None:
+    def leave_listcomp(self, _: nodes.ListComp) -> None:
         """Leave listcomp: update consumption analysis variable."""
-        pass
+        # do not check for not used locals here
+        self._to_consume.pop()
+
    def leave_assign(self, node: nodes.Assign) -> None:
        """Record names used in the assignment's type-comment annotation."""
        self._store_type_annotation_names(node)
+
    def leave_with(self, node: nodes.With) -> None:
        """Record names used in the with-statement's type-comment annotation."""
        self._store_type_annotation_names(node)
+
+    def visit_arguments(self, node: nodes.Arguments) -> None:
+        for annotation in node.type_comment_args:
+            self._store_type_annotation_node(annotation)
+
    # Relying on other checker's options, which might not have been initialized yet.
    # Hence a cached_property read lazily on first use rather than in __init__.
    @cached_property
    def _analyse_fallback_blocks(self) -> bool:
        # Value of the `analyse-fallback-blocks` option.
        return bool(self.linter.config.analyse_fallback_blocks)
+
    @cached_property
    def _ignored_modules(self) -> Iterable[str]:
        # Value of the `ignored-modules` option; cached lazily because the
        # option may not be initialized when this checker is constructed.
        return self.linter.config.ignored_modules  # type: ignore[no-any-return]
+
    @cached_property
    def _allow_global_unused_variables(self) -> bool:
        # Value of the `allow-global-unused-variables` option, read lazily.
        return bool(self.linter.config.allow_global_unused_variables)

     @staticmethod
-    def _in_lambda_or_comprehension_body(node: nodes.NodeNG, frame: nodes.
-        NodeNG) ->bool:
+    def _defined_in_function_definition(
+        node: nodes.NodeNG, frame: nodes.NodeNG
+    ) -> bool:
+        in_annotation_or_default_or_decorator = False
+        if isinstance(frame, nodes.FunctionDef) and node.statement() is frame:
+            in_annotation_or_default_or_decorator = (
+                (
+                    node in frame.args.annotations
+                    or node in frame.args.posonlyargs_annotations
+                    or node in frame.args.kwonlyargs_annotations
+                    or node is frame.args.varargannotation
+                    or node is frame.args.kwargannotation
+                )
+                or frame.args.parent_of(node)
+                or (frame.decorators and frame.decorators.parent_of(node))
+                or (
+                    frame.returns
+                    and (node is frame.returns or frame.returns.parent_of(node))
+                )
+            )
+        return in_annotation_or_default_or_decorator
+
+    @staticmethod
+    def _in_lambda_or_comprehension_body(
+        node: nodes.NodeNG, frame: nodes.NodeNG
+    ) -> bool:
         """Return True if node within a lambda/comprehension body (or similar) and thus
         should not have access to class attributes in frame.
         """
-        pass
+        child = node
+        parent = node.parent
+        while parent is not None:
+            if parent is frame:
+                return False
+            if isinstance(parent, nodes.Lambda) and child is not parent.args:
+                # Body of lambda should not have access to class attributes.
+                return True
+            if isinstance(parent, nodes.Comprehension) and child is not parent.iter:
+                # Only iter of list/set/dict/generator comprehension should have access.
+                return True
+            if isinstance(parent, nodes.ComprehensionScope) and not (
+                parent.generators and child is parent.generators[0]
+            ):
+                # Body of list/set/dict/generator comprehension should not have access to class attributes.
+                # Furthermore, only the first generator (if multiple) in comprehension should have access.
+                return True
+            child = parent
+            parent = parent.parent
+        return False

    @staticmethod
    def _is_variable_violation(
        node: nodes.Name,
        defnode: nodes.NodeNG,
        stmt: _base_nodes.Statement,
        defstmt: _base_nodes.Statement,
        frame: nodes.LocalsDictNodeNG,  # scope of statement of node
        defframe: nodes.LocalsDictNodeNG,
        base_scope_type: str,
        is_recursive_klass: bool,
    ) -> tuple[bool, bool, bool]:
        """Classify how the use of `node` relates to its definition `defstmt`.

        Returns a ``(maybe_before_assign, annotation_return,
        use_outer_definition)`` triple:

        - ``maybe_before_assign``: the use may precede the assignment;
        - ``annotation_return``: the use is a function return annotation
          referring to a name defined in the enclosing class;
        - ``use_outer_definition``: the outer (module-level) definition
          should be consumed instead of the local one.
        """
        maybe_before_assign = True
        annotation_return = False
        use_outer_definition = False
        if frame is not defframe:
            maybe_before_assign = _detect_global_scope(node, frame, defframe)
        elif defframe.parent is None:
            # we are at the module level, check the name is not
            # defined in builtins
            if (
                node.name in defframe.scope_attrs
                or astroid.builtin_lookup(node.name)[1]
            ):
                maybe_before_assign = False
        else:
            # we are in a local scope, check the name is not
            # defined in global or builtin scope
            # skip this lookup if name is assigned later in function scope/lambda
            # Note: the node.frame() is not the same as the `frame` argument which is
            # equivalent to frame.statement().scope()
            forbid_lookup = (
                isinstance(frame, nodes.FunctionDef)
                or isinstance(node.frame(), nodes.Lambda)
            ) and _assigned_locally(node)
            if not forbid_lookup and defframe.root().lookup(node.name)[1]:
                maybe_before_assign = False
                use_outer_definition = stmt == defstmt and not isinstance(
                    defnode, nodes.Comprehension
                )
            # check if we have a nonlocal
            elif node.name in defframe.locals:
                maybe_before_assign = not any(
                    isinstance(child, nodes.Nonlocal) and node.name in child.names
                    for child in defframe.get_children()
                )

        if (
            base_scope_type == "lambda"
            and isinstance(frame, nodes.ClassDef)
            and node.name in frame.locals
        ):
            # This rule verifies that if the definition node of the
            # checked name is an Arguments node and if the name
            # is used a default value in the arguments defaults
            # and the actual definition of the variable label
            # is happening before the Arguments definition.
            #
            # bar = None
            # foo = lambda bar=bar: bar
            #
            # In this case, maybe_before_assign should be False, otherwise
            # it should be True.
            maybe_before_assign = not (
                isinstance(defnode, nodes.Arguments)
                and node in defnode.defaults
                and frame.locals[node.name][0].fromlineno < defstmt.fromlineno
            )
        elif isinstance(defframe, nodes.ClassDef) and isinstance(
            frame, nodes.FunctionDef
        ):
            # Special rule for function return annotations,
            # using a name defined earlier in the class containing the function.
            if node is frame.returns and defframe.parent_of(frame.returns):
                annotation_return = True
                if frame.returns.name in defframe.locals:
                    definition = defframe.locals[node.name][0]
                    if definition.lineno is None or definition.lineno < frame.lineno:
                        # Detect class assignments with a name defined earlier in the
                        # class. In this case, no warning should be raised.
                        maybe_before_assign = False
                    else:
                        maybe_before_assign = True
                else:
                    maybe_before_assign = True
            if isinstance(node.parent, nodes.Arguments):
                maybe_before_assign = stmt.fromlineno <= defstmt.fromlineno
        elif is_recursive_klass:
            maybe_before_assign = True
        else:
            # Default case: a use before the line of the definition is suspect.
            maybe_before_assign = (
                maybe_before_assign and stmt.fromlineno <= defstmt.fromlineno
            )
            if maybe_before_assign and stmt.fromlineno == defstmt.fromlineno:
                if (
                    isinstance(defframe, nodes.FunctionDef)
                    and frame is defframe
                    and defframe.parent_of(node)
                    and (
                        defnode in defframe.type_params
                        # Single statement function, with the statement on the
                        # same line as the function definition
                        or stmt is not defstmt
                    )
                ):
                    maybe_before_assign = False
                elif (
                    isinstance(defstmt, NODES_WITH_VALUE_ATTR)
                    and VariablesChecker._maybe_used_and_assigned_at_once(defstmt)
                    and frame is defframe
                    and defframe.parent_of(node)
                    and stmt is defstmt
                ):
                    # Single statement if, with assignment expression on same
                    # line as assignment
                    # x = b if (b := True) else False
                    maybe_before_assign = False
                elif (
                    isinstance(  # pylint: disable=too-many-boolean-expressions
                        defnode, nodes.NamedExpr
                    )
                    and frame is defframe
                    and defframe.parent_of(stmt)
                    and stmt is defstmt
                    and (
                        (
                            defnode.lineno == node.lineno
                            and defnode.col_offset < node.col_offset
                        )
                        or (defnode.lineno < node.lineno)
                        or (
                            # Issue in the `ast` module until py39
                            # Nodes in a multiline string have the same lineno
                            # Could be false-positive without check
                            not PY39_PLUS
                            and defnode.lineno == node.lineno
                            and isinstance(
                                defstmt,
                                (
                                    nodes.Assign,
                                    nodes.AnnAssign,
                                    nodes.AugAssign,
                                    nodes.Return,
                                ),
                            )
                            and isinstance(defstmt.value, nodes.JoinedStr)
                        )
                    )
                ):
                    # Relation of a name to the same name in a named expression
                    # Could be used before assignment if self-referencing:
                    # (b := b)
                    # Otherwise, safe if used after assignment:
                    # (b := 2) and b
                    maybe_before_assign = defnode.value is node or any(
                        anc is defnode.value for anc in node.node_ancestors()
                    )

        return maybe_before_assign, annotation_return, use_outer_definition
+
    @staticmethod
    def _maybe_used_and_assigned_at_once(defstmt: _base_nodes.Statement) -> bool:
        """Check if `defstmt` has the potential to use and assign a name in the
        same statement.
        """
        if isinstance(defstmt, nodes.Match):
            # A case guard can read a name captured by its own pattern.
            return any(case.guard for case in defstmt.cases)
        if isinstance(defstmt, nodes.IfExp):
            return True
        if isinstance(defstmt, nodes.TypeAlias):
            return True
        if isinstance(defstmt.value, nodes.BaseContainer):
            # Recurse into container elements that can themselves hold
            # a use-and-assign construct (e.g. an IfExp inside a list).
            return any(
                VariablesChecker._maybe_used_and_assigned_at_once(elt)
                for elt in defstmt.value.elts
                if isinstance(elt, (*NODES_WITH_VALUE_ATTR, nodes.IfExp, nodes.Match))
            )
        value = defstmt.value
        if isinstance(value, nodes.IfExp):
            # e.g. x = b if (b := True) else False
            return True
        if isinstance(value, nodes.Lambda) and isinstance(value.body, nodes.IfExp):
            return True
        if isinstance(value, nodes.Dict) and any(
            isinstance(item[0], nodes.IfExp) or isinstance(item[1], nodes.IfExp)
            for item in value.items
        ):
            return True
        if not isinstance(value, nodes.Call):
            return False
        # For calls: an IfExp anywhere among arguments, keyword values, or
        # the callee expression may both use and assign the name.
        return any(
            any(isinstance(kwarg.value, nodes.IfExp) for kwarg in call.keywords)
            or any(isinstance(arg, nodes.IfExp) for arg in call.args)
            or (
                isinstance(call.func, nodes.Attribute)
                and isinstance(call.func.expr, nodes.IfExp)
            )
            for call in value.nodes_of_class(klass=nodes.Call)
        )
+
+    def _is_builtin(self, name: str) -> bool:
+        return name in self.linter.config.additional_builtins or utils.is_builtin(name)

    @staticmethod
    def _is_only_type_assignment(
        node: nodes.Name, defstmt: _base_nodes.Statement
    ) -> bool:
        """Check if variable only gets assigned a type and never a value.

        True when `defstmt` is a value-less annotation (``var: int``) and no
        scope between `node` and the annotation's frame ever assigns the name
        an actual value.
        """
        if not isinstance(defstmt, nodes.AnnAssign) or defstmt.value:
            return False

        defstmt_frame = defstmt.frame()
        node_frame = node.frame()

        # Walk outward from the node's scope up to (and including) the
        # annotation's frame, looking for any real assignment.
        parent = node
        while parent is not defstmt_frame.parent:
            parent_scope = parent.scope()

            # Find out if any nonlocals receive values in nested functions
            for inner_func in parent_scope.nodes_of_class(nodes.FunctionDef):
                if inner_func is parent_scope:
                    continue
                if any(
                    node.name in nl.names
                    for nl in inner_func.nodes_of_class(nodes.Nonlocal)
                ) and any(
                    node.name == an.name
                    for an in inner_func.nodes_of_class(nodes.AssignName)
                ):
                    return False

            local_refs = parent_scope.locals.get(node.name, [])
            for ref_node in local_refs:
                # If local ref is in the same frame as our node, but on a later lineno
                # we don't actually care about this local ref.
                # Local refs are ordered, so we break.
                #     print(var)
                #     var = 1  # <- irrelevant
                if defstmt_frame == node_frame and ref_node.lineno > node.lineno:
                    break

                # If the parent of the local reference is anything but an AnnAssign
                # Or if the AnnAssign adds a value the variable will now have a value
                #     var = 1  # OR
                #     var: int = 1
                if (
                    not isinstance(ref_node.parent, nodes.AnnAssign)
                    or ref_node.parent.value
                ) and not (
                    # EXCEPTION: will not have a value if a self-referencing named expression
                    # var: int
                    # if (var := var * var)  <-- "var" still undefined
                    isinstance(ref_node.parent, nodes.NamedExpr)
                    and any(
                        anc is ref_node.parent.value for anc in node.node_ancestors()
                    )
                ):
                    return False
            parent = parent_scope.parent
        return True

    @staticmethod
    def _is_first_level_self_reference(
        node: nodes.Name, defstmt: nodes.ClassDef, found_nodes: list[nodes.NodeNG]
    ) -> tuple[VariableVisitConsumerAction, list[nodes.NodeNG] | None]:
        """Check if a first level method's annotation or default values
        refers to its own class, and return a consumer action.
        """
        # Only applies when the name is used directly on a method definition
        # line of the class being defined (annotation/default position).
        if node.frame().parent == defstmt and node.statement() == node.frame():
            # Check if used as type annotation
            # Break if postponed evaluation is enabled
            if utils.is_node_in_type_annotation_context(node):
                if not utils.is_postponed_evaluation_enabled(node):
                    return (VariableVisitConsumerAction.CONTINUE, None)
                return (VariableVisitConsumerAction.RETURN, None)
            # Check if used as default value by calling the class
            if isinstance(node.parent, nodes.Call) and isinstance(
                node.parent.parent, nodes.Arguments
            ):
                return (VariableVisitConsumerAction.CONTINUE, None)
        return (VariableVisitConsumerAction.RETURN, found_nodes)

     @staticmethod
-    def _is_never_evaluated(defnode: nodes.NamedExpr, defnode_parent: nodes
-        .IfExp) ->bool:
+    def _is_never_evaluated(
+        defnode: nodes.NamedExpr, defnode_parent: nodes.IfExp
+    ) -> bool:
         """Check if a NamedExpr is inside a side of if ... else that never
         gets evaluated.
         """
-        pass
+        inferred_test = utils.safe_infer(defnode_parent.test)
+        if isinstance(inferred_test, nodes.Const):
+            if inferred_test.value is True and defnode == defnode_parent.orelse:
+                return True
+            if inferred_test.value is False and defnode == defnode_parent.body:
+                return True
+        return False

-    def _ignore_class_scope(self, node: nodes.NodeNG) ->bool:
+    @staticmethod
+    def _is_variable_annotation_in_function(node: nodes.NodeNG) -> bool:
+        is_annotation = utils.get_node_first_ancestor_of_type(node, nodes.AnnAssign)
+        return (
+            is_annotation
+            and utils.get_node_first_ancestor_of_type(  # type: ignore[return-value]
+                is_annotation, nodes.FunctionDef
+            )
+        )
+
    def _ignore_class_scope(self, node: nodes.NodeNG) -> bool:
        """Return True if the node is in a local class scope, as an assignment.

        Detect if we are in a local class scope, as an assignment.
        For example::

           class D(Tp):
               ...
        """
        name = node.name
        frame = node.statement().scope()
        # True when the name appears in an annotation, default value or
        # decorator of the enclosing function definition.
        in_annotation_or_default_or_decorator = self._defined_in_function_definition(
            node, frame
        )
        in_ancestor_list = utils.is_ancestor_name(frame, node)
        if in_annotation_or_default_or_decorator or in_ancestor_list:
            # Such names resolve in the parent scope, not in the frame itself.
            frame_locals = frame.parent.scope().locals
        else:
            frame_locals = frame.locals
        return not (
            (isinstance(frame, nodes.ClassDef) or in_annotation_or_default_or_decorator)
            and not self._in_lambda_or_comprehension_body(node, frame)
            and name in frame_locals
        )
+
    # pylint: disable-next=too-many-branches,too-many-statements
    def _loopvar_name(self, node: astroid.Name) -> None:
        """Emit undefined-loop-variable when `node` uses a loop variable in a
        place where the defining loop may never have run.
        """
        # filter variables according to node's scope
        astmts = [s for s in node.lookup(node.name)[1] if hasattr(s, "assign_type")]
        # If this variable usage exists inside a function definition
        # that exists in the same loop,
        # the usage is safe because the function will not be defined either if
        # the variable is not defined.
        scope = node.scope()
        if isinstance(scope, (nodes.Lambda, nodes.FunctionDef)) and any(
            asmt.scope().parent_of(scope) for asmt in astmts
        ):
            return
        # Filter variables according to their respective scope. Test parent
        # and statement to avoid #74747. This is not a total fix, which would
        # introduce a mechanism similar to special attribute lookup in
        # modules. Also, in order to get correct inference in this case, the
        # scope lookup rules would need to be changed to return the initial
        # assignment (which does not exist in code per se) as well as any later
        # modifications.
        if (
            not astmts  # pylint: disable=too-many-boolean-expressions
            or (
                astmts[0].parent == astmts[0].root()
                and astmts[0].parent.parent_of(node)
            )
            or (
                astmts[0].is_statement
                or not isinstance(astmts[0].parent, nodes.Module)
                and astmts[0].statement().parent_of(node)
            )
        ):
            _astmts = []
        else:
            _astmts = astmts[:1]
        # Drop assignments shadowed by an earlier one in the same statement,
        # except when they live in a for/else branch.
        for i, stmt in enumerate(astmts[1:]):
            try:
                astmt_statement = astmts[i].statement()
            except astroid.exceptions.ParentMissingError:
                continue
            if astmt_statement.parent_of(stmt) and not utils.in_for_else_branch(
                astmt_statement, stmt
            ):
                continue
            _astmts.append(stmt)
        astmts = _astmts
        if len(astmts) != 1:
            return

        assign = astmts[0].assign_type()
        if not (
            isinstance(assign, (nodes.For, nodes.Comprehension, nodes.GeneratorExp))
            and assign.statement() is not node.statement()
        ):
            return

        if not isinstance(assign, nodes.For):
            self.add_message("undefined-loop-variable", args=node.name, node=node)
            return
        # A for/else clause that always exits (return/raise/break/continue or a
        # NoReturn call) proves the loop body ran at least once before `node`.
        for else_stmt in assign.orelse:
            if isinstance(
                else_stmt, (nodes.Return, nodes.Raise, nodes.Break, nodes.Continue)
            ):
                return
            # TODO: 4.0: Consider using utils.is_terminating_func
            if isinstance(else_stmt, nodes.Expr) and isinstance(
                else_stmt.value, nodes.Call
            ):
                inferred_func = utils.safe_infer(else_stmt.value.func)
                if (
                    isinstance(inferred_func, nodes.FunctionDef)
                    and inferred_func.returns
                ):
                    inferred_return = utils.safe_infer(inferred_func.returns)
                    if isinstance(
                        inferred_return, nodes.FunctionDef
                    ) and inferred_return.qname() in {
                        *TYPING_NORETURN,
                        *TYPING_NEVER,
                        "typing._SpecialForm",
                    }:
                        return
                    # typing_extensions.NoReturn returns a _SpecialForm
                    if (
                        isinstance(inferred_return, bases.Instance)
                        and inferred_return.qname() == "typing._SpecialForm"
                    ):
                        return

        # A walrus inside a comprehension leaks into the enclosing scope.
        maybe_walrus = utils.get_node_first_ancestor_of_type(node, nodes.NamedExpr)
        if maybe_walrus:
            maybe_comprehension = utils.get_node_first_ancestor_of_type(
                maybe_walrus, nodes.Comprehension
            )
            if maybe_comprehension:
                comprehension_scope = utils.get_node_first_ancestor_of_type(
                    maybe_comprehension, nodes.ComprehensionScope
                )
                if comprehension_scope is None:
                    # Should not be possible.
                    pass
                elif (
                    comprehension_scope.parent.scope() is scope
                    and node.name in comprehension_scope.locals
                ):
                    return

        # For functions we can do more by inferring the length of the itered object
        try:
            inferred = next(assign.iter.infer())
            # Prefer the target of enumerate() rather than the enumerate object itself
            if (
                isinstance(inferred, astroid.Instance)
                and inferred.qname() == "builtins.enumerate"
            ):
                likely_call = assign.iter
                if isinstance(assign.iter, nodes.IfExp):
                    likely_call = assign.iter.body
                if isinstance(likely_call, nodes.Call):
                    inferred = next(likely_call.args[0].infer())
        except astroid.InferenceError:
            self.add_message("undefined-loop-variable", args=node.name, node=node)
        else:
            if (
                isinstance(inferred, astroid.Instance)
                and inferred.qname() == BUILTIN_RANGE
            ):
                # Consider range() objects safe, even if they might not yield any results.
                return

            # Consider sequences.
            sequences = (
                nodes.List,
                nodes.Tuple,
                nodes.Dict,
                nodes.Set,
                astroid.objects.FrozenSet,
            )
            if not isinstance(inferred, sequences):
                self.add_message("undefined-loop-variable", args=node.name, node=node)
                return

            elements = getattr(inferred, "elts", getattr(inferred, "items", []))
            if not elements:
                self.add_message("undefined-loop-variable", args=node.name, node=node)
+
    # pylint: disable = too-many-branches
    def _check_is_unused(
        self,
        name: str,
        node: nodes.FunctionDef,
        stmt: nodes.NodeNG,
        global_names: set[str],
        nonlocal_names: Iterable[str],
        comprehension_target_names: Iterable[str],
    ) -> None:
        """Check whether `name`, defined at `stmt` inside `node`, is unused,
        and emit unused-variable, possibly-unused-variable, unused-import or
        delegate to the unused-argument check as appropriate.
        """
        # Ignore some special names specified by user configuration.
        if self._is_name_ignored(stmt, name):
            return
        # Ignore names that were added dynamically to the Function scope
        if (
            isinstance(node, nodes.FunctionDef)
            and name == "__class__"
            and len(node.locals["__class__"]) == 1
            and isinstance(node.locals["__class__"][0], nodes.ClassDef)
        ):
            return

        # Ignore names imported by the global statement.
        if isinstance(stmt, (nodes.Global, nodes.Import, nodes.ImportFrom)):
            # Detect imports, assigned to global statements.
            if global_names and _import_name_is_global(stmt, global_names):
                return

        # Ignore names in comprehension targets
        if name in comprehension_target_names:
            return

        # Ignore names in string literal type annotation.
        if name in self._type_annotation_names:
            return

        argnames = node.argnames()
        # Care about functions with unknown argument (builtins)
        if name in argnames:
            if node.name == "__new__":
                is_init_def = False
                # Look for the `__init__` method in all the methods of the same class.
                for n in node.parent.get_children():
                    is_init_def = hasattr(n, "name") and (n.name == "__init__")
                    if is_init_def:
                        break
                # Ignore unused arguments check for `__new__` if `__init__` is defined.
                if is_init_def:
                    return
            self._check_unused_arguments(name, node, stmt, argnames, nonlocal_names)
        else:
            if stmt.parent and isinstance(
                stmt.parent, (nodes.Assign, nodes.AnnAssign, nodes.Tuple, nodes.For)
            ):
                if name in nonlocal_names:
                    return

            qname = asname = None
            if isinstance(stmt, (nodes.Import, nodes.ImportFrom)):
                # Need the complete name, which we don't have in .locals.
                if len(stmt.names) > 1:
                    import_names = next(
                        (names for names in stmt.names if name in names), None
                    )
                else:
                    import_names = stmt.names[0]
                if import_names:
                    qname, asname = import_names
                    name = asname or qname

            if _has_locals_call_after_node(stmt, node.scope()):
                # locals() later in the scope may observe the name dynamically.
                message_name = "possibly-unused-variable"
            else:
                if isinstance(stmt, nodes.Import):
                    if asname is not None:
                        msg = f"{qname} imported as {asname}"
                    else:
                        msg = f"import {name}"
                    self.add_message("unused-import", args=msg, node=stmt)
                    return
                if isinstance(stmt, nodes.ImportFrom):
                    if asname is not None:
                        msg = f"{qname} imported from {stmt.modname} as {asname}"
                    else:
                        msg = f"{name} imported from {stmt.modname}"
                    self.add_message("unused-import", args=msg, node=stmt)
                    return
                message_name = "unused-variable"

            if isinstance(stmt, nodes.FunctionDef) and stmt.decorators:
                return

            # Don't check function stubs created only for type information
            if utils.is_overload_stub(node):
                return

            # Special case for exception variable
            if isinstance(stmt.parent, nodes.ExceptHandler) and any(
                n.name == name for n in stmt.parent.nodes_of_class(nodes.Name)
            ):
                return

            self.add_message(message_name, args=name, node=stmt)
+
+    def _is_name_ignored(
+        self, stmt: nodes.NodeNG, name: str
+    ) -> re.Pattern[str] | re.Match[str] | None:
+        authorized_rgx = self.linter.config.dummy_variables_rgx
+        if (
+            isinstance(stmt, nodes.AssignName)
+            and isinstance(stmt.parent, nodes.Arguments)
+            or isinstance(stmt, nodes.Arguments)
+        ):
+            regex: re.Pattern[str] = self.linter.config.ignored_argument_names
+        else:
+            regex = authorized_rgx
+        # See https://stackoverflow.com/a/47007761/2519059 to
+        # understand what this function return. Please do NOT use
+        # this elsewhere, this is confusing for no benefit
+        return regex and regex.match(name)
+
    def _check_unused_arguments(
        self,
        name: str,
        node: nodes.FunctionDef,
        stmt: nodes.NodeNG,
        argnames: list[str],
        nonlocal_names: Iterable[str],
    ) -> None:
        """Emit unused-argument for `name` unless an exemption applies
        (self/cls, overridden methods, special methods, callbacks,
        singledispatch registrations, overload stubs, protocol classes,
        nonlocal names).
        """
        is_method = node.is_method()
        klass = node.parent.frame()
        if is_method and isinstance(klass, nodes.ClassDef):
            # Lower confidence when the class bases could not be resolved.
            confidence = (
                INFERENCE if utils.has_known_bases(klass) else INFERENCE_FAILURE
            )
        else:
            confidence = HIGH

        if is_method:
            # Don't warn for the first argument of a (non static) method
            if node.type != "staticmethod" and name == argnames[0]:
                return
            # Don't warn for argument of an overridden method
            overridden = overridden_method(klass, node.name)
            if overridden is not None and name in overridden.argnames():
                return
            if node.name in utils.PYMETHODS and node.name not in (
                "__init__",
                "__new__",
            ):
                return
        # Don't check callback arguments
        if any(
            node.name.startswith(cb) or node.name.endswith(cb)
            for cb in self.linter.config.callbacks
        ):
            return
        # Don't check arguments of singledispatch.register function.
        if utils.is_registered_in_singledispatch_function(node):
            return

        # Don't check function stubs created only for type information
        if utils.is_overload_stub(node):
            return

        # Don't check protocol classes
        if utils.is_protocol_class(klass):
            return

        if name in nonlocal_names:
            return

        self.add_message("unused-argument", args=name, node=stmt, confidence=confidence)
+
    def _check_late_binding_closure(self, node: nodes.Name) -> None:
        """Check whether node is a cell var that is assigned within a containing loop.

        Special cases where we don't care about the error:
        1. When the node's function is immediately called, e.g. (lambda: i)()
        2. When the node's function is returned from within the loop, e.g. return lambda: i
        """
        if not self.linter.is_message_enabled("cell-var-from-loop"):
            return

        node_scope = node.frame()

        # If node appears in a default argument expression,
        # look at the next enclosing frame instead
        if utils.is_default_argument(node, node_scope):
            node_scope = node_scope.parent.frame()

        # Check if node is a cell var
        if (
            not isinstance(node_scope, (nodes.Lambda, nodes.FunctionDef))
            or node.name in node_scope.locals
        ):
            return

        assign_scope, stmts = node.lookup(node.name)
        if not stmts or not assign_scope.parent_of(node_scope):
            return

        if utils.is_comprehension(assign_scope):
            self.add_message("cell-var-from-loop", node=node, args=node.name)
        else:
            # Look for an enclosing For loop.
            # Currently, we only consider the first assignment
            assignment_node = stmts[0]

            # Walk up from the assignment; stop at the assigning scope.
            maybe_for = assignment_node
            while maybe_for and not isinstance(maybe_for, nodes.For):
                if maybe_for is assign_scope:
                    break
                maybe_for = maybe_for.parent
            else:
                # while/else: only reached when the walk did NOT break,
                # i.e. we found a For node (or ran out of parents).
                if (
                    maybe_for
                    and maybe_for.parent_of(node_scope)
                    and not utils.is_being_called(node_scope)
                    and node_scope.parent
                    and not isinstance(node_scope.statement(), nodes.Return)
                ):
                    self.add_message("cell-var-from-loop", node=node, args=node.name)
+
+    def _should_ignore_redefined_builtin(self, stmt: nodes.NodeNG) -> bool:
+        if not isinstance(stmt, nodes.ImportFrom):
+            return False
+        return stmt.modname in self.linter.config.redefining_builtins_modules
+
    def _allowed_redefined_builtin(self, name: str) -> bool:
        # True when the user explicitly allowed shadowing this builtin via
        # the allowed-redefined-builtins option.
        return name in self.linter.config.allowed_redefined_builtins

     @staticmethod
-    def _comprehension_between_frame_and_node(node: nodes.Name) ->bool:
+    def _comprehension_between_frame_and_node(node: nodes.Name) -> bool:
         """Return True if a ComprehensionScope intervenes between `node` and its
         frame.
         """
-        pass
-
-    def _store_type_annotation_node(self, type_annotation: nodes.NodeNG
-        ) ->None:
+        closest_comprehension_scope = utils.get_node_first_ancestor_of_type(
+            node, nodes.ComprehensionScope
+        )
+        return closest_comprehension_scope is not None and node.frame().parent_of(
+            closest_comprehension_scope
+        )
+
+    def _store_type_annotation_node(self, type_annotation: nodes.NodeNG) -> None:
         """Given a type annotation, store all the name nodes it refers to."""
-        pass
-
-    def _check_self_cls_assign(self, node: nodes.Assign) ->None:
+        if isinstance(type_annotation, nodes.Name):
+            self._type_annotation_names.append(type_annotation.name)
+            return
+
+        if isinstance(type_annotation, nodes.Attribute):
+            self._store_type_annotation_node(type_annotation.expr)
+            return
+
+        if not isinstance(type_annotation, nodes.Subscript):
+            return
+
+        if (
+            isinstance(type_annotation.value, nodes.Attribute)
+            and isinstance(type_annotation.value.expr, nodes.Name)
+            and type_annotation.value.expr.name == TYPING_MODULE
+        ):
+            self._type_annotation_names.append(TYPING_MODULE)
+            return
+
+        self._type_annotation_names.extend(
+            annotation.name for annotation in type_annotation.nodes_of_class(nodes.Name)
+        )
+
+    def _store_type_annotation_names(
+        self, node: nodes.For | nodes.Assign | nodes.With
+    ) -> None:
+        type_annotation = node.type_annotation
+        if not type_annotation:
+            return
+        self._store_type_annotation_node(node.type_annotation)
+
    def _check_self_cls_assign(self, node: nodes.Assign) -> None:
        """Check that self/cls don't get assigned."""
        # Collect plain and tuple-unpacked target names of the assignment.
        assign_names: set[str | None] = set()
        for target in node.targets:
            if isinstance(target, nodes.AssignName):
                assign_names.add(target.name)
            elif isinstance(target, nodes.Tuple):
                assign_names.update(
                    elt.name for elt in target.elts if isinstance(elt, nodes.AssignName)
                )
        scope = node.scope()
        # NOTE(review): this only detects that *some* `nonlocal` statement
        # exists in the scope body, not one naming this variable -- confirm.
        nonlocals_with_same_name = node.scope().parent and any(
            child for child in scope.body if isinstance(child, nodes.Nonlocal)
        )
        if nonlocals_with_same_name:
            scope = node.scope().parent.scope()

        # Only relevant inside non-static methods.
        if not (
            isinstance(scope, nodes.FunctionDef)
            and scope.is_method()
            and "builtins.staticmethod" not in scope.decoratornames()
        ):
            return
        argument_names = scope.argnames()
        if not argument_names:
            return
        self_cls_name = argument_names[0]
        if self_cls_name in assign_names:
            self.add_message("self-cls-assignment", node=node, args=(self_cls_name,))
+
    def _check_unpacking(
        self, inferred: InferenceResult, node: nodes.Assign, targets: list[nodes.NodeNG]
    ) -> None:
        """Check for unbalanced tuple unpacking
        and unpacking non sequences.
        """
        if utils.is_inside_abstract_class(node):
            return
        if utils.is_comprehension(node):
            return
        if isinstance(inferred, util.UninferableBase):
            return
        if (
            isinstance(inferred.parent, nodes.Arguments)
            and isinstance(node.value, nodes.Name)
            and node.value.name == inferred.parent.vararg
        ):
            # Variable-length argument, we can't determine the length.
            return

        # Attempt to check unpacking is properly balanced
        values = self._nodes_to_unpack(inferred)
        details = _get_unpacking_extra_info(node, inferred)

        if values is not None:
            if len(targets) != len(values):
                self._report_unbalanced_unpacking(
                    node, inferred, targets, len(values), details
                )
        # attempt to check unpacking may be possible (i.e. RHS is iterable)
        elif not utils.is_iterable(inferred):
            self._report_unpacking_non_sequence(node, details)

    @staticmethod
    def _get_value_length(value_node: nodes.NodeNG) -> int:
        """Return how many values `value_node` yields for unpacking purposes."""
        value_subnodes = VariablesChecker._nodes_to_unpack(value_node)
        if value_subnodes is not None:
            return len(value_subnodes)
        if isinstance(value_node, nodes.Const) and isinstance(
            value_node.value, (str, bytes)
        ):
            # Strings and bytes unpack character/byte-wise.
            return len(value_node.value)
        if isinstance(value_node, nodes.Subscript):
            # NOTE(review): assumes a slice subscript with constant numeric
            # lower/upper bounds and a numeric (or absent) step -- confirm
            # callers guarantee this before reaching here.
            step = value_node.slice.step or 1
            splice_range = value_node.slice.upper.value - value_node.slice.lower.value
            splice_length = int(math.ceil(splice_range / step))
            return splice_length
        # Anything else counts as a single value.
        return 1

-    def _check_module_attrs(self, node: _base_nodes.ImportNode, module:
-        nodes.Module, module_names: list[str]) ->(nodes.Module | None):
    @staticmethod
    def _nodes_to_unpack(node: nodes.NodeNG) -> list[nodes.NodeNG] | None:
        """Return the list of values of the `Assign` node.

        Returns None when `node` is neither a literal container nor a
        typing.NamedTuple instance.
        """
        if isinstance(node, (nodes.Tuple, nodes.List, nodes.Set, *DICT_TYPES)):
            return node.itered()  # type: ignore[no-any-return]
        if isinstance(node, astroid.Instance) and any(
            ancestor.qname() == "typing.NamedTuple" for ancestor in node.ancestors()
        ):
            # NamedTuple instances unpack like tuples: one value per field.
            return [i for i in node.values() if isinstance(i, nodes.AssignName)]
        return None
+
+    def _report_unbalanced_unpacking(
+        self,
+        node: nodes.NodeNG,
+        inferred: InferenceResult,
+        targets: list[nodes.NodeNG],
+        values_count: int,
+        details: str,
+    ) -> None:
+        args = (
+            details,
+            len(targets),
+            "" if len(targets) == 1 else "s",
+            values_count,
+            "" if values_count == 1 else "s",
+        )
+
+        symbol = (
+            "unbalanced-dict-unpacking"
+            if isinstance(inferred, DICT_TYPES)
+            else "unbalanced-tuple-unpacking"
+        )
+        self.add_message(symbol, node=node, args=args, confidence=INFERENCE)
+
+    def _report_unpacking_non_sequence(self, node: nodes.NodeNG, details: str) -> None:
+        if details and not details.startswith(" "):
+            details = f" {details}"
+        self.add_message("unpacking-non-sequence", node=node, args=details)
+
    def _check_module_attrs(
        self,
        node: _base_nodes.ImportNode,
        module: nodes.Module,
        module_names: list[str],
    ) -> nodes.Module | None:
        """Check that module_names (list of string) are accessible through the
        given module, if the latest access name corresponds to a module, return it.
        """
        while module_names:
            name = module_names.pop(0)
            if name == "__dict__":
                # `mod.__dict__` is always valid; stop the attribute walk here.
                module = None
                break
            try:
                module = module.getattr(name)[0]
                if not isinstance(module, nodes.Module):
                    module = next(module.infer())
                    if not isinstance(module, nodes.Module):
                        return None
            except astroid.NotFoundError:
                # Unable to import `name` from `module`. Since `name` may itself be a
                # module, we first check if it matches the ignored modules.
                if is_module_ignored(f"{module.qname()}.{name}", self._ignored_modules):
                    return None
                self.add_message(
                    "no-name-in-module", args=(name, module.name), node=node
                )
                return None
            except astroid.InferenceError:
                return None
        if module_names:
            # Leftover names mean we broke out via `__dict__` above.
            modname = module.name if module else "__dict__"
            self.add_message(
                "no-name-in-module", node=node, args=(".".join(module_names), modname)
            )
            return None
        if isinstance(module, nodes.Module):
            return module
        return None
+
    def _check_all(
        self, node: nodes.Module, not_consumed: dict[str, list[nodes.NodeNG]]
    ) -> None:
        """Validate the module's `__all__` and mark its entries as consumed.

        Emits invalid-all-format, invalid-all-object and
        undefined-all-variable where appropriate.
        """
        try:
            assigned = next(node.igetattr("__all__"))
        except astroid.InferenceError:
            return
        if isinstance(assigned, util.UninferableBase):
            return
        if assigned.pytype() not in {"builtins.list", "builtins.tuple"}:
            line, col = assigned.tolineno, assigned.col_offset
            self.add_message("invalid-all-format", line=line, col_offset=col, node=node)
            return
        for elt in getattr(assigned, "elts", ()):
            try:
                elt_name = next(elt.infer())
            except astroid.InferenceError:
                continue
            if isinstance(elt_name, util.UninferableBase):
                continue
            if not elt_name.parent:
                continue

            # Entries must be string constants.
            if not isinstance(elt_name, nodes.Const) or not isinstance(
                elt_name.value, str
            ):
                self.add_message("invalid-all-object", args=elt.as_string(), node=elt)
                continue

            elt_name = elt_name.value
            # If elt is in not_consumed, remove it from not_consumed
            if elt_name in not_consumed:
                del not_consumed[elt_name]
                continue

            if elt_name not in node.locals:
                if not node.package:
                    self.add_message(
                        "undefined-all-variable", args=(elt_name,), node=elt
                    )
                else:
                    # Inside a package's __init__, the entry may name a submodule.
                    basename = os.path.splitext(node.file)[0]
                    if os.path.basename(basename) == "__init__":
                        name = node.name + "." + elt_name
                        try:
                            astroid.modutils.file_from_modpath(name.split("."))
                        except ImportError:
                            self.add_message(
                                "undefined-all-variable", args=(elt_name,), node=elt
                            )
                        except SyntaxError:
                            # don't yield a syntax-error warning,
                            # because it will be later yielded
                            # when the file will be checked
                            pass
+
    def _check_globals(self, not_consumed: dict[str, list[nodes.NodeNG]]) -> None:
        """Emit unused-variable for never-consumed module-level names."""
        if self._allow_global_unused_variables:
            return
        for name, node_lst in not_consumed.items():
            for node in node_lst:
                # Names used only under `if TYPE_CHECKING:` are exempt.
                if in_type_checking_block(node):
                    continue
                self.add_message("unused-variable", args=(name,), node=node)
+
    # pylint: disable = too-many-branches
    def _check_imports(self, not_consumed: dict[str, list[nodes.NodeNG]]) -> None:
        """Emit unused-import / unused-wildcard-import for imported names that
        were never consumed in the module.
        """
        local_names = _fix_dot_imports(not_consumed)
        checked = set()
        # Wildcard victims are grouped per (modname, stmt) to emit one message.
        unused_wildcard_imports: defaultdict[
            tuple[str, nodes.ImportFrom], list[str]
        ] = collections.defaultdict(list)
        for name, stmt in local_names:
            for imports in stmt.names:
                real_name = imported_name = imports[0]
                if imported_name == "*":
                    real_name = name
                as_name = imports[1]
                if real_name in checked:
                    continue
                if name not in (real_name, as_name):
                    continue
                checked.add(real_name)

                is_type_annotation_import = (
                    imported_name in self._type_annotation_names
                    or as_name in self._type_annotation_names
                )

                is_dummy_import = (
                    as_name
                    and self.linter.config.dummy_variables_rgx
                    and self.linter.config.dummy_variables_rgx.match(as_name)
                )

                if isinstance(stmt, nodes.Import) or (
                    isinstance(stmt, nodes.ImportFrom) and not stmt.modname
                ):
                    if isinstance(stmt, nodes.ImportFrom) and SPECIAL_OBJ.search(
                        imported_name
                    ):
                        # Filter special objects (__doc__, __all__) etc.,
                        # because they can be imported for exporting.
                        continue

                    if is_type_annotation_import or is_dummy_import:
                        # Most likely a typing import if it wasn't used so far.
                        # Also filter dummy variables.
                        continue

                    if as_name is None:
                        msg = f"import {imported_name}"
                    else:
                        msg = f"{imported_name} imported as {as_name}"
                    if not in_type_checking_block(stmt):
                        self.add_message("unused-import", args=msg, node=stmt)
                elif isinstance(stmt, nodes.ImportFrom) and stmt.modname != FUTURE:
                    if SPECIAL_OBJ.search(imported_name):
                        # Filter special objects (__doc__, __all__) etc.,
                        # because they can be imported for exporting.
                        continue

                    if _is_from_future_import(stmt, name):
                        # Check if the name is in fact loaded from a
                        # __future__ import in another module.
                        continue

                    if is_type_annotation_import or is_dummy_import:
                        # Most likely a typing import if it wasn't used so far.
                        # Also filter dummy variables.
                        continue

                    if imported_name == "*":
                        unused_wildcard_imports[(stmt.modname, stmt)].append(name)
                    else:
                        if as_name is None:
                            msg = f"{imported_name} imported from {stmt.modname}"
                        else:
                            msg = f"{imported_name} imported from {stmt.modname} as {as_name}"
                        if not in_type_checking_block(stmt):
                            self.add_message("unused-import", args=msg, node=stmt)

        # Construct string for unused-wildcard-import message
        for module, unused_list in unused_wildcard_imports.items():
            if len(unused_list) == 1:
                arg_string = unused_list[0]
            else:
                arg_string = (
                    f"{', '.join(i for i in unused_list[:-1])} and {unused_list[-1]}"
                )
            self.add_message(
                "unused-wildcard-import", args=(arg_string, module[0]), node=module[1]
            )
        # Drop the per-run consumption state now that reporting is done.
        del self._to_consume
+
+    def _check_metaclasses(self, node: nodes.Module | nodes.FunctionDef) -> None:
         """Update consumption analysis for metaclasses."""
-        pass
-
-    def _check_potential_index_error(self, node: nodes.Subscript,
-        inferred_slice: (nodes.NodeNG | None)) ->None:
+        consumed: list[tuple[dict[str, list[nodes.NodeNG]], str]] = []
+
+        for child_node in node.get_children():
+            if isinstance(child_node, nodes.ClassDef):
+                consumed.extend(self._check_classdef_metaclasses(child_node, node))
+
+        # Pop the consumed items, in order to avoid having
+        # unused-import and unused-variable false positives
+        for scope_locals, name in consumed:
+            scope_locals.pop(name, None)
+
+    def _check_classdef_metaclasses(
+        self, klass: nodes.ClassDef, parent_node: nodes.Module | nodes.FunctionDef
+    ) -> list[tuple[dict[str, list[nodes.NodeNG]], str]]:
+        if not klass._metaclass:
+            # Skip if this class doesn't explicitly use a metaclass, but inherits it from ancestors
+            return []
+
+        consumed: list[tuple[dict[str, list[nodes.NodeNG]], str]] = []
+        metaclass = klass.metaclass()
+        name = ""
+        if isinstance(klass._metaclass, nodes.Name):
+            name = klass._metaclass.name
+        elif isinstance(klass._metaclass, nodes.Attribute) and klass._metaclass.expr:
+            attr = klass._metaclass.expr
+            while not isinstance(attr, nodes.Name):
+                attr = attr.expr
+            name = attr.name
+        elif isinstance(klass._metaclass, nodes.Call) and isinstance(
+            klass._metaclass.func, nodes.Name
+        ):
+            name = klass._metaclass.func.name
+        elif metaclass:
+            name = metaclass.root().name
+
+        found = False
+        name = METACLASS_NAME_TRANSFORMS.get(name, name)
+        if name:
+            # check enclosing scopes starting from most local
+            for scope_locals, _, _, _ in self._to_consume[::-1]:
+                found_nodes = scope_locals.get(name, [])
+                for found_node in found_nodes:
+                    if found_node.lineno <= klass.lineno:
+                        consumed.append((scope_locals, name))
+                        found = True
+                        break
+            # Check parent scope
+            nodes_in_parent_scope = parent_node.locals.get(name, [])
+            for found_node_parent in nodes_in_parent_scope:
+                if found_node_parent.lineno <= klass.lineno:
+                    found = True
+                    break
+        if (
+            not found
+            and not metaclass
+            and not (
+                name in nodes.Module.scope_attrs
+                or utils.is_builtin(name)
+                or name in self.linter.config.additional_builtins
+            )
+        ):
+            self.add_message("undefined-variable", node=klass, args=(name,))
+
+        return consumed
+
+    def visit_subscript(self, node: nodes.Subscript) -> None:
+        inferred_slice = utils.safe_infer(node.slice)
+
+        self._check_potential_index_error(node, inferred_slice)
+
+    def _check_potential_index_error(
+        self, node: nodes.Subscript, inferred_slice: nodes.NodeNG | None
+    ) -> None:
         """Check for the potential-index-error message."""
-        pass
-
-    @utils.only_required_for_messages('unused-import', 'unused-variable')
-    def visit_const(self, node: nodes.Const) ->None:
+        # Currently we only check simple slices of a single integer
+        if not isinstance(inferred_slice, nodes.Const) or not isinstance(
+            inferred_slice.value, int
+        ):
+            return
+
+        # If node.value is a Tuple or List literal, it is defined in place and needs no inference
+        if isinstance(node.value, (nodes.Tuple, nodes.List)):
+            # Add 1 because iterables are 0-indexed
+            if len(node.value.elts) < inferred_slice.value + 1:
+                self.add_message(
+                    "potential-index-error", node=node, confidence=INFERENCE
+                )
+            return
+
+    @utils.only_required_for_messages(
+        "unused-import",
+        "unused-variable",
+    )
+    def visit_const(self, node: nodes.Const) -> None:
         """Take note of names that appear inside string literal type annotations
         unless the string is a parameter to `typing.Literal` or `typing.Annotation`.
         """
-        pass
+        if node.pytype() != "builtins.str":
+            return
+        if not utils.is_node_in_type_annotation_context(node):
+            return
+
+        # Check if parent's or grandparent's first child is typing.Literal
+        parent = node.parent
+        if isinstance(parent, nodes.Tuple):
+            parent = parent.parent
+        if isinstance(parent, nodes.Subscript):
+            origin = next(parent.get_children(), None)
+            if origin is not None and utils.is_typing_member(
+                origin, ("Annotated", "Literal")
+            ):
+                return
+
+        try:
+            annotation = extract_node(node.value)
+            self._store_type_annotation_node(annotation)
+        except ValueError:
+            # e.g. node.value is whitespace
+            pass
+        except astroid.AstroidSyntaxError:
+            # e.g. "?" or ":" in typing.Literal["?", ":"]
+            pass
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(VariablesChecker(linter))
diff --git a/pylint/config/_breaking_changes.py b/pylint/config/_breaking_changes.py
index 441d4c3b4..943f708aa 100644
--- a/pylint/config/_breaking_changes.py
+++ b/pylint/config/_breaking_changes.py
@@ -1,25 +1,32 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """List the breaking changes in configuration files and their solutions."""
+
 from __future__ import annotations
+
 import enum
 from typing import NamedTuple


 class BreakingChange(enum.Enum):
-    MESSAGE_MADE_DISABLED_BY_DEFAULT = (
-        '{symbol} ({msgid}) was disabled by default')
-    MESSAGE_MADE_ENABLED_BY_DEFAULT = (
-        '{symbol} ({msgid}) was enabled by default')
-    MESSAGE_MOVED_TO_EXTENSION = '{symbol} ({msgid}) was moved to {extension}'
-    EXTENSION_REMOVED = '{extension} was removed'
+    MESSAGE_MADE_DISABLED_BY_DEFAULT = "{symbol} ({msgid}) was disabled by default"
+    MESSAGE_MADE_ENABLED_BY_DEFAULT = "{symbol} ({msgid}) was enabled by default"
+    MESSAGE_MOVED_TO_EXTENSION = "{symbol} ({msgid}) was moved to {extension}"
+    EXTENSION_REMOVED = "{extension} was removed"
+    # This kind of upgrade is non-breaking, but to upgrade it automatically we
+    # should use the message store and old_names values instead of duplicating:
+    # MESSAGE_RENAMED = "{symbol} ({msgid}) was renamed"


 class Condition(enum.Enum):
-    MESSAGE_IS_ENABLED = '{symbol} ({msgid}) is enabled'
-    MESSAGE_IS_NOT_ENABLED = '{symbol} ({msgid}) is not enabled'
-    MESSAGE_IS_DISABLED = '{symbol} ({msgid}) is disabled'
-    MESSAGE_IS_NOT_DISABLED = '{symbol} ({msgid}) is not disabled'
-    EXTENSION_IS_LOADED = '{extension} is loaded'
-    EXTENSION_IS_NOT_LOADED = '{extension} is not loaded'
+    MESSAGE_IS_ENABLED = "{symbol} ({msgid}) is enabled"
+    MESSAGE_IS_NOT_ENABLED = "{symbol} ({msgid}) is not enabled"
+    MESSAGE_IS_DISABLED = "{symbol} ({msgid}) is disabled"
+    MESSAGE_IS_NOT_DISABLED = "{symbol} ({msgid}) is not disabled"
+    EXTENSION_IS_LOADED = "{extension} is loaded"
+    EXTENSION_IS_NOT_LOADED = "{extension} is not loaded"


 class Information(NamedTuple):
@@ -31,33 +38,60 @@ class Solution(enum.Enum):
     ADD_EXTENSION = "Add {extension} in 'load-plugins' option"
     REMOVE_EXTENSION = "Remove {extension} from the 'load-plugins' option"
     ENABLE_MESSAGE_EXPLICITLY = (
-        "{symbol} ({msgid}) should be added in the 'enable' option")
+        "{symbol} ({msgid}) should be added in the 'enable' option"
+    )
     ENABLE_MESSAGE_IMPLICITLY = (
-        "{symbol} ({msgid}) should be removed from the 'disable' option")
+        "{symbol} ({msgid}) should be removed from the 'disable' option"
+    )
     DISABLE_MESSAGE_EXPLICITLY = (
-        "{symbol} ({msgid}) should be added in the 'disable' option")
+        "{symbol} ({msgid}) should be added in the 'disable' option"
+    )
     DISABLE_MESSAGE_IMPLICITLY = (
-        "{symbol} ({msgid}) should be removed from the 'enable' option")
+        "{symbol} ({msgid}) should be removed from the 'enable' option"
+    )


 ConditionsToBeAffected = list[Condition]
+# A solution to a breaking change might imply multiple actions
 MultipleActionSolution = list[Solution]
+# Sometimes there's multiple solutions and the user needs to choose
 Solutions = list[MultipleActionSolution]
-BreakingChangeWithSolution = tuple[BreakingChange, Information,
-    ConditionsToBeAffected, Solutions]
-NO_SELF_USE = Information(msgid_or_symbol='no-self-use', extension=
-    'pylint.extensions.no_self_use')
-COMPARE_TO_ZERO = Information(msgid_or_symbol='compare-to-zero', extension=
-    'pylint.extensions.comparetozero')
-COMPARE_TO_EMPTY_STRING = Information(msgid_or_symbol=
-    'compare-to-empty-string', extension='pylint.extensions.emptystring')
+BreakingChangeWithSolution = tuple[
+    BreakingChange, Information, ConditionsToBeAffected, Solutions
+]
+
+NO_SELF_USE = Information(
+    msgid_or_symbol="no-self-use", extension="pylint.extensions.no_self_use"
+)
+COMPARE_TO_ZERO = Information(
+    msgid_or_symbol="compare-to-zero", extension="pylint.extensions.comparetozero"
+)
+COMPARE_TO_EMPTY_STRING = Information(
+    msgid_or_symbol="compare-to-empty-string",
+    extension="pylint.extensions.emptystring",
+)
+
 CONFIGURATION_BREAKING_CHANGES: dict[str, list[BreakingChangeWithSolution]] = {
-    '2.14.0': [(BreakingChange.MESSAGE_MOVED_TO_EXTENSION, NO_SELF_USE, [
-    Condition.MESSAGE_IS_ENABLED, Condition.EXTENSION_IS_NOT_LOADED], [[
-    Solution.ADD_EXTENSION], [Solution.DISABLE_MESSAGE_IMPLICITLY]])],
-    '3.0.0': [(BreakingChange.EXTENSION_REMOVED, COMPARE_TO_ZERO, [
-    Condition.MESSAGE_IS_NOT_DISABLED, Condition.EXTENSION_IS_LOADED], [[
-    Solution.REMOVE_EXTENSION, Solution.ENABLE_MESSAGE_EXPLICITLY]]), (
-    BreakingChange.EXTENSION_REMOVED, COMPARE_TO_EMPTY_STRING, [Condition.
-    MESSAGE_IS_NOT_DISABLED, Condition.EXTENSION_IS_LOADED], [[Solution.
-    REMOVE_EXTENSION, Solution.ENABLE_MESSAGE_EXPLICITLY]])]}
+    "2.14.0": [
+        (
+            BreakingChange.MESSAGE_MOVED_TO_EXTENSION,
+            NO_SELF_USE,
+            [Condition.MESSAGE_IS_ENABLED, Condition.EXTENSION_IS_NOT_LOADED],
+            [[Solution.ADD_EXTENSION], [Solution.DISABLE_MESSAGE_IMPLICITLY]],
+        ),
+    ],
+    "3.0.0": [
+        (
+            BreakingChange.EXTENSION_REMOVED,
+            COMPARE_TO_ZERO,
+            [Condition.MESSAGE_IS_NOT_DISABLED, Condition.EXTENSION_IS_LOADED],
+            [[Solution.REMOVE_EXTENSION, Solution.ENABLE_MESSAGE_EXPLICITLY]],
+        ),
+        (
+            BreakingChange.EXTENSION_REMOVED,
+            COMPARE_TO_EMPTY_STRING,
+            [Condition.MESSAGE_IS_NOT_DISABLED, Condition.EXTENSION_IS_LOADED],
+            [[Solution.REMOVE_EXTENSION, Solution.ENABLE_MESSAGE_EXPLICITLY]],
+        ),
+    ],
+}
diff --git a/pylint/config/_pylint_config/generate_command.py b/pylint/config/_pylint_config/generate_command.py
index 51798cfff..d1b73c99b 100644
--- a/pylint/config/_pylint_config/generate_command.py
+++ b/pylint/config/_pylint_config/generate_command.py
@@ -1,13 +1,49 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Everything related to the 'pylint-config generate' command."""
+
+
 from __future__ import annotations
+
 from io import StringIO
 from typing import TYPE_CHECKING
+
 from pylint.config._pylint_config import utils
 from pylint.config._pylint_config.help_message import get_subparser_help
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter


-def handle_generate_command(linter: PyLinter) ->int:
+def generate_interactive_config(linter: PyLinter) -> None:
+    print("Starting interactive pylint configuration generation")
+
+    format_type = utils.get_and_validate_format()
+    minimal = format_type == "toml" and utils.get_minimal_setting()
+    to_file, output_file_name = utils.get_and_validate_output_file()
+
+    if format_type == "toml":
+        config_string = linter._generate_config_file(minimal=minimal)
+    else:
+        output_stream = StringIO()
+        linter._generate_config(stream=output_stream, skipsections=("Commands",))
+        config_string = output_stream.getvalue()
+
+    if to_file:
+        with open(output_file_name, "w", encoding="utf-8") as f:
+            print(config_string, file=f)
+        print(f"Wrote configuration file to {output_file_name.resolve()}")
+    else:
+        print(config_string)
+
+
+def handle_generate_command(linter: PyLinter) -> int:
     """Handle 'pylint-config generate'."""
-    pass
+    # Interactively generate a pylint configuration
+    if linter.config.interactive:
+        generate_interactive_config(linter)
+        return 0
+    print(get_subparser_help(linter, "generate"))
+    return 32
diff --git a/pylint/config/_pylint_config/help_message.py b/pylint/config/_pylint_config/help_message.py
index ef3e26de2..7ba947429 100644
--- a/pylint/config/_pylint_config/help_message.py
+++ b/pylint/config/_pylint_config/help_message.py
@@ -1,19 +1,59 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Everything related to the 'pylint-config -h' command and subcommands."""
+
+
 from __future__ import annotations
+
 import argparse
 from typing import TYPE_CHECKING
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter


-def get_subparser_help(linter: PyLinter, command: str) ->str:
+def get_subparser_help(linter: PyLinter, command: str) -> str:
     """Get the help message for one of the subcommands."""
-    pass
+    # Make sure subparsers are initialized properly
+    assert linter._arg_parser._subparsers
+    subparser_action = linter._arg_parser._subparsers._group_actions[0]
+    assert isinstance(subparser_action, argparse._SubParsersAction)
+
+    for name, subparser in subparser_action.choices.items():
+        assert isinstance(subparser, argparse.ArgumentParser)
+        if name == command:
+            # Remove last character which is an extra new line
+            return subparser.format_help()[:-1]
+    return ""  # pragma: no cover


-def get_help(parser: argparse.ArgumentParser) ->str:
+def get_help(parser: argparse.ArgumentParser) -> str:
     """Get the help message for the main 'pylint-config' command.

     Taken from argparse.ArgumentParser.format_help.
     """
-    pass
+    formatter = parser._get_formatter()
+
+    # usage
+    formatter.add_usage(
+        parser.usage, parser._actions, parser._mutually_exclusive_groups
+    )
+
+    # description
+    formatter.add_text(parser.description)
+
+    # positionals, optionals and user-defined groups
+    for action_group in parser._action_groups:
+        if action_group.title == "Subcommands":
+            formatter.start_section(action_group.title)
+            formatter.add_text(action_group.description)
+            formatter.add_arguments(action_group._group_actions)
+            formatter.end_section()
+
+    # epilog
+    formatter.add_text(parser.epilog)
+
+    # determine help from format above
+    return formatter.format_help()
diff --git a/pylint/config/_pylint_config/main.py b/pylint/config/_pylint_config/main.py
index 37f7adc06..e562da2ef 100644
--- a/pylint/config/_pylint_config/main.py
+++ b/pylint/config/_pylint_config/main.py
@@ -1,12 +1,25 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Everything related to the 'pylint-config' command."""
+
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from pylint.config._pylint_config.generate_command import handle_generate_command
 from pylint.config._pylint_config.help_message import get_help
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter


-def _handle_pylint_config_commands(linter: PyLinter) ->int:
+def _handle_pylint_config_commands(linter: PyLinter) -> int:
     """Handle whichever command is passed to 'pylint-config'."""
-    pass
+    if linter.config.config_subcommand == "generate":
+        return handle_generate_command(linter)
+
+    print(get_help(linter._arg_parser))
+    return 32
diff --git a/pylint/config/_pylint_config/setup.py b/pylint/config/_pylint_config/setup.py
index def43d8bc..211f9bc6d 100644
--- a/pylint/config/_pylint_config/setup.py
+++ b/pylint/config/_pylint_config/setup.py
@@ -1,20 +1,49 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Everything related to the setup of the 'pylint-config' command."""
+
+
 from __future__ import annotations
+
 import argparse
 from collections.abc import Sequence
 from typing import Any
+
 from pylint.config._pylint_config.help_message import get_help
 from pylint.config.callback_actions import _AccessParserAction


 class _HelpAction(_AccessParserAction):
-
-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--help') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--help",
+    ) -> None:
         get_help(self.parser)


-def _register_generate_config_options(parser: argparse.ArgumentParser) ->None:
+def _register_generate_config_options(parser: argparse.ArgumentParser) -> None:
     """Registers the necessary arguments on the parser."""
-    pass
+    parser.prog = "pylint-config"
+    # Overwrite the help command
+    parser.add_argument(
+        "-h",
+        "--help",
+        action=_HelpAction,
+        default=argparse.SUPPRESS,
+        help="show this help message and exit",
+        parser=parser,
+    )
+
+    # We use subparsers to create various subcommands under 'pylint-config'
+    subparsers = parser.add_subparsers(dest="config_subcommand", title="Subcommands")
+
+    # Add the generate command
+    generate_parser = subparsers.add_parser(
+        "generate", help="Generate a pylint configuration"
+    )
+    generate_parser.add_argument("--interactive", action="store_true")
diff --git a/pylint/config/_pylint_config/utils.py b/pylint/config/_pylint_config/utils.py
index d36cab5cc..f9185e8b1 100644
--- a/pylint/config/_pylint_config/utils.py
+++ b/pylint/config/_pylint_config/utils.py
@@ -1,53 +1,115 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Utils for the 'pylint-config' command."""
+
 from __future__ import annotations
+
 import sys
 from collections.abc import Callable
 from pathlib import Path
 from typing import Literal, TypeVar
+
 if sys.version_info >= (3, 10):
     from typing import ParamSpec
 else:
     from typing_extensions import ParamSpec
-_P = ParamSpec('_P')
-_ReturnValueT = TypeVar('_ReturnValueT', bool, str)
-SUPPORTED_FORMATS = {'t', 'toml', 'i', 'ini'}
-YES_NO_ANSWERS = {'y', 'yes', 'n', 'no'}
+
+_P = ParamSpec("_P")
+_ReturnValueT = TypeVar("_ReturnValueT", bool, str)
+
+SUPPORTED_FORMATS = {"t", "toml", "i", "ini"}
+YES_NO_ANSWERS = {"y", "yes", "n", "no"}


 class InvalidUserInput(Exception):
     """Raised whenever a user input is invalid."""

-    def __init__(self, valid_input: str, input_value: str, *args: object
-        ) ->None:
+    def __init__(self, valid_input: str, input_value: str, *args: object) -> None:
         self.valid = valid_input
         self.input = input_value
         super().__init__(*args)


-def should_retry_after_invalid_input(func: Callable[_P, _ReturnValueT]
-    ) ->Callable[_P, _ReturnValueT]:
+def should_retry_after_invalid_input(
+    func: Callable[_P, _ReturnValueT]
+) -> Callable[_P, _ReturnValueT]:
     """Decorator that handles InvalidUserInput exceptions and retries."""
-    pass
+
+    def inner_function(*args: _P.args, **kwargs: _P.kwargs) -> _ReturnValueT:
+        called_once = False
+        while True:
+            try:
+                return func(*args, **kwargs)
+            except InvalidUserInput as exc:
+                if called_once and exc.input == "exit()":
+                    print("Stopping 'pylint-config'.")
+                    sys.exit()
+                print(f"Answer should be one of {exc.valid}.")
+                print("Type 'exit()' if you want to exit the program.")
+                called_once = True
+
+    return inner_function


 @should_retry_after_invalid_input
-def get_and_validate_format() ->Literal['toml', 'ini']:
+def get_and_validate_format() -> Literal["toml", "ini"]:
     """Make sure that the output format is either .toml or .ini."""
-    pass
+    # pylint: disable-next=bad-builtin
+    format_type = input(
+        "Please choose the format of configuration, (T)oml or (I)ni (.cfg): "
+    ).lower()
+
+    if format_type not in SUPPORTED_FORMATS:
+        raise InvalidUserInput(", ".join(sorted(SUPPORTED_FORMATS)), format_type)
+
+    if format_type.startswith("t"):
+        return "toml"
+    return "ini"


 @should_retry_after_invalid_input
-def validate_yes_no(question: str, default: (Literal['yes', 'no'] | None)
-    ) ->bool:
+def validate_yes_no(question: str, default: Literal["yes", "no"] | None) -> bool:
     """Validate that a yes or no answer is correct."""
-    pass
+    question = f"{question} (y)es or (n)o "
+    if default:
+        question += f" (default={default}) "
+    # pylint: disable-next=bad-builtin
+    answer = input(question).lower()
+
+    if not answer and default:
+        answer = default
+
+    if answer not in YES_NO_ANSWERS:
+        raise InvalidUserInput(", ".join(sorted(YES_NO_ANSWERS)), answer)
+
+    return answer.startswith("y")


-def get_minimal_setting() ->bool:
+def get_minimal_setting() -> bool:
     """Ask the user if they want to use the minimal setting."""
-    pass
+    return validate_yes_no(
+        "Do you want a minimal configuration without comments or default values?", "no"
+    )


-def get_and_validate_output_file() ->tuple[bool, Path]:
+def get_and_validate_output_file() -> tuple[bool, Path]:
     """Make sure that the output file is correct."""
-    pass
+    to_file = validate_yes_no("Do you want to write the output to a file?", "no")
+
+    if not to_file:
+        return False, Path()
+
+    # pylint: disable-next=bad-builtin
+    file_name = Path(input("What should the file be called: "))
+    if file_name.exists():
+        overwrite = validate_yes_no(
+            f"{file_name} already exists. Are you sure you want to overwrite?", "no"
+        )
+
+        if not overwrite:
+            return False, file_name
+        return True, file_name
+
+    return True, file_name
diff --git a/pylint/config/argument.py b/pylint/config/argument.py
index 0ac2e1955..2d2a46a3f 100644
--- a/pylint/config/argument.py
+++ b/pylint/config/argument.py
@@ -1,8 +1,14 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Definition of an Argument class and transformers for various argument types.

 An Argument instance represents a pylint option to be handled by an argparse.ArgumentParser
 """
+
 from __future__ import annotations
+
 import argparse
 import os
 import pathlib
@@ -10,80 +16,139 @@ import re
 from collections.abc import Callable
 from glob import glob
 from typing import Any, Literal, Pattern, Sequence, Tuple, Union
+
 from pylint import interfaces
 from pylint import utils as pylint_utils
 from pylint.config.callback_actions import _CallbackAction
 from pylint.config.deprecation_actions import _NewNamesAction, _OldNamesAction
-_ArgumentTypes = Union[str, int, float, bool, Pattern[str], Sequence[str],
-    Sequence[Pattern[str]], Tuple[int, ...]]
+
+_ArgumentTypes = Union[
+    str,
+    int,
+    float,
+    bool,
+    Pattern[str],
+    Sequence[str],
+    Sequence[Pattern[str]],
+    Tuple[int, ...],
+]
 """List of possible argument types."""


-def _confidence_transformer(value: str) ->Sequence[str]:
+def _confidence_transformer(value: str) -> Sequence[str]:
     """Transforms a comma separated string of confidence values."""
-    pass
-
-
-def _csv_transformer(value: str) ->Sequence[str]:
+    if not value:
+        return interfaces.CONFIDENCE_LEVEL_NAMES
+    values = pylint_utils._check_csv(value)
+    for confidence in values:
+        if confidence not in interfaces.CONFIDENCE_LEVEL_NAMES:
+            raise argparse.ArgumentTypeError(
+                f"{value} should be in {*interfaces.CONFIDENCE_LEVEL_NAMES,}"
+            )
+    return values
+
+
+def _csv_transformer(value: str) -> Sequence[str]:
     """Transforms a comma separated string."""
-    pass
+    return pylint_utils._check_csv(value)


-YES_VALUES = {'y', 'yes', 'true'}
-NO_VALUES = {'n', 'no', 'false'}
+YES_VALUES = {"y", "yes", "true"}
+NO_VALUES = {"n", "no", "false"}


-def _yn_transformer(value: str) ->bool:
+def _yn_transformer(value: str) -> bool:
     """Transforms a yes/no or stringified bool into a bool."""
-    pass
+    value = value.lower()
+    if value in YES_VALUES:
+        return True
+    if value in NO_VALUES:
+        return False
+    raise argparse.ArgumentTypeError(
+        None, f"Invalid yn value '{value}', should be in {*YES_VALUES, *NO_VALUES}"
+    )


-def _non_empty_string_transformer(value: str) ->str:
+def _non_empty_string_transformer(value: str) -> str:
     """Check that a string is not empty and remove quotes."""
-    pass
+    if not value:
+        raise argparse.ArgumentTypeError("Option cannot be an empty string.")
+    return pylint_utils._unquote(value)


-def _path_transformer(value: str) ->str:
+def _path_transformer(value: str) -> str:
     """Expand user and variables in a path."""
-    pass
+    return os.path.expandvars(os.path.expanduser(value))


-def _glob_paths_csv_transformer(value: str) ->Sequence[str]:
+def _glob_paths_csv_transformer(value: str) -> Sequence[str]:
     """Transforms a comma separated list of paths while expanding user and
     variables and glob patterns.
     """
-    pass
+    paths: list[str] = []
+    for path in _csv_transformer(value):
+        paths.extend(glob(_path_transformer(path), recursive=True))
+    return paths


-def _py_version_transformer(value: str) ->tuple[int, ...]:
+def _py_version_transformer(value: str) -> tuple[int, ...]:
     """Transforms a version string into a version tuple."""
-    pass
+    try:
+        version = tuple(int(val) for val in value.replace(",", ".").split("."))
+    except ValueError:
+        raise argparse.ArgumentTypeError(
+            f"{value} has an invalid format, should be a version string. E.g., '3.8'"
+        ) from None
+    return version


-def _regex_transformer(value: str) ->Pattern[str]:
+def _regex_transformer(value: str) -> Pattern[str]:
     """Return `re.compile(value)`."""
-    pass
+    try:
+        return re.compile(value)
+    except re.error as e:
+        msg = f"Error in provided regular expression: {value} beginning at index {e.pos}: {e.msg}"
+        raise argparse.ArgumentTypeError(msg) from e


-def _regexp_csv_transfomer(value: str) ->Sequence[Pattern[str]]:
+def _regexp_csv_transfomer(value: str) -> Sequence[Pattern[str]]:
     """Transforms a comma separated list of regular expressions."""
-    pass
+    patterns: list[Pattern[str]] = []
+    for pattern in pylint_utils._check_regexp_csv(value):
+        patterns.append(_regex_transformer(pattern))
+    return patterns


-def _regexp_paths_csv_transfomer(value: str) ->Sequence[Pattern[str]]:
+def _regexp_paths_csv_transfomer(value: str) -> Sequence[Pattern[str]]:
     """Transforms a comma separated list of regular expressions paths."""
-    pass
-
-
-_TYPE_TRANSFORMERS: dict[str, Callable[[str], _ArgumentTypes]] = {'choice':
-    str, 'csv': _csv_transformer, 'float': float, 'int': int, 'confidence':
-    _confidence_transformer, 'non_empty_string':
-    _non_empty_string_transformer, 'path': _path_transformer,
-    'glob_paths_csv': _glob_paths_csv_transformer, 'py_version':
-    _py_version_transformer, 'regexp': _regex_transformer, 'regexp_csv':
-    _regexp_csv_transfomer, 'regexp_paths_csv':
-    _regexp_paths_csv_transfomer, 'string': pylint_utils._unquote, 'yn':
-    _yn_transformer}
+    patterns: list[Pattern[str]] = []
+    for pattern in _csv_transformer(value):
+        patterns.append(
+            re.compile(
+                str(pathlib.PureWindowsPath(pattern)).replace("\\", "\\\\")
+                + "|"
+                + pathlib.PureWindowsPath(pattern).as_posix()
+            )
+        )
+    return patterns
+
+
+_TYPE_TRANSFORMERS: dict[str, Callable[[str], _ArgumentTypes]] = {
+    "choice": str,
+    "csv": _csv_transformer,
+    "float": float,
+    "int": int,
+    "confidence": _confidence_transformer,
+    "non_empty_string": _non_empty_string_transformer,
+    "path": _path_transformer,
+    "glob_paths_csv": _glob_paths_csv_transformer,
+    "py_version": _py_version_transformer,
+    "regexp": _regex_transformer,
+    "regexp_csv": _regexp_csv_transfomer,
+    "regexp_paths_csv": _regexp_paths_csv_transfomer,
+    "string": pylint_utils._unquote,
+    "yn": _yn_transformer,
+}
 """Type transformers for all argument types.

 A transformer should accept a string and return one of the supported
@@ -101,16 +166,27 @@ class _Argument:
     https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.add_argument
     """

-    def __init__(self, *, flags: list[str], arg_help: str, hide_help: bool,
-        section: (str | None)) ->None:
+    def __init__(
+        self,
+        *,
+        flags: list[str],
+        arg_help: str,
+        hide_help: bool,
+        section: str | None,
+    ) -> None:
         self.flags = flags
         """The name of the argument."""
+
         self.hide_help = hide_help
         """Whether to hide this argument in the help message."""
-        self.help = arg_help.replace('%', '%%')
+
+        # argparse uses % formatting on help strings, so a % needs to be escaped
+        self.help = arg_help.replace("%", "%%")
         """The description of the argument."""
+
         if hide_help:
             self.help = argparse.SUPPRESS
+
         self.section = section
         """The section to add this argument to."""

@@ -123,13 +199,23 @@ class _BaseStoreArgument(_Argument):
     https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.add_argument
     """

-    def __init__(self, *, flags: list[str], action: str, default:
-        _ArgumentTypes, arg_help: str, hide_help: bool, section: (str | None)
-        ) ->None:
-        super().__init__(flags=flags, arg_help=arg_help, hide_help=
-            hide_help, section=section)
+    def __init__(
+        self,
+        *,
+        flags: list[str],
+        action: str,
+        default: _ArgumentTypes,
+        arg_help: str,
+        hide_help: bool,
+        section: str | None,
+    ) -> None:
+        super().__init__(
+            flags=flags, arg_help=arg_help, hide_help=hide_help, section=section
+        )
+
         self.action = action
         """The action to perform with the argument."""
+
         self.default = default
         """The default value of the argument."""

@@ -142,19 +228,38 @@ class _StoreArgument(_BaseStoreArgument):
     https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.add_argument
     """

-    def __init__(self, *, flags: list[str], action: str, default:
-        _ArgumentTypes, arg_type: str, choices: (list[str] | None),
-        arg_help: str, metavar: str, hide_help: bool, section: (str | None)
-        ) ->None:
-        super().__init__(flags=flags, action=action, default=default,
-            arg_help=arg_help, hide_help=hide_help, section=section)
+    # pylint: disable-next=too-many-arguments
+    def __init__(
+        self,
+        *,
+        flags: list[str],
+        action: str,
+        default: _ArgumentTypes,
+        arg_type: str,
+        choices: list[str] | None,
+        arg_help: str,
+        metavar: str,
+        hide_help: bool,
+        section: str | None,
+    ) -> None:
+        super().__init__(
+            flags=flags,
+            action=action,
+            default=default,
+            arg_help=arg_help,
+            hide_help=hide_help,
+            section=section,
+        )
+
         self.type = _TYPE_TRANSFORMERS[arg_type]
         """A transformer function that returns a transformed type of the argument."""
+
         self.choices = choices
         """A list of possible choices for the argument.

         None if there are no restrictions.
         """
+
         self.metavar = metavar
         """The metavar of the argument.

@@ -172,11 +277,25 @@ class _StoreTrueArgument(_BaseStoreArgument):
     https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.add_argument
     """

-    def __init__(self, *, flags: list[str], action: Literal['store_true'],
-        default: _ArgumentTypes, arg_help: str, hide_help: bool, section: (
-        str | None)) ->None:
-        super().__init__(flags=flags, action=action, default=default,
-            arg_help=arg_help, hide_help=hide_help, section=section)
+    # pylint: disable-next=useless-parent-delegation # We narrow down the type of action
+    def __init__(
+        self,
+        *,
+        flags: list[str],
+        action: Literal["store_true"],
+        default: _ArgumentTypes,
+        arg_help: str,
+        hide_help: bool,
+        section: str | None,
+    ) -> None:
+        super().__init__(
+            flags=flags,
+            action=action,
+            default=default,
+            arg_help=arg_help,
+            hide_help=hide_help,
+            section=section,
+        )


 class _DeprecationArgument(_Argument):
@@ -187,23 +306,39 @@ class _DeprecationArgument(_Argument):
     https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.add_argument
     """

-    def __init__(self, *, flags: list[str], action: type[argparse.Action],
-        default: _ArgumentTypes, arg_type: str, choices: (list[str] | None),
-        arg_help: str, metavar: str, hide_help: bool, section: (str | None)
-        ) ->None:
-        super().__init__(flags=flags, arg_help=arg_help, hide_help=
-            hide_help, section=section)
+    # pylint: disable-next=too-many-arguments
+    def __init__(
+        self,
+        *,
+        flags: list[str],
+        action: type[argparse.Action],
+        default: _ArgumentTypes,
+        arg_type: str,
+        choices: list[str] | None,
+        arg_help: str,
+        metavar: str,
+        hide_help: bool,
+        section: str | None,
+    ) -> None:
+        super().__init__(
+            flags=flags, arg_help=arg_help, hide_help=hide_help, section=section
+        )
+
         self.action = action
         """The action to perform with the argument."""
+
         self.default = default
         """The default value of the argument."""
+
         self.type = _TYPE_TRANSFORMERS[arg_type]
         """A transformer function that returns a transformed type of the argument."""
+
         self.choices = choices
         """A list of possible choices for the argument.

         None if there are no restrictions.
         """
+
         self.metavar = metavar
         """The metavar of the argument.

@@ -220,16 +355,37 @@ class _ExtendArgument(_DeprecationArgument):
     https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.add_argument
     """

-    def __init__(self, *, flags: list[str], action: Literal['extend'],
-        default: _ArgumentTypes, arg_type: str, metavar: str, arg_help: str,
-        hide_help: bool, section: (str | None), choices: (list[str] | None),
-        dest: (str | None)) ->None:
+    # pylint: disable-next=too-many-arguments
+    def __init__(
+        self,
+        *,
+        flags: list[str],
+        action: Literal["extend"],
+        default: _ArgumentTypes,
+        arg_type: str,
+        metavar: str,
+        arg_help: str,
+        hide_help: bool,
+        section: str | None,
+        choices: list[str] | None,
+        dest: str | None,
+    ) -> None:
         action_class = argparse._ExtendAction
+
         self.dest = dest
         """The destination of the argument."""
-        super().__init__(flags=flags, action=action_class, default=default,
-            arg_type=arg_type, choices=choices, arg_help=arg_help, metavar=
-            metavar, hide_help=hide_help, section=section)
+
+        super().__init__(
+            flags=flags,
+            action=action_class,
+            default=default,
+            arg_type=arg_type,
+            choices=choices,
+            arg_help=arg_help,
+            metavar=metavar,
+            hide_help=hide_help,
+            section=section,
+        )


 class _StoreOldNamesArgument(_DeprecationArgument):
@@ -240,13 +396,32 @@ class _StoreOldNamesArgument(_DeprecationArgument):
     https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.add_argument
     """

-    def __init__(self, *, flags: list[str], default: _ArgumentTypes,
-        arg_type: str, choices: (list[str] | None), arg_help: str, metavar:
-        str, hide_help: bool, kwargs: dict[str, Any], section: (str | None)
-        ) ->None:
-        super().__init__(flags=flags, action=_OldNamesAction, default=
-            default, arg_type=arg_type, choices=choices, arg_help=arg_help,
-            metavar=metavar, hide_help=hide_help, section=section)
+    # pylint: disable-next=too-many-arguments
+    def __init__(
+        self,
+        *,
+        flags: list[str],
+        default: _ArgumentTypes,
+        arg_type: str,
+        choices: list[str] | None,
+        arg_help: str,
+        metavar: str,
+        hide_help: bool,
+        kwargs: dict[str, Any],
+        section: str | None,
+    ) -> None:
+        super().__init__(
+            flags=flags,
+            action=_OldNamesAction,
+            default=default,
+            arg_type=arg_type,
+            choices=choices,
+            arg_help=arg_help,
+            metavar=metavar,
+            hide_help=hide_help,
+            section=section,
+        )
+
         self.kwargs = kwargs
         """Any additional arguments passed to the action."""

@@ -259,13 +434,32 @@ class _StoreNewNamesArgument(_DeprecationArgument):
     https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.add_argument
     """

-    def __init__(self, *, flags: list[str], default: _ArgumentTypes,
-        arg_type: str, choices: (list[str] | None), arg_help: str, metavar:
-        str, hide_help: bool, kwargs: dict[str, Any], section: (str | None)
-        ) ->None:
-        super().__init__(flags=flags, action=_NewNamesAction, default=
-            default, arg_type=arg_type, choices=choices, arg_help=arg_help,
-            metavar=metavar, hide_help=hide_help, section=section)
+    # pylint: disable-next=too-many-arguments
+    def __init__(
+        self,
+        *,
+        flags: list[str],
+        default: _ArgumentTypes,
+        arg_type: str,
+        choices: list[str] | None,
+        arg_help: str,
+        metavar: str,
+        hide_help: bool,
+        kwargs: dict[str, Any],
+        section: str | None,
+    ) -> None:
+        super().__init__(
+            flags=flags,
+            action=_NewNamesAction,
+            default=default,
+            arg_type=arg_type,
+            choices=choices,
+            arg_help=arg_help,
+            metavar=metavar,
+            hide_help=hide_help,
+            section=section,
+        )
+
         self.kwargs = kwargs
         """Any additional arguments passed to the action."""

@@ -279,15 +473,27 @@ class _CallableArgument(_Argument):
     https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.add_argument
     """

-    def __init__(self, *, flags: list[str], action: type[_CallbackAction],
-        arg_help: str, kwargs: dict[str, Any], hide_help: bool, section: (
-        str | None), metavar: str) ->None:
-        super().__init__(flags=flags, arg_help=arg_help, hide_help=
-            hide_help, section=section)
+    def __init__(
+        self,
+        *,
+        flags: list[str],
+        action: type[_CallbackAction],
+        arg_help: str,
+        kwargs: dict[str, Any],
+        hide_help: bool,
+        section: str | None,
+        metavar: str,
+    ) -> None:
+        super().__init__(
+            flags=flags, arg_help=arg_help, hide_help=hide_help, section=section
+        )
+
         self.action = action
         """The action to perform with the argument."""
+
         self.kwargs = kwargs
         """Any additional arguments passed to the action."""
+
         self.metavar = metavar
         """The metavar of the argument.

diff --git a/pylint/config/arguments_manager.py b/pylint/config/arguments_manager.py
index c9417f90a..aca8f5f87 100644
--- a/pylint/config/arguments_manager.py
+++ b/pylint/config/arguments_manager.py
@@ -1,5 +1,11 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Arguments manager class used to handle command-line arguments and options."""
+
 from __future__ import annotations
+
 import argparse
 import re
 import sys
@@ -7,18 +13,34 @@ import textwrap
 import warnings
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any, TextIO
+
 import tomlkit
+
 from pylint import utils
-from pylint.config.argument import _Argument, _CallableArgument, _ExtendArgument, _StoreArgument, _StoreNewNamesArgument, _StoreOldNamesArgument, _StoreTrueArgument
-from pylint.config.exceptions import UnrecognizedArgumentAction, _UnrecognizedOptionError
+from pylint.config.argument import (
+    _Argument,
+    _CallableArgument,
+    _ExtendArgument,
+    _StoreArgument,
+    _StoreNewNamesArgument,
+    _StoreOldNamesArgument,
+    _StoreTrueArgument,
+)
+from pylint.config.exceptions import (
+    UnrecognizedArgumentAction,
+    _UnrecognizedOptionError,
+)
 from pylint.config.help_formatter import _HelpFormatter
 from pylint.config.utils import _convert_option_to_argument, _parse_rich_type_value
 from pylint.constants import MAIN_CHECKER_NAME
 from pylint.typing import DirectoryNamespaceDict, OptionDict
+
 if sys.version_info >= (3, 11):
     import tomllib
 else:
     import tomli as tomllib
+
+
 if TYPE_CHECKING:
     from pylint.config.arguments_provider import _ArgumentsProvider

@@ -26,10 +48,12 @@ if TYPE_CHECKING:
 class _ArgumentsManager:
     """Arguments manager class used to handle command-line arguments and options."""

-    def __init__(self, prog: str, usage: (str | None)=None, description: (
-        str | None)=None) ->None:
+    def __init__(
+        self, prog: str, usage: str | None = None, description: str | None = None
+    ) -> None:
         self._config = argparse.Namespace()
         """Namespace for all options."""
+
         self._base_config = self._config
         """Fall back Namespace object created during initialization.

@@ -37,67 +61,342 @@ class _ArgumentsManager:
         fail to match a file with a directory we fall back to the Namespace object
         created during initialization.
         """
-        self._arg_parser = argparse.ArgumentParser(prog=prog, usage=usage or
-            '%(prog)s [options]', description=description, formatter_class=
-            _HelpFormatter, conflict_handler='resolve')
+
+        self._arg_parser = argparse.ArgumentParser(
+            prog=prog,
+            usage=usage or "%(prog)s [options]",
+            description=description,
+            formatter_class=_HelpFormatter,
+            # Needed to let 'pylint-config' overwrite the -h command
+            conflict_handler="resolve",
+        )
         """The command line argument parser."""
+
         self._argument_groups_dict: dict[str, argparse._ArgumentGroup] = {}
         """Dictionary of all the argument groups."""
+
         self._option_dicts: dict[str, OptionDict] = {}
         """All option dictionaries that have been registered."""
+
         self._directory_namespaces: DirectoryNamespaceDict = {}
         """Mapping of directories and their respective namespace objects."""

     @property
-    def config(self) ->argparse.Namespace:
+    def config(self) -> argparse.Namespace:
         """Namespace for all options."""
-        pass
+        return self._config
+
+    @config.setter
+    def config(self, value: argparse.Namespace) -> None:
+        self._config = value

-    def _register_options_provider(self, provider: _ArgumentsProvider) ->None:
+    def _register_options_provider(self, provider: _ArgumentsProvider) -> None:
         """Register an options provider and load its defaults."""
-        pass
+        for opt, optdict in provider.options:
+            self._option_dicts[opt] = optdict
+            argument = _convert_option_to_argument(opt, optdict)
+            section = argument.section or provider.name.capitalize()
+
+            section_desc = provider.option_groups_descs.get(section, None)
+
+            # We exclude main since its docstring comes from PyLinter
+            if provider.name != MAIN_CHECKER_NAME and provider.__doc__:
+                section_desc = provider.__doc__.split("\n\n")[0]
+
+            self._add_arguments_to_parser(section, section_desc, argument)

-    def _add_arguments_to_parser(self, section: str, section_desc: (str |
-        None), argument: _Argument) ->None:
+        self._load_default_argument_values()
+
+    def _add_arguments_to_parser(
+        self, section: str, section_desc: str | None, argument: _Argument
+    ) -> None:
         """Add an argument to the correct argument section/group."""
-        pass
+        try:
+            section_group = self._argument_groups_dict[section]
+        except KeyError:
+            if section_desc:
+                section_group = self._arg_parser.add_argument_group(
+                    section, section_desc
+                )
+            else:
+                section_group = self._arg_parser.add_argument_group(title=section)
+            self._argument_groups_dict[section] = section_group
+        self._add_parser_option(section_group, argument)

     @staticmethod
-    def _add_parser_option(section_group: argparse._ArgumentGroup, argument:
-        _Argument) ->None:
+    def _add_parser_option(
+        section_group: argparse._ArgumentGroup, argument: _Argument
+    ) -> None:
         """Add an argument."""
-        pass
+        if isinstance(argument, _StoreArgument):
+            section_group.add_argument(
+                *argument.flags,
+                action=argument.action,
+                default=argument.default,
+                type=argument.type,  # type: ignore[arg-type] # incorrect typing in typeshed
+                help=argument.help,
+                metavar=argument.metavar,
+                choices=argument.choices,
+            )
+        elif isinstance(argument, _StoreOldNamesArgument):
+            section_group.add_argument(
+                *argument.flags,
+                **argument.kwargs,
+                action=argument.action,
+                default=argument.default,
+                type=argument.type,  # type: ignore[arg-type] # incorrect typing in typeshed
+                help=argument.help,
+                metavar=argument.metavar,
+                choices=argument.choices,
+            )
+            # We add the old name as hidden option to make its default value get loaded when
+            # argparse initializes all options from the checker
+            assert argument.kwargs["old_names"]
+            for old_name in argument.kwargs["old_names"]:
+                section_group.add_argument(
+                    f"--{old_name}",
+                    action="store",
+                    default=argument.default,
+                    type=argument.type,  # type: ignore[arg-type] # incorrect typing in typeshed
+                    help=argparse.SUPPRESS,
+                    metavar=argument.metavar,
+                    choices=argument.choices,
+                )
+        elif isinstance(argument, _StoreNewNamesArgument):
+            section_group.add_argument(
+                *argument.flags,
+                **argument.kwargs,
+                action=argument.action,
+                default=argument.default,
+                type=argument.type,  # type: ignore[arg-type] # incorrect typing in typeshed
+                help=argument.help,
+                metavar=argument.metavar,
+                choices=argument.choices,
+            )
+        elif isinstance(argument, _StoreTrueArgument):
+            section_group.add_argument(
+                *argument.flags,
+                action=argument.action,
+                default=argument.default,
+                help=argument.help,
+            )
+        elif isinstance(argument, _CallableArgument):
+            section_group.add_argument(
+                *argument.flags,
+                **argument.kwargs,
+                action=argument.action,
+                help=argument.help,
+                metavar=argument.metavar,
+            )
+        elif isinstance(argument, _ExtendArgument):
+            section_group.add_argument(
+                *argument.flags,
+                action=argument.action,
+                default=argument.default,
+                type=argument.type,  # type: ignore[arg-type] # incorrect typing in typeshed
+                help=argument.help,
+                metavar=argument.metavar,
+                choices=argument.choices,
+                dest=argument.dest,
+            )
+        else:
+            raise UnrecognizedArgumentAction

-    def _load_default_argument_values(self) ->None:
+    def _load_default_argument_values(self) -> None:
         """Loads the default values of all registered options."""
-        pass
+        self.config = self._arg_parser.parse_args([], self.config)

-    def _parse_configuration_file(self, arguments: list[str]) ->None:
+    def _parse_configuration_file(self, arguments: list[str]) -> None:
         """Parse the arguments found in a configuration file into the namespace."""
-        pass
+        try:
+            self.config, parsed_args = self._arg_parser.parse_known_args(
+                arguments, self.config
+            )
+        except SystemExit:
+            sys.exit(32)
+        unrecognized_options: list[str] = []
+        for opt in parsed_args:
+            if opt.startswith("--"):
+                unrecognized_options.append(opt[2:])
+        if unrecognized_options:
+            raise _UnrecognizedOptionError(options=unrecognized_options)

-    def _parse_command_line_configuration(self, arguments: (Sequence[str] |
-        None)=None) ->list[str]:
+    def _parse_command_line_configuration(
+        self, arguments: Sequence[str] | None = None
+    ) -> list[str]:
         """Parse the arguments found on the command line into the namespace."""
-        pass
+        arguments = sys.argv[1:] if arguments is None else arguments
+
+        self.config, parsed_args = self._arg_parser.parse_known_args(
+            arguments, self.config
+        )
+
+        return parsed_args

-    def _generate_config(self, stream: (TextIO | None)=None, skipsections:
-        tuple[str, ...]=()) ->None:
+    def _generate_config(
+        self, stream: TextIO | None = None, skipsections: tuple[str, ...] = ()
+    ) -> None:
         """Write a configuration file according to the current configuration
         into the given stream or stdout.
         """
-        pass
+        options_by_section = {}
+        sections = []
+        for group in sorted(
+            self._arg_parser._action_groups,
+            key=lambda x: (x.title != "Main", x.title),
+        ):
+            group_name = group.title
+            assert group_name
+            if group_name in skipsections:
+                continue
+
+            options = []
+            option_actions = [
+                i
+                for i in group._group_actions
+                if not isinstance(i, argparse._SubParsersAction)
+            ]
+            for opt in sorted(option_actions, key=lambda x: x.option_strings[0][2:]):
+                if "--help" in opt.option_strings:
+                    continue
+
+                optname = opt.option_strings[0][2:]

-    def help(self) ->str:
+                try:
+                    optdict = self._option_dicts[optname]
+                except KeyError:
+                    continue
+
+                options.append(
+                    (
+                        optname,
+                        optdict,
+                        getattr(self.config, optname.replace("-", "_")),
+                    )
+                )
+
+                options = [
+                    (n, d, v) for (n, d, v) in options if not d.get("deprecated")
+                ]
+
+            if options:
+                sections.append(group_name)
+                options_by_section[group_name] = options
+        stream = stream or sys.stdout
+        printed = False
+        for section in sections:
+            if printed:
+                print("\n", file=stream)
+            with warnings.catch_warnings():
+                warnings.filterwarnings("ignore", category=DeprecationWarning)
+                utils.format_section(
+                    stream, section.upper(), sorted(options_by_section[section])
+                )
+            printed = True
+
+    def help(self) -> str:
         """Return the usage string based on the available options."""
-        pass
+        return self._arg_parser.format_help()

-    def _generate_config_file(self, *, minimal: bool=False) ->str:
+    def _generate_config_file(self, *, minimal: bool = False) -> str:
         """Write a configuration file according to the current configuration into
         stdout.
         """
-        pass
+        toml_doc = tomlkit.document()
+        tool_table = tomlkit.table(is_super_table=True)
+        toml_doc.add(tomlkit.key("tool"), tool_table)
+
+        pylint_tool_table = tomlkit.table(is_super_table=True)
+        tool_table.add(tomlkit.key("pylint"), pylint_tool_table)
+
+        for group in sorted(
+            self._arg_parser._action_groups,
+            key=lambda x: (x.title != "Main", x.title),
+        ):
+            # Skip the options section with the --help option
+            if group.title in {"options", "optional arguments", "Commands"}:
+                continue
+
+            # Skip sections without options such as "positional arguments"
+            if not group._group_actions:
+                continue
+
+            group_table = tomlkit.table()
+            option_actions = [
+                i
+                for i in group._group_actions
+                if not isinstance(i, argparse._SubParsersAction)
+            ]
+            for action in sorted(option_actions, key=lambda x: x.option_strings[0][2:]):
+                optname = action.option_strings[0][2:]
+
+                # We skip old name options that don't have their own optdict
+                try:
+                    optdict = self._option_dicts[optname]
+                except KeyError:
+                    continue
+
+                if optdict.get("hide_from_config_file"):
+                    continue
+
+                # Add help comment
+                if not minimal:
+                    help_msg = optdict.get("help", "")
+                    assert isinstance(help_msg, str)
+                    help_text = textwrap.wrap(help_msg, width=79)
+                    for line in help_text:
+                        group_table.add(tomlkit.comment(line))
+
+                # Get current value of option
+                value = getattr(self.config, optname.replace("-", "_"))
+
+                # Create a comment if the option has no value
+                if not value:
+                    if not minimal:
+                        group_table.add(tomlkit.comment(f"{optname} ="))
+                        group_table.add(tomlkit.nl())
+                    continue
+
+                # Skip deprecated options
+                if "kwargs" in optdict:
+                    assert isinstance(optdict["kwargs"], dict)
+                    if "new_names" in optdict["kwargs"]:
+                        continue
+
+                # Tomlkit doesn't support regular expressions
+                if isinstance(value, re.Pattern):
+                    value = value.pattern
+                elif isinstance(value, (list, tuple)) and isinstance(
+                    value[0], re.Pattern
+                ):
+                    value = [i.pattern for i in value]
+
+                # Handle tuples that should be strings
+                if optdict.get("type") == "py_version":
+                    value = ".".join(str(i) for i in value)
+
+                # Check if it is default value if we are in minimal mode
+                if minimal and value == optdict.get("default"):
+                    continue
+
+                # Add to table
+                group_table.add(optname, value)
+                group_table.add(tomlkit.nl())
+
+            assert group.title
+            if group_table:
+                pylint_tool_table.add(group.title.lower(), group_table)
+
+        toml_string = tomlkit.dumps(toml_doc)
+
+        # Make sure the string we produce is valid toml and can be parsed
+        tomllib.loads(toml_string)
+
+        return str(toml_string)

-    def set_option(self, optname: str, value: Any) ->None:
+    def set_option(self, optname: str, value: Any) -> None:
         """Set an option on the namespace object."""
-        pass
+        self.config = self._arg_parser.parse_known_args(
+            [f"--{optname.replace('_', '-')}", _parse_rich_type_value(value)],
+            self.config,
+        )[0]
diff --git a/pylint/config/arguments_provider.py b/pylint/config/arguments_provider.py
index 2aa77d702..7f75718ca 100644
--- a/pylint/config/arguments_provider.py
+++ b/pylint/config/arguments_provider.py
@@ -1,39 +1,65 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Arguments provider class used to expose options."""
+
 from __future__ import annotations
+
 from collections.abc import Iterator
 from typing import Any
+
 from pylint.config.arguments_manager import _ArgumentsManager
 from pylint.typing import OptionDict, Options


 class _ArgumentsProvider:
     """Base class for classes that provide arguments."""
+
     name: str
     """Name of the provider."""
+
     options: Options = ()
     """Options provided by this provider."""
+
     option_groups_descs: dict[str, str] = {}
     """Option groups of this provider and their descriptions."""

-    def __init__(self, arguments_manager: _ArgumentsManager) ->None:
+    def __init__(self, arguments_manager: _ArgumentsManager) -> None:
         self._arguments_manager = arguments_manager
         """The manager that will parse and register any options provided."""
+
         self._arguments_manager._register_options_provider(self)

-    def _option_value(self, opt: str) ->Any:
+    def _option_value(self, opt: str) -> Any:
         """Get the current value for the given option."""
-        pass
+        return getattr(self._arguments_manager.config, opt.replace("-", "_"), None)

-    def _options_by_section(self) ->Iterator[tuple[str, list[tuple[str,
-        OptionDict, Any]]] | tuple[None, dict[str, list[tuple[str,
-        OptionDict, Any]]]]]:
+    def _options_by_section(
+        self,
+    ) -> Iterator[
+        tuple[str, list[tuple[str, OptionDict, Any]]]
+        | tuple[None, dict[str, list[tuple[str, OptionDict, Any]]]]
+    ]:
         """Return an iterator on options grouped by section.

         (section, [list of (optname, optdict, optvalue)])
         """
-        pass
+        sections: dict[str, list[tuple[str, OptionDict, Any]]] = {}
+        for optname, optdict in self.options:
+            sections.setdefault(optdict.get("group"), []).append(  # type: ignore[arg-type]
+                (optname, optdict, self._option_value(optname))
+            )
+        if None in sections:
+            yield None, sections.pop(None)  # type: ignore[call-overload]
+        for section, options in sorted(sections.items()):
+            yield section.upper(), options

-    def _options_and_values(self, options: (Options | None)=None) ->Iterator[
-        tuple[str, OptionDict, Any]]:
+    def _options_and_values(
+        self, options: Options | None = None
+    ) -> Iterator[tuple[str, OptionDict, Any]]:
         """DEPRECATED."""
-        pass
+        if options is None:
+            options = self.options
+        for optname, optdict in options:
+            yield optname, optdict, self._option_value(optname)
diff --git a/pylint/config/callback_actions.py b/pylint/config/callback_actions.py
index 3fe67065b..bf2decd3c 100644
--- a/pylint/config/callback_actions.py
+++ b/pylint/config/callback_actions.py
@@ -1,12 +1,22 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
+# pylint: disable=too-many-arguments, redefined-builtin, duplicate-code
+
 """Callback actions for various options."""
+
 from __future__ import annotations
+
 import abc
 import argparse
 import sys
 from collections.abc import Callable, Sequence
 from pathlib import Path
 from typing import TYPE_CHECKING, Any
+
 from pylint import exceptions, extensions, interfaces, utils
+
 if TYPE_CHECKING:
     from pylint.config.help_formatter import _HelpFormatter
     from pylint.lint import PyLinter
@@ -17,10 +27,14 @@ class _CallbackAction(argparse.Action):
     """Custom callback action."""

     @abc.abstractmethod
-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)=None) ->None:
-        raise NotImplementedError
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = None,
+    ) -> None:
+        raise NotImplementedError  # pragma: no cover


 class _DoNothingAction(_CallbackAction):
@@ -30,44 +44,97 @@ class _DoNothingAction(_CallbackAction):
     without erroring when they are then processed again by argparse.
     """

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)=None) ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = None,
+    ) -> None:
         return None


 class _AccessRunObjectAction(_CallbackAction):
     """Action that has access to the Run object."""

-    def __init__(self, option_strings: Sequence[str], dest: str, nargs:
-        None=None, const: None=None, default: None=None, type: None=None,
-        choices: None=None, required: bool=False, help: str='', metavar:
-        str='', **kwargs: Run) ->None:
-        self.run = kwargs['Run']
-        super().__init__(option_strings, dest, 0, const, default, type,
-            choices, required, help, metavar)
+    def __init__(
+        self,
+        option_strings: Sequence[str],
+        dest: str,
+        nargs: None = None,
+        const: None = None,
+        default: None = None,
+        type: None = None,
+        choices: None = None,
+        required: bool = False,
+        help: str = "",
+        metavar: str = "",
+        **kwargs: Run,
+    ) -> None:
+        self.run = kwargs["Run"]
+
+        super().__init__(
+            option_strings,
+            dest,
+            0,
+            const,
+            default,
+            type,
+            choices,
+            required,
+            help,
+            metavar,
+        )

     @abc.abstractmethod
-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)=None) ->None:
-        raise NotImplementedError
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = None,
+    ) -> None:
+        raise NotImplementedError  # pragma: no cover


 class _MessageHelpAction(_CallbackAction):
     """Display the help message of a message."""

-    def __init__(self, option_strings: Sequence[str], dest: str, nargs:
-        None=None, const: None=None, default: None=None, type: None=None,
-        choices: None=None, required: bool=False, help: str='', metavar:
-        str='', **kwargs: Run) ->None:
-        self.run = kwargs['Run']
-        super().__init__(option_strings, dest, '+', const, default, type,
-            choices, required, help, metavar)
-
-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[str] | None), option_string: (
-        str | None)='--help-msg') ->None:
+    def __init__(
+        self,
+        option_strings: Sequence[str],
+        dest: str,
+        nargs: None = None,
+        const: None = None,
+        default: None = None,
+        type: None = None,
+        choices: None = None,
+        required: bool = False,
+        help: str = "",
+        metavar: str = "",
+        **kwargs: Run,
+    ) -> None:
+        self.run = kwargs["Run"]
+        super().__init__(
+            option_strings,
+            dest,
+            "+",
+            const,
+            default,
+            type,
+            choices,
+            required,
+            help,
+            metavar,
+        )
+
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[str] | None,
+        option_string: str | None = "--help-msg",
+    ) -> None:
         assert isinstance(values, (list, tuple))
         values_to_print: list[str] = []
         for msg in values:
@@ -80,9 +147,13 @@ class _MessageHelpAction(_CallbackAction):
 class _ListMessagesAction(_AccessRunObjectAction):
     """Display all available messages."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--list-enabled') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--list-enabled",
+    ) -> None:
         self.run.linter.msgs_store.list_messages()
         sys.exit(0)

@@ -90,9 +161,13 @@ class _ListMessagesAction(_AccessRunObjectAction):
 class _ListMessagesEnabledAction(_AccessRunObjectAction):
     """Display all enabled messages."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--list-msgs-enabled') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--list-msgs-enabled",
+    ) -> None:
         self.run.linter.list_messages_enabled()
         sys.exit(0)

@@ -100,9 +175,13 @@ class _ListMessagesEnabledAction(_AccessRunObjectAction):
 class _ListCheckGroupsAction(_AccessRunObjectAction):
     """Display all the check groups that pylint knows about."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--list-groups') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--list-groups",
+    ) -> None:
         for check in self.run.linter.get_checker_names():
             print(check)
         sys.exit(0)
@@ -111,33 +190,45 @@ class _ListCheckGroupsAction(_AccessRunObjectAction):
 class _ListConfidenceLevelsAction(_AccessRunObjectAction):
     """Display all the confidence levels that pylint knows about."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--list-conf-levels') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--list-conf-levels",
+    ) -> None:
         for level in interfaces.CONFIDENCE_LEVELS:
-            print(f'%-18s: {level}')
+            print(f"%-18s: {level}")
         sys.exit(0)


 class _ListExtensionsAction(_AccessRunObjectAction):
     """Display all extensions under pylint.extensions."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--list-extensions') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--list-extensions",
+    ) -> None:
         for filename in Path(extensions.__file__).parent.iterdir():
-            if filename.suffix == '.py' and not filename.stem.startswith('_'):
-                extension_name, _, _ = filename.stem.partition('.')
-                print(f'pylint.extensions.{extension_name}')
+            if filename.suffix == ".py" and not filename.stem.startswith("_"):
+                extension_name, _, _ = filename.stem.partition(".")
+                print(f"pylint.extensions.{extension_name}")
         sys.exit(0)


 class _FullDocumentationAction(_AccessRunObjectAction):
     """Display the full documentation."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--full-documentation') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--full-documentation",
+    ) -> None:
         utils.print_full_documentation(self.run.linter)
         sys.exit(0)

@@ -145,19 +236,29 @@ class _FullDocumentationAction(_AccessRunObjectAction):
 class _GenerateRCFileAction(_AccessRunObjectAction):
     """Generate a pylintrc file."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--generate-rcfile') ->None:
-        self.run.linter._generate_config(skipsections=('Commands',))
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--generate-rcfile",
+    ) -> None:
+        # TODO: 4.x: Deprecate this after the auto-upgrade functionality of
+        # pylint-config is sufficient.
+        self.run.linter._generate_config(skipsections=("Commands",))
         sys.exit(0)


 class _GenerateConfigFileAction(_AccessRunObjectAction):
     """Generate a .toml format configuration file."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--generate-toml-config') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--generate-toml-config",
+    ) -> None:
         print(self.run.linter._generate_config_file())
         sys.exit(0)

@@ -173,96 +274,195 @@ class _ErrorsOnlyModeAction(_AccessRunObjectAction):
         * do not save execution information
     """

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--errors-only') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--errors-only",
+    ) -> None:
         self.run.linter._error_mode = True


 class _LongHelpAction(_AccessRunObjectAction):
     """Display the long help message."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--long-help') ->None:
-        formatter: _HelpFormatter = self.run.linter._arg_parser._get_formatter(
-            )
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--long-help",
+    ) -> None:
+        formatter: _HelpFormatter = self.run.linter._arg_parser._get_formatter()  # type: ignore[assignment]
+
+        # Add extra info as epilog to the help message
         self.run.linter._arg_parser.epilog = formatter.get_long_description()
         print(self.run.linter.help())
+
         sys.exit(0)


 class _AccessLinterObjectAction(_CallbackAction):
     """Action that has access to the Linter object."""

-    def __init__(self, option_strings: Sequence[str], dest: str, nargs:
-        None=None, const: None=None, default: None=None, type: None=None,
-        choices: None=None, required: bool=False, help: str='', metavar:
-        str='', **kwargs: PyLinter) ->None:
-        self.linter = kwargs['linter']
-        super().__init__(option_strings, dest, 1, const, default, type,
-            choices, required, help, metavar)
+    def __init__(
+        self,
+        option_strings: Sequence[str],
+        dest: str,
+        nargs: None = None,
+        const: None = None,
+        default: None = None,
+        type: None = None,
+        choices: None = None,
+        required: bool = False,
+        help: str = "",
+        metavar: str = "",
+        **kwargs: PyLinter,
+    ) -> None:
+        self.linter = kwargs["linter"]
+
+        super().__init__(
+            option_strings,
+            dest,
+            1,
+            const,
+            default,
+            type,
+            choices,
+            required,
+            help,
+            metavar,
+        )

     @abc.abstractmethod
-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)=None) ->None:
-        raise NotImplementedError
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = None,
+    ) -> None:
+        raise NotImplementedError  # pragma: no cover


 class _XableAction(_AccessLinterObjectAction):
     """Callback action for enabling or disabling a message."""

+    def _call(
+        self,
+        xabling_function: Callable[[str], None],
+        values: str | Sequence[Any] | None,
+        option_string: str | None,
+    ) -> None:
+        assert isinstance(values, (tuple, list))
+        for msgid in utils._check_csv(values[0]):
+            try:
+                xabling_function(msgid)
+            except (
+                exceptions.DeletedMessageError,
+                exceptions.MessageBecameExtensionError,
+            ) as e:
+                self.linter._stashed_messages[
+                    (self.linter.current_name, "useless-option-value")
+                ].append((option_string, str(e)))
+            except exceptions.UnknownMessageError:
+                self.linter._stashed_messages[
+                    (self.linter.current_name, "unknown-option-value")
+                ].append((option_string, msgid))
+
     @abc.abstractmethod
-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--disable') ->None:
-        raise NotImplementedError
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--disable",
+    ) -> None:
+        raise NotImplementedError  # pragma: no cover


 class _DisableAction(_XableAction):
     """Callback action for disabling a message."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--disable') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--disable",
+    ) -> None:
         self._call(self.linter.disable, values, option_string)


 class _EnableAction(_XableAction):
     """Callback action for enabling a message."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--enable') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--enable",
+    ) -> None:
         self._call(self.linter.enable, values, option_string)


 class _OutputFormatAction(_AccessLinterObjectAction):
     """Callback action for setting the output format."""

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)='--enable') ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = "--enable",
+    ) -> None:
         assert isinstance(values, (tuple, list))
-        assert isinstance(values[0], str
-            ), "'output-format' should be a comma separated string of reporters"
+        assert isinstance(
+            values[0], str
+        ), "'output-format' should be a comma separated string of reporters"
         self.linter._load_reporters(values[0])


 class _AccessParserAction(_CallbackAction):
     """Action that has access to the ArgumentParser object."""

-    def __init__(self, option_strings: Sequence[str], dest: str, nargs:
-        None=None, const: None=None, default: None=None, type: None=None,
-        choices: None=None, required: bool=False, help: str='', metavar:
-        str='', **kwargs: argparse.ArgumentParser) ->None:
-        self.parser = kwargs['parser']
-        super().__init__(option_strings, dest, 0, const, default, type,
-            choices, required, help, metavar)
+    def __init__(
+        self,
+        option_strings: Sequence[str],
+        dest: str,
+        nargs: None = None,
+        const: None = None,
+        default: None = None,
+        type: None = None,
+        choices: None = None,
+        required: bool = False,
+        help: str = "",
+        metavar: str = "",
+        **kwargs: argparse.ArgumentParser,
+    ) -> None:
+        self.parser = kwargs["parser"]
+
+        super().__init__(
+            option_strings,
+            dest,
+            0,
+            const,
+            default,
+            type,
+            choices,
+            required,
+            help,
+            metavar,
+        )

     @abc.abstractmethod
-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)=None) ->None:
-        raise NotImplementedError
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = None,
+    ) -> None:
+        raise NotImplementedError  # pragma: no cover
diff --git a/pylint/config/config_file_parser.py b/pylint/config/config_file_parser.py
index 1f6cefb0d..efc085e59 100644
--- a/pylint/config/config_file_parser.py
+++ b/pylint/config/config_file_parser.py
@@ -1,17 +1,27 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Configuration file parser class."""
+
 from __future__ import annotations
+
 import configparser
 import os
 import sys
 from pathlib import Path
 from typing import TYPE_CHECKING, Dict, List, Tuple
+
 from pylint.config.utils import _parse_rich_type_value
+
 if sys.version_info >= (3, 11):
     import tomllib
 else:
     import tomli as tomllib
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
+
 PylintConfigFileData = Tuple[Dict[str, str], List[str]]


@@ -19,44 +29,101 @@ class _RawConfParser:
     """Class to parse various formats of configuration files."""

     @staticmethod
-    def parse_ini_file(file_path: Path) ->PylintConfigFileData:
+    def parse_ini_file(file_path: Path) -> PylintConfigFileData:
         """Parse and handle errors of an ini configuration file.

         Raises ``configparser.Error``.
         """
-        pass
+        parser = configparser.ConfigParser(inline_comment_prefixes=("#", ";"))
+        # Use this encoding in order to strip the BOM marker, if any.
+        with open(file_path, encoding="utf_8_sig") as fp:
+            parser.read_file(fp)
+
+        config_content: dict[str, str] = {}
+        options: list[str] = []
+        ini_file_with_sections = _RawConfParser._ini_file_with_sections(file_path)
+        for section in parser.sections():
+            if ini_file_with_sections and not section.startswith("pylint"):
+                continue
+            for option, value in parser[section].items():
+                config_content[option] = value
+                options += [f"--{option}", value]
+        return config_content, options

     @staticmethod
-    def _ini_file_with_sections(file_path: Path) ->bool:
+    def _ini_file_with_sections(file_path: Path) -> bool:
         """Return whether the file uses sections."""
-        pass
+        if "setup.cfg" in file_path.parts:
+            return True
+        if "tox.ini" in file_path.parts:
+            return True
+        return False

     @staticmethod
-    def parse_toml_file(file_path: Path) ->PylintConfigFileData:
+    def parse_toml_file(file_path: Path) -> PylintConfigFileData:
         """Parse and handle errors of a toml configuration file.

         Raises ``tomllib.TOMLDecodeError``.
         """
-        pass
+        with open(file_path, mode="rb") as fp:
+            content = tomllib.load(fp)
+        try:
+            sections_values = content["tool"]["pylint"]
+        except KeyError:
+            return {}, []
+
+        config_content: dict[str, str] = {}
+        options: list[str] = []
+        for opt, values in sections_values.items():
+            if isinstance(values, dict):
+                for config, value in values.items():
+                    value = _parse_rich_type_value(value)
+                    config_content[config] = value
+                    options += [f"--{config}", value]
+            else:
+                values = _parse_rich_type_value(values)
+                config_content[opt] = values
+                options += [f"--{opt}", values]
+        return config_content, options

     @staticmethod
-    def parse_config_file(file_path: (Path | None), verbose: bool
-        ) ->PylintConfigFileData:
+    def parse_config_file(
+        file_path: Path | None, verbose: bool
+    ) -> PylintConfigFileData:
         """Parse a config file and return str-str pairs.

         Raises ``tomllib.TOMLDecodeError``, ``configparser.Error``.
         """
-        pass
+        if file_path is None:
+            if verbose:
+                print(
+                    "No config file found, using default configuration", file=sys.stderr
+                )
+            return {}, []
+
+        file_path = Path(os.path.expandvars(file_path)).expanduser()
+        if not file_path.exists():
+            raise OSError(f"The config file {file_path} doesn't exist!")
+
+        if verbose:
+            print(f"Using config file {file_path}", file=sys.stderr)
+
+        if file_path.suffix == ".toml":
+            return _RawConfParser.parse_toml_file(file_path)
+        return _RawConfParser.parse_ini_file(file_path)


 class _ConfigurationFileParser:
     """Class to parse various formats of configuration files."""

-    def __init__(self, verbose: bool, linter: PyLinter) ->None:
+    def __init__(self, verbose: bool, linter: PyLinter) -> None:
         self.verbose_mode = verbose
         self.linter = linter

-    def parse_config_file(self, file_path: (Path | None)
-        ) ->PylintConfigFileData:
+    def parse_config_file(self, file_path: Path | None) -> PylintConfigFileData:
         """Parse a config file and return str-str pairs."""
-        pass
+        try:
+            return _RawConfParser.parse_config_file(file_path, self.verbose_mode)
+        except (configparser.Error, tomllib.TOMLDecodeError) as e:
+            self.linter.add_message("config-parse-error", line=0, args=str(e))
+            return {}, []
diff --git a/pylint/config/config_initialization.py b/pylint/config/config_initialization.py
index 526dbe95e..9656ea564 100644
--- a/pylint/config/config_initialization.py
+++ b/pylint/config/config_initialization.py
@@ -1,29 +1,163 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import sys
 import warnings
 from glob import glob
 from itertools import chain
 from pathlib import Path
 from typing import TYPE_CHECKING
+
 from pylint import reporters
 from pylint.config.config_file_parser import _ConfigurationFileParser
-from pylint.config.exceptions import ArgumentPreprocessingError, _UnrecognizedOptionError
+from pylint.config.exceptions import (
+    ArgumentPreprocessingError,
+    _UnrecognizedOptionError,
+)
 from pylint.utils import utils
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


-def _config_initialization(linter: PyLinter, args_list: list[str], reporter:
-    (reporters.BaseReporter | reporters.MultiReporter | None)=None,
-    config_file: (None | str | Path)=None, verbose_mode: bool=False) ->list[str
-    ]:
+def _config_initialization(
+    linter: PyLinter,
+    args_list: list[str],
+    reporter: reporters.BaseReporter | reporters.MultiReporter | None = None,
+    config_file: None | str | Path = None,
+    verbose_mode: bool = False,
+) -> list[str]:
     """Parse all available options, read config files and command line arguments and
     set options accordingly.
     """
-    pass
+    config_file = Path(config_file) if config_file else None
+
+    # Set the current module to the configuration file
+    # to allow raising messages on the configuration file.
+    linter.set_current_module(str(config_file) if config_file else "")
+
+    # Read the configuration file
+    config_file_parser = _ConfigurationFileParser(verbose_mode, linter)
+    try:
+        config_data, config_args = config_file_parser.parse_config_file(
+            file_path=config_file
+        )
+    except OSError as ex:
+        print(ex, file=sys.stderr)
+        sys.exit(32)
+
+    # Order --enable=all or --disable=all to come first.
+    config_args = _order_all_first(config_args, joined=False)
+
+    # Run init hook, if present, before loading plugins
+    if "init-hook" in config_data:
+        exec(utils._unquote(config_data["init-hook"]))  # pylint: disable=exec-used
+
+    # Load plugins if specified in the config file
+    if "load-plugins" in config_data:
+        linter.load_plugin_modules(utils._splitstrip(config_data["load-plugins"]))
+
+    unrecognized_options_message = None
+    # First we parse any options from a configuration file
+    try:
+        linter._parse_configuration_file(config_args)
+    except _UnrecognizedOptionError as exc:
+        unrecognized_options_message = ", ".join(exc.options)
+
+    # Then, if a custom reporter is provided as argument, it may be overridden
+    # by file parameters, so we re-set it here. We do this before command line
+    # parsing, so it's still overridable by command line options
+    if reporter:
+        linter.set_reporter(reporter)
+
+    # Set the current module to the command line
+    # to allow raising messages on it
+    linter.set_current_module("Command line")
+
+    # Now we parse any options from the command line, so they can override
+    # the configuration file
+    args_list = _order_all_first(args_list, joined=True)
+    parsed_args_list = linter._parse_command_line_configuration(args_list)
+
+    # Remove the positional arguments separator from the list of arguments if it exists
+    try:
+        parsed_args_list.remove("--")
+    except ValueError:
+        pass
+
+    # Check if there are any options that we do not recognize
+    unrecognized_options: list[str] = []
+    for opt in parsed_args_list:
+        if opt.startswith("--"):
+            unrecognized_options.append(opt[2:])
+        elif opt.startswith("-"):
+            unrecognized_options.append(opt[1:])
+    if unrecognized_options:
+        msg = ", ".join(unrecognized_options)
+        try:
+            linter._arg_parser.error(f"Unrecognized option found: {msg}")
+        except SystemExit:
+            sys.exit(32)
+
+    # Now that config file and command line options have been loaded
+    # with all disables, it is safe to emit messages
+    if unrecognized_options_message is not None:
+        linter.set_current_module(str(config_file) if config_file else "")
+        linter.add_message(
+            "unrecognized-option", args=unrecognized_options_message, line=0
+        )

+    # TODO: Change this to be checked only when upgrading the configuration
+    for exc_name in linter.config.overgeneral_exceptions:
+        if "." not in exc_name:
+            warnings.warn_explicit(
+                f"'{exc_name}' is not a proper value for the 'overgeneral-exceptions' option. "
+                f"Use fully qualified name (maybe 'builtins.{exc_name}' ?) instead. "
+                "This will cease to be checked at runtime when the configuration "
+                "upgrader is released.",
+                category=UserWarning,
+                filename="pylint: Command line or configuration file",
+                lineno=1,
+                module="pylint",
+            )

-def _order_all_first(config_args: list[str], *, joined: bool) ->list[str]:
+    linter._emit_stashed_messages()
+
+    # Set the current module to configuration as we don't know where
+    # the --load-plugins key is coming from
+    linter.set_current_module("Command line or configuration file")
+
+    # We have loaded configuration from config file and command line. Now, we can
+    # load plugin specific configuration.
+    linter.load_plugin_configuration()
+
+    # Now that plugins are loaded, get list of all fail_on messages, and
+    # enable them
+    linter.enable_fail_on_messages()
+
+    linter._parse_error_mode()
+
+    # Link the base Namespace object on the current directory
+    linter._directory_namespaces[Path().resolve()] = (linter.config, {})
+
+    # parsed_args_list should now only be a list of inputs to lint.
+    # All other options have been removed from the list.
+    return list(
+        chain.from_iterable(
+            # NOTE: 'or [arg]' is needed in the case the input file or directory does
+            # not exist and 'glob(arg)' cannot find anything. Without this we would
+            # not be able to output the fatal import error for this module later on,
+            # as it would get silently ignored.
+            glob(arg, recursive=True) or [arg]
+            for arg in parsed_args_list
+        )
+    )
+
+
+def _order_all_first(config_args: list[str], *, joined: bool) -> list[str]:
     """Reorder config_args such that --enable=all or --disable=all comes first.

     Raise if both are given.
@@ -31,4 +165,38 @@ def _order_all_first(config_args: list[str], *, joined: bool) ->list[str]:
     If joined is True, expect args in the form '--enable=all,for-any-all'.
     If joined is False, expect args in the form '--enable', 'all,for-any-all'.
     """
-    pass
+    indexes_to_prepend = []
+    all_action = ""
+
+    for i, arg in enumerate(config_args):
+        if joined and arg.startswith(("--enable=", "--disable=")):
+            value = arg.split("=")[1]
+        elif arg in {"--enable", "--disable"}:
+            value = config_args[i + 1]
+        else:
+            continue
+
+        if "all" not in (msg.strip() for msg in value.split(",")):
+            continue
+
+        arg = arg.split("=")[0]
+        if all_action and (arg != all_action):
+            raise ArgumentPreprocessingError(
+                "--enable=all and --disable=all are incompatible."
+            )
+        all_action = arg
+
+        indexes_to_prepend.append(i)
+        if not joined:
+            indexes_to_prepend.append(i + 1)
+
+    returned_args = []
+    for i in indexes_to_prepend:
+        returned_args.append(config_args[i])
+
+    for i, arg in enumerate(config_args):
+        if i in indexes_to_prepend:
+            continue
+        returned_args.append(arg)
+
+    return returned_args
diff --git a/pylint/config/deprecation_actions.py b/pylint/config/deprecation_actions.py
index 442ad474c..85a77cc78 100644
--- a/pylint/config/deprecation_actions.py
+++ b/pylint/config/deprecation_actions.py
@@ -1,5 +1,13 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
+# pylint: disable=too-many-arguments, redefined-builtin
+
 """Deprecated option actions."""
+
 from __future__ import annotations
+
 import argparse
 import warnings
 from collections.abc import Sequence
@@ -9,18 +17,42 @@ from typing import Any
 class _OldNamesAction(argparse._StoreAction):
     """Store action that also sets the value to old names."""

-    def __init__(self, option_strings: Sequence[str], dest: str, nargs:
-        None=None, const: None=None, default: None=None, type: None=None,
-        choices: None=None, required: bool=False, help: str='', metavar:
-        str='', old_names: (list[str] | None)=None) ->None:
+    def __init__(
+        self,
+        option_strings: Sequence[str],
+        dest: str,
+        nargs: None = None,
+        const: None = None,
+        default: None = None,
+        type: None = None,
+        choices: None = None,
+        required: bool = False,
+        help: str = "",
+        metavar: str = "",
+        old_names: list[str] | None = None,
+    ) -> None:
         assert old_names
         self.old_names = old_names
-        super().__init__(option_strings, dest, 1, const, default, type,
-            choices, required, help, metavar)
+        super().__init__(
+            option_strings,
+            dest,
+            1,
+            const,
+            default,
+            type,
+            choices,
+            required,
+            help,
+            metavar,
+        )

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)=None) ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = None,
+    ) -> None:
         assert isinstance(values, list)
         setattr(namespace, self.dest, values[0])
         for old_name in self.old_names:
@@ -30,20 +62,47 @@ class _OldNamesAction(argparse._StoreAction):
 class _NewNamesAction(argparse._StoreAction):
     """Store action that also emits a deprecation warning about a new name."""

-    def __init__(self, option_strings: Sequence[str], dest: str, nargs:
-        None=None, const: None=None, default: None=None, type: None=None,
-        choices: None=None, required: bool=False, help: str='', metavar:
-        str='', new_names: (list[str] | None)=None) ->None:
+    def __init__(
+        self,
+        option_strings: Sequence[str],
+        dest: str,
+        nargs: None = None,
+        const: None = None,
+        default: None = None,
+        type: None = None,
+        choices: None = None,
+        required: bool = False,
+        help: str = "",
+        metavar: str = "",
+        new_names: list[str] | None = None,
+    ) -> None:
         assert new_names
         self.new_names = new_names
-        super().__init__(option_strings, dest, 1, const, default, type,
-            choices, required, help, metavar)
+        super().__init__(
+            option_strings,
+            dest,
+            1,
+            const,
+            default,
+            type,
+            choices,
+            required,
+            help,
+            metavar,
+        )

-    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse
-        .Namespace, values: (str | Sequence[Any] | None), option_string: (
-        str | None)=None) ->None:
+    def __call__(
+        self,
+        parser: argparse.ArgumentParser,
+        namespace: argparse.Namespace,
+        values: str | Sequence[Any] | None,
+        option_string: str | None = None,
+    ) -> None:
         assert isinstance(values, list)
         setattr(namespace, self.dest, values[0])
         warnings.warn(
-            f"{self.option_strings[0]} has been deprecated. Please look into using any of the following options: {', '.join(self.new_names)}."
-            , DeprecationWarning, stacklevel=2)
+            f"{self.option_strings[0]} has been deprecated. Please look into "
+            f"using any of the following options: {', '.join(self.new_names)}.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
diff --git a/pylint/config/exceptions.py b/pylint/config/exceptions.py
index 9b05b9670..982e3f494 100644
--- a/pylint/config/exceptions.py
+++ b/pylint/config/exceptions.py
@@ -1,3 +1,7 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations


@@ -12,7 +16,7 @@ class _UnrecognizedOptionError(Exception):
     unknown.
     """

-    def __init__(self, options: list[str], *args: object) ->None:
+    def __init__(self, options: list[str], *args: object) -> None:
         self.options = options
         super().__init__(*args)

diff --git a/pylint/config/find_default_config_files.py b/pylint/config/find_default_config_files.py
index feee8e083..346393cf9 100644
--- a/pylint/config/find_default_config_files.py
+++ b/pylint/config/find_default_config_files.py
@@ -1,45 +1,144 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import configparser
 import os
 import sys
 from collections.abc import Iterator
 from pathlib import Path
+
 if sys.version_info >= (3, 11):
     import tomllib
 else:
     import tomli as tomllib
-RC_NAMES = Path('pylintrc'), Path('pylintrc.toml'), Path('.pylintrc'), Path(
-    '.pylintrc.toml')
-PYPROJECT_NAME = Path('pyproject.toml')
-CONFIG_NAMES = *RC_NAMES, PYPROJECT_NAME, Path('setup.cfg')
+
+RC_NAMES = (
+    Path("pylintrc"),
+    Path("pylintrc.toml"),
+    Path(".pylintrc"),
+    Path(".pylintrc.toml"),
+)
+PYPROJECT_NAME = Path("pyproject.toml")
+CONFIG_NAMES = (*RC_NAMES, PYPROJECT_NAME, Path("setup.cfg"))


-def _find_pyproject() ->Path:
+def _find_pyproject() -> Path:
     """Search for file pyproject.toml in the parent directories recursively.

     It resolves symlinks, so if there is any symlink up in the tree, it does not respect them
     """
-    pass
+    current_dir = Path.cwd().resolve()
+    is_root = False
+    while not is_root:
+        if (current_dir / PYPROJECT_NAME).is_file():
+            return current_dir / PYPROJECT_NAME
+        is_root = (
+            current_dir == current_dir.parent
+            or (current_dir / ".git").is_dir()
+            or (current_dir / ".hg").is_dir()
+        )
+        current_dir = current_dir.parent
+
+    return current_dir
+
+
+def _toml_has_config(path: Path | str) -> bool:
+    with open(path, mode="rb") as toml_handle:
+        try:
+            content = tomllib.load(toml_handle)
+        except tomllib.TOMLDecodeError as error:
+            print(f"Failed to load '{path}': {error}")
+            return False
+    return "pylint" in content.get("tool", [])
+

+def _cfg_has_config(path: Path | str) -> bool:
+    parser = configparser.ConfigParser()
+    try:
+        parser.read(path, encoding="utf-8")
+    except configparser.Error:
+        return False
+    return any(section.startswith("pylint.") for section in parser.sections())

-def _yield_default_files() ->Iterator[Path]:
+
+def _yield_default_files() -> Iterator[Path]:
     """Iterate over the default config file names and see if they exist."""
-    pass
+    for config_name in CONFIG_NAMES:
+        try:
+            if config_name.is_file():
+                if config_name.suffix == ".toml" and not _toml_has_config(config_name):
+                    continue
+                if config_name.suffix == ".cfg" and not _cfg_has_config(config_name):
+                    continue
+
+                yield config_name.resolve()
+        except OSError:
+            pass


-def _find_project_config() ->Iterator[Path]:
+def _find_project_config() -> Iterator[Path]:
     """Traverse up the directory tree to find a config file.

     Stop if no '__init__' is found and thus we are no longer in a package.
     """
-    pass
+    if Path("__init__.py").is_file():
+        curdir = Path(os.getcwd()).resolve()
+        while (curdir / "__init__.py").is_file():
+            curdir = curdir.parent
+            for rc_name in RC_NAMES:
+                rc_path = curdir / rc_name
+                if rc_path.is_file():
+                    yield rc_path.resolve()


-def _find_config_in_home_or_environment() ->Iterator[Path]:
+def _find_config_in_home_or_environment() -> Iterator[Path]:
     """Find a config file in the specified environment var or the home directory."""
-    pass
+    if "PYLINTRC" in os.environ and Path(os.environ["PYLINTRC"]).exists():
+        if Path(os.environ["PYLINTRC"]).is_file():
+            yield Path(os.environ["PYLINTRC"]).resolve()
+    else:
+        try:
+            user_home = Path.home()
+        except RuntimeError:
+            # If the home directory does not exist a RuntimeError will be raised
+            user_home = None
+
+        if user_home is not None and str(user_home) not in ("~", "/root"):
+            home_rc = user_home / ".pylintrc"
+            if home_rc.is_file():
+                yield home_rc.resolve()
+
+            home_rc = user_home / ".config" / "pylintrc"
+            if home_rc.is_file():
+                yield home_rc.resolve()


-def find_default_config_files() ->Iterator[Path]:
+def find_default_config_files() -> Iterator[Path]:
     """Find all possible config files."""
-    pass
+    yield from _yield_default_files()
+
+    try:
+        yield from _find_project_config()
+    except OSError:
+        pass
+
+    try:
+        parent_pyproject = _find_pyproject()
+        if parent_pyproject.is_file() and _toml_has_config(parent_pyproject):
+            yield parent_pyproject.resolve()
+    except OSError:
+        pass
+
+    try:
+        yield from _find_config_in_home_or_environment()
+    except OSError:
+        pass
+
+    try:
+        if os.path.isfile("/etc/pylintrc"):
+            yield Path("/etc/pylintrc").resolve()
+    except OSError:
+        pass
diff --git a/pylint/config/help_formatter.py b/pylint/config/help_formatter.py
index 0bcc08da4..78d43d178 100644
--- a/pylint/config/help_formatter.py
+++ b/pylint/config/help_formatter.py
@@ -1,5 +1,11 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import argparse
+
 from pylint.config.callback_actions import _CallbackAction
 from pylint.constants import DEFAULT_PYLINT_HOME

@@ -7,6 +13,52 @@ from pylint.constants import DEFAULT_PYLINT_HOME
 class _HelpFormatter(argparse.RawDescriptionHelpFormatter):
     """Formatter for the help message emitted by argparse."""

-    def _get_help_string(self, action: argparse.Action) ->(str | None):
+    def _get_help_string(self, action: argparse.Action) -> str | None:
         """Copied from argparse.ArgumentDefaultsHelpFormatter."""
-        pass
+        assert action.help
+        help_string = action.help
+
+        # CallbackActions don't have a default
+        if isinstance(action, _CallbackAction):
+            return help_string
+
+        if "%(default)" not in help_string:
+            if action.default is not argparse.SUPPRESS:
+                defaulting_nargs = [argparse.OPTIONAL, argparse.ZERO_OR_MORE]
+                if action.option_strings or action.nargs in defaulting_nargs:
+                    help_string += " (default: %(default)s)"
+        return help_string
+
+    @staticmethod
+    def get_long_description() -> str:
+        return f"""
+Environment variables:
+    The following environment variables are used:
+        * PYLINTHOME    Path to the directory where persistent data for the run will
+                        be stored. If not found, it defaults to '{DEFAULT_PYLINT_HOME}'.
+        * PYLINTRC      Path to the configuration file. See the documentation for the method used
+                        to search for configuration file.
+
+Output:
+    Using the default text output, the message format is :
+
+        MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE
+
+    There are 5 kind of message types :
+        * (I) info,         for informational messages
+        * (C) convention,   for programming standard violation
+        * (R) refactor,     for bad code smell
+        * (W) warning,      for python specific problems
+        * (E) error,        for probable bugs in the code
+        * (F) fatal,        if an error occurred which prevented pylint from doing further processing.
+
+Output status code:
+    Pylint should leave with following bitwise status codes:
+        * 0 if everything went fine
+        * 1 if a fatal message was issued
+        * 2 if an error message was issued
+        * 4 if a warning message was issued
+        * 8 if a refactor message was issued
+        * 16 if a convention message was issued
+        * 32 on usage error
+"""
diff --git a/pylint/config/utils.py b/pylint/config/utils.py
index 3da9282b3..91e4ff86f 100644
--- a/pylint/config/utils.py
+++ b/pylint/config/utils.py
@@ -1,65 +1,259 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Utils for arguments/options parsing and handling."""
+
 from __future__ import annotations
+
 import re
 from collections.abc import Callable, Sequence
 from pathlib import Path
 from typing import TYPE_CHECKING, Any
+
 from pylint import extensions, utils
-from pylint.config.argument import _CallableArgument, _ExtendArgument, _StoreArgument, _StoreNewNamesArgument, _StoreOldNamesArgument, _StoreTrueArgument
+from pylint.config.argument import (
+    _CallableArgument,
+    _ExtendArgument,
+    _StoreArgument,
+    _StoreNewNamesArgument,
+    _StoreOldNamesArgument,
+    _StoreTrueArgument,
+)
 from pylint.config.callback_actions import _CallbackAction
 from pylint.config.exceptions import ArgumentPreprocessingError
+
 if TYPE_CHECKING:
     from pylint.lint.run import Run


-def _convert_option_to_argument(opt: str, optdict: dict[str, Any]) ->(
-    _StoreArgument | _StoreTrueArgument | _CallableArgument |
-    _StoreOldNamesArgument | _StoreNewNamesArgument | _ExtendArgument):
+def _convert_option_to_argument(
+    opt: str, optdict: dict[str, Any]
+) -> (
+    _StoreArgument
+    | _StoreTrueArgument
+    | _CallableArgument
+    | _StoreOldNamesArgument
+    | _StoreNewNamesArgument
+    | _ExtendArgument
+):
     """Convert an optdict to an Argument class instance."""
-    pass
+    # Get the long and short flags
+    flags = [f"--{opt}"]
+    if "short" in optdict:
+        flags += [f"-{optdict['short']}"]

+    # Get the action type
+    action = optdict.get("action", "store")

-def _parse_rich_type_value(value: Any) ->str:
+    if action == "store_true":
+        return _StoreTrueArgument(
+            flags=flags,
+            action=action,
+            default=optdict.get("default", True),
+            arg_help=optdict.get("help", ""),
+            hide_help=optdict.get("hide", False),
+            section=optdict.get("group", None),
+        )
+    if not isinstance(action, str) and issubclass(action, _CallbackAction):
+        return _CallableArgument(
+            flags=flags,
+            action=action,
+            arg_help=optdict.get("help", ""),
+            kwargs=optdict.get("kwargs", {}),
+            hide_help=optdict.get("hide", False),
+            section=optdict.get("group", None),
+            metavar=optdict.get("metavar", None),
+        )
+
+    default = optdict["default"]
+
+    if action == "extend":
+        return _ExtendArgument(
+            flags=flags,
+            action=action,
+            default=[] if default is None else default,
+            arg_type=optdict["type"],
+            choices=optdict.get("choices", None),
+            arg_help=optdict.get("help", ""),
+            metavar=optdict.get("metavar", ""),
+            hide_help=optdict.get("hide", False),
+            section=optdict.get("group", None),
+            dest=optdict.get("dest", None),
+        )
+    if "kwargs" in optdict:
+        if "old_names" in optdict["kwargs"]:
+            return _StoreOldNamesArgument(
+                flags=flags,
+                default=default,
+                arg_type=optdict["type"],
+                choices=optdict.get("choices", None),
+                arg_help=optdict.get("help", ""),
+                metavar=optdict.get("metavar", ""),
+                hide_help=optdict.get("hide", False),
+                kwargs=optdict.get("kwargs", {}),
+                section=optdict.get("group", None),
+            )
+        if "new_names" in optdict["kwargs"]:
+            return _StoreNewNamesArgument(
+                flags=flags,
+                default=default,
+                arg_type=optdict["type"],
+                choices=optdict.get("choices", None),
+                arg_help=optdict.get("help", ""),
+                metavar=optdict.get("metavar", ""),
+                hide_help=optdict.get("hide", False),
+                kwargs=optdict.get("kwargs", {}),
+                section=optdict.get("group", None),
+            )
+    if "dest" in optdict:
+        return _StoreOldNamesArgument(
+            flags=flags,
+            default=default,
+            arg_type=optdict["type"],
+            choices=optdict.get("choices", None),
+            arg_help=optdict.get("help", ""),
+            metavar=optdict.get("metavar", ""),
+            hide_help=optdict.get("hide", False),
+            kwargs={"old_names": [optdict["dest"]]},
+            section=optdict.get("group", None),
+        )
+    return _StoreArgument(
+        flags=flags,
+        action=action,
+        default=default,
+        arg_type=optdict["type"],
+        choices=optdict.get("choices", None),
+        arg_help=optdict.get("help", ""),
+        metavar=optdict.get("metavar", ""),
+        hide_help=optdict.get("hide", False),
+        section=optdict.get("group", None),
+    )
+
+
+def _parse_rich_type_value(value: Any) -> str:
     """Parse rich (toml) types into strings."""
-    pass
+    if isinstance(value, (list, tuple)):
+        return ",".join(_parse_rich_type_value(i) for i in value)
+    if isinstance(value, re.Pattern):
+        return str(value.pattern)
+    if isinstance(value, dict):
+        return ",".join(f"{k}:{v}" for k, v in value.items())
+    return str(value)


-def _init_hook(run: Run, value: (str | None)) ->None:
+# pylint: disable-next=unused-argument
+def _init_hook(run: Run, value: str | None) -> None:
     """Execute arbitrary code from the init_hook.

     This can be used to set the 'sys.path' for example.
     """
-    pass
+    assert value is not None
+    exec(value)  # pylint: disable=exec-used


-def _set_rcfile(run: Run, value: (str | None)) ->None:
+def _set_rcfile(run: Run, value: str | None) -> None:
     """Set the rcfile."""
-    pass
+    assert value is not None
+    run._rcfile = value


-def _set_output(run: Run, value: (str | None)) ->None:
+def _set_output(run: Run, value: str | None) -> None:
     """Set the output."""
-    pass
+    assert value is not None
+    run._output = value


-def _add_plugins(run: Run, value: (str | None)) ->None:
+def _add_plugins(run: Run, value: str | None) -> None:
     """Add plugins to the list of loadable plugins."""
-    pass
+    assert value is not None
+    run._plugins.extend(utils._splitstrip(value))
+

+def _set_verbose_mode(run: Run, value: str | None) -> None:
+    assert value is None
+    run.verbose = True

-def _enable_all_extensions(run: Run, value: (str | None)) ->None:
+
+def _enable_all_extensions(run: Run, value: str | None) -> None:
     """Enable all extensions."""
-    pass
+    assert value is None
+    for filename in Path(extensions.__file__).parent.iterdir():
+        if filename.suffix == ".py" and not filename.stem.startswith("_"):
+            extension_name = f"pylint.extensions.{filename.stem}"
+            if extension_name not in run._plugins:
+                run._plugins.append(extension_name)


-PREPROCESSABLE_OPTIONS: dict[str, tuple[bool, Callable[[Run, str | None],
-    None], int]] = {'--init-hook': (True, _init_hook, 8), '--rcfile': (True,
-    _set_rcfile, 4), '--output': (True, _set_output, 0), '--load-plugins':
-    (True, _add_plugins, 5), '--verbose': (False, _set_verbose_mode, 4),
-    '-v': (False, _set_verbose_mode, 2), '--enable-all-extensions': (False,
-    _enable_all_extensions, 9)}
+PREPROCESSABLE_OPTIONS: dict[
+    str, tuple[bool, Callable[[Run, str | None], None], int]
+] = {  # pylint: disable=consider-using-namedtuple-or-dataclass
+    # pylint: disable=useless-suppression, wrong-spelling-in-comment
+    # Argparse by default allows abbreviations. It behaves differently
+    # if you turn this off, so we also turn it on. We mimic this
+    # by allowing some abbreviations or incorrect spelling here.
+    # The integer at the end of the tuple indicates how many letters
+    # should match, include the '-'. 0 indicates a full match.
+    #
+    # Clashes with --init-(import)
+    "--init-hook": (True, _init_hook, 8),
+    # Clashes with --r(ecursive)
+    "--rcfile": (True, _set_rcfile, 4),
+    # Clashes with --output(-format)
+    "--output": (True, _set_output, 0),
+    # Clashes with --lo(ng-help)
+    "--load-plugins": (True, _add_plugins, 5),
+    # Clashes with --v(ariable-rgx)
+    "--verbose": (False, _set_verbose_mode, 4),
+    "-v": (False, _set_verbose_mode, 2),
+    # Clashes with --enable
+    "--enable-all-extensions": (False, _enable_all_extensions, 9),
+}
+# pylint: enable=wrong-spelling-in-comment


-def _preprocess_options(run: Run, args: Sequence[str]) ->list[str]:
+def _preprocess_options(run: Run, args: Sequence[str]) -> list[str]:
     """Pre-process options before full config parsing has started."""
-    pass
+    processed_args: list[str] = []
+
+    i = 0
+    while i < len(args):
+        argument = args[i]
+        if not argument.startswith("-"):
+            processed_args.append(argument)
+            i += 1
+            continue
+
+        try:
+            option, value = argument.split("=", 1)
+        except ValueError:
+            option, value = argument, None
+
+        matched_option = None
+        for option_name, data in PREPROCESSABLE_OPTIONS.items():
+            to_match = data[2]
+            if to_match == 0:
+                if option == option_name:
+                    matched_option = option_name
+            elif option.startswith(option_name[:to_match]):
+                matched_option = option_name
+
+        if matched_option is None:
+            processed_args.append(argument)
+            i += 1
+            continue
+
+        takearg, cb, _ = PREPROCESSABLE_OPTIONS[matched_option]
+
+        if takearg and value is None:
+            i += 1
+            if i >= len(args) or args[i].startswith("-"):
+                raise ArgumentPreprocessingError(f"Option {option} expects a value")
+            value = args[i]
+        elif not takearg and value is not None:
+            raise ArgumentPreprocessingError(f"Option {option} doesn't expect a value")
+
+        cb(run, value)
+        i += 1
+
+    return processed_args
diff --git a/pylint/constants.py b/pylint/constants.py
index 915082479..f147e5189 100644
--- a/pylint/constants.py
+++ b/pylint/constants.py
@@ -1,130 +1,279 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import os
 import platform
 import sys
+
 import astroid
 import platformdirs
+
 from pylint.__pkginfo__ import __version__
 from pylint.typing import MessageTypesFullName
+
 PY38_PLUS = sys.version_info[:2] >= (3, 8)
 PY39_PLUS = sys.version_info[:2] >= (3, 9)
 PY310_PLUS = sys.version_info[:2] >= (3, 10)
 PY311_PLUS = sys.version_info[:2] >= (3, 11)
 PY312_PLUS = sys.version_info[:2] >= (3, 12)
-IS_PYPY = platform.python_implementation() == 'PyPy'
-PY_EXTS = '.py', '.pyc', '.pyo', '.pyw', '.so', '.dll'
+
+IS_PYPY = platform.python_implementation() == "PyPy"
+
+PY_EXTS = (".py", ".pyc", ".pyo", ".pyw", ".so", ".dll")
+
 MSG_STATE_CONFIDENCE = 2
-_MSG_ORDER = 'EWRCIF'
+_MSG_ORDER = "EWRCIF"
 MSG_STATE_SCOPE_CONFIG = 0
 MSG_STATE_SCOPE_MODULE = 1
-_SCOPE_EXEMPT = 'FR'
-MSG_TYPES: dict[str, MessageTypesFullName] = {'I': 'info', 'C':
-    'convention', 'R': 'refactor', 'W': 'warning', 'E': 'error', 'F': 'fatal'}
+
+# The line/node distinction does not apply to fatal errors and reports.
+_SCOPE_EXEMPT = "FR"
+
+MSG_TYPES: dict[str, MessageTypesFullName] = {
+    "I": "info",
+    "C": "convention",
+    "R": "refactor",
+    "W": "warning",
+    "E": "error",
+    "F": "fatal",
+}
 MSG_TYPES_LONG: dict[str, str] = {v: k for k, v in MSG_TYPES.items()}
-MSG_TYPES_STATUS = {'I': 0, 'C': 16, 'R': 8, 'W': 4, 'E': 2, 'F': 1}
-MAIN_CHECKER_NAME = 'main'
-DEFAULT_PYLINT_HOME = platformdirs.user_cache_dir('pylint')
-DEFAULT_IGNORE_LIST = 'CVS',
+
+MSG_TYPES_STATUS = {"I": 0, "C": 16, "R": 8, "W": 4, "E": 2, "F": 1}
+
+# You probably don't want to change the MAIN_CHECKER_NAME
+# This would affect rcfile generation and retro-compatibility
+# on all project using [MAIN] in their rcfile.
+MAIN_CHECKER_NAME = "main"
+
+DEFAULT_PYLINT_HOME = platformdirs.user_cache_dir("pylint")
+
+DEFAULT_IGNORE_LIST = ("CVS",)


 class WarningScope:
-    LINE = 'line-based-msg'
-    NODE = 'node-based-msg'
+    LINE = "line-based-msg"
+    NODE = "node-based-msg"


 full_version = f"""pylint {__version__}
 astroid {astroid.__version__}
 Python {sys.version}"""
-HUMAN_READABLE_TYPES = {'file': 'file', 'module': 'module', 'const':
-    'constant', 'class': 'class', 'function': 'function', 'method':
-    'method', 'attr': 'attribute', 'argument': 'argument', 'variable':
-    'variable', 'class_attribute': 'class attribute', 'class_const':
-    'class constant', 'inlinevar': 'inline iteration', 'typevar':
-    'type variable', 'typealias': 'type alias'}
-INCOMPATIBLE_WITH_USELESS_SUPPRESSION = frozenset(['R0401', 'W0402',
-    'W1505', 'W1511', 'W1512', 'W1513', 'R0801'])

+HUMAN_READABLE_TYPES = {
+    "file": "file",
+    "module": "module",
+    "const": "constant",
+    "class": "class",
+    "function": "function",
+    "method": "method",
+    "attr": "attribute",
+    "argument": "argument",
+    "variable": "variable",
+    "class_attribute": "class attribute",
+    "class_const": "class constant",
+    "inlinevar": "inline iteration",
+    "typevar": "type variable",
+    "typealias": "type alias",
+}

-def _get_pylint_home() ->str:
+# ignore some messages when emitting useless-suppression:
+# - cyclic-import: can show false positives due to incomplete context
+# - deprecated-{module, argument, class, method, decorator}:
+#   can cause false positives for multi-interpreter projects
+#   when linting with an interpreter on a lower python version
+INCOMPATIBLE_WITH_USELESS_SUPPRESSION = frozenset(
+    [
+        "R0401",  # cyclic-import
+        "W0402",  # deprecated-module
+        "W1505",  # deprecated-method
+        "W1511",  # deprecated-argument
+        "W1512",  # deprecated-class
+        "W1513",  # deprecated-decorator
+        "R0801",  # duplicate-code
+    ]
+)
+
+
+def _get_pylint_home() -> str:
     """Return the pylint home."""
-    pass
+    if "PYLINTHOME" in os.environ:
+        return os.environ["PYLINTHOME"]
+    return DEFAULT_PYLINT_HOME


 PYLINT_HOME = _get_pylint_home()
-TYPING_NORETURN = frozenset(('typing.NoReturn', 'typing_extensions.NoReturn'))
-TYPING_NEVER = frozenset(('typing.Never', 'typing_extensions.Never'))
-DUNDER_METHODS: dict[tuple[int, int], dict[str, str]] = {(0, 0): {
-    '__init__': 'Instantiate class directly', '__del__': 'Use del keyword',
-    '__repr__': 'Use repr built-in function', '__str__':
-    'Use str built-in function', '__bytes__': 'Use bytes built-in function',
-    '__format__':
-    'Use format built-in function, format string method, or f-string',
-    '__lt__': 'Use < operator', '__le__': 'Use <= operator', '__eq__':
-    'Use == operator', '__ne__': 'Use != operator', '__gt__':
-    'Use > operator', '__ge__': 'Use >= operator', '__hash__':
-    'Use hash built-in function', '__bool__': 'Use bool built-in function',
-    '__getattr__':
-    'Access attribute directly or use getattr built-in function',
-    '__getattribute__':
-    'Access attribute directly or use getattr built-in function',
-    '__setattr__':
-    'Set attribute directly or use setattr built-in function',
-    '__delattr__': 'Use del keyword', '__dir__':
-    'Use dir built-in function', '__get__': 'Use get method', '__set__':
-    'Use set method', '__delete__': 'Use del keyword', '__instancecheck__':
-    'Use isinstance built-in function', '__subclasscheck__':
-    'Use issubclass built-in function', '__call__':
-    'Invoke instance directly', '__len__': 'Use len built-in function',
-    '__length_hint__': 'Use length_hint method', '__getitem__':
-    'Access item via subscript', '__setitem__': 'Set item via subscript',
-    '__delitem__': 'Use del keyword', '__iter__':
-    'Use iter built-in function', '__next__': 'Use next built-in function',
-    '__reversed__': 'Use reversed built-in function', '__contains__':
-    'Use in keyword', '__add__': 'Use + operator', '__sub__':
-    'Use - operator', '__mul__': 'Use * operator', '__matmul__':
-    'Use @ operator', '__truediv__': 'Use / operator', '__floordiv__':
-    'Use // operator', '__mod__': 'Use % operator', '__divmod__':
-    'Use divmod built-in function', '__pow__':
-    'Use ** operator or pow built-in function', '__lshift__':
-    'Use << operator', '__rshift__': 'Use >> operator', '__and__':
-    'Use & operator', '__xor__': 'Use ^ operator', '__or__':
-    'Use | operator', '__radd__': 'Use + operator', '__rsub__':
-    'Use - operator', '__rmul__': 'Use * operator', '__rmatmul__':
-    'Use @ operator', '__rtruediv__': 'Use / operator', '__rfloordiv__':
-    'Use // operator', '__rmod__': 'Use % operator', '__rdivmod__':
-    'Use divmod built-in function', '__rpow__':
-    'Use ** operator or pow built-in function', '__rlshift__':
-    'Use << operator', '__rrshift__': 'Use >> operator', '__rand__':
-    'Use & operator', '__rxor__': 'Use ^ operator', '__ror__':
-    'Use | operator', '__iadd__': 'Use += operator', '__isub__':
-    'Use -= operator', '__imul__': 'Use *= operator', '__imatmul__':
-    'Use @= operator', '__itruediv__': 'Use /= operator', '__ifloordiv__':
-    'Use //= operator', '__imod__': 'Use %= operator', '__ipow__':
-    'Use **= operator', '__ilshift__': 'Use <<= operator', '__irshift__':
-    'Use >>= operator', '__iand__': 'Use &= operator', '__ixor__':
-    'Use ^= operator', '__ior__': 'Use |= operator', '__neg__':
-    'Multiply by -1 instead', '__pos__': 'Multiply by +1 instead',
-    '__abs__': 'Use abs built-in function', '__invert__': 'Use ~ operator',
-    '__complex__': 'Use complex built-in function', '__int__':
-    'Use int built-in function', '__float__': 'Use float built-in function',
-    '__round__': 'Use round built-in function', '__trunc__':
-    'Use math.trunc function', '__floor__': 'Use math.floor function',
-    '__ceil__': 'Use math.ceil function', '__enter__':
-    'Invoke context manager directly', '__aenter__':
-    'Invoke context manager directly', '__copy__': 'Use copy.copy function',
-    '__deepcopy__': 'Use copy.deepcopy function', '__fspath__':
-    'Use os.fspath function instead'}, (3, 10): {'__aiter__':
-    'Use aiter built-in function', '__anext__': 'Use anext built-in function'}}
-EXTRA_DUNDER_METHODS = ['__new__', '__subclasses__', '__init_subclass__',
-    '__set_name__', '__class_getitem__', '__missing__', '__exit__',
-    '__await__', '__aexit__', '__getnewargs_ex__', '__getnewargs__',
-    '__getstate__', '__index__', '__setstate__', '__reduce__',
-    '__reduce_ex__', '__post_init__']
-DUNDER_PROPERTIES = ['__class__', '__dict__', '__doc__', '__format__',
-    '__module__', '__sizeof__', '__subclasshook__', '__weakref__']
-UNNECESSARY_DUNDER_CALL_LAMBDA_EXCEPTIONS = ['__init__', '__del__',
-    '__delattr__', '__set__', '__delete__', '__setitem__', '__delitem__',
-    '__iadd__', '__isub__', '__imul__', '__imatmul__', '__itruediv__',
-    '__ifloordiv__', '__imod__', '__ipow__', '__ilshift__', '__irshift__',
-    '__iand__', '__ixor__', '__ior__']
+
+TYPING_NORETURN = frozenset(
+    (
+        "typing.NoReturn",
+        "typing_extensions.NoReturn",
+    )
+)
+TYPING_NEVER = frozenset(
+    (
+        "typing.Never",
+        "typing_extensions.Never",
+    )
+)
+
+DUNDER_METHODS: dict[tuple[int, int], dict[str, str]] = {
+    (0, 0): {
+        "__init__": "Instantiate class directly",
+        "__del__": "Use del keyword",
+        "__repr__": "Use repr built-in function",
+        "__str__": "Use str built-in function",
+        "__bytes__": "Use bytes built-in function",
+        "__format__": "Use format built-in function, format string method, or f-string",
+        "__lt__": "Use < operator",
+        "__le__": "Use <= operator",
+        "__eq__": "Use == operator",
+        "__ne__": "Use != operator",
+        "__gt__": "Use > operator",
+        "__ge__": "Use >= operator",
+        "__hash__": "Use hash built-in function",
+        "__bool__": "Use bool built-in function",
+        "__getattr__": "Access attribute directly or use getattr built-in function",
+        "__getattribute__": "Access attribute directly or use getattr built-in function",
+        "__setattr__": "Set attribute directly or use setattr built-in function",
+        "__delattr__": "Use del keyword",
+        "__dir__": "Use dir built-in function",
+        "__get__": "Use get method",
+        "__set__": "Use set method",
+        "__delete__": "Use del keyword",
+        "__instancecheck__": "Use isinstance built-in function",
+        "__subclasscheck__": "Use issubclass built-in function",
+        "__call__": "Invoke instance directly",
+        "__len__": "Use len built-in function",
+        "__length_hint__": "Use length_hint method",
+        "__getitem__": "Access item via subscript",
+        "__setitem__": "Set item via subscript",
+        "__delitem__": "Use del keyword",
+        "__iter__": "Use iter built-in function",
+        "__next__": "Use next built-in function",
+        "__reversed__": "Use reversed built-in function",
+        "__contains__": "Use in keyword",
+        "__add__": "Use + operator",
+        "__sub__": "Use - operator",
+        "__mul__": "Use * operator",
+        "__matmul__": "Use @ operator",
+        "__truediv__": "Use / operator",
+        "__floordiv__": "Use // operator",
+        "__mod__": "Use % operator",
+        "__divmod__": "Use divmod built-in function",
+        "__pow__": "Use ** operator or pow built-in function",
+        "__lshift__": "Use << operator",
+        "__rshift__": "Use >> operator",
+        "__and__": "Use & operator",
+        "__xor__": "Use ^ operator",
+        "__or__": "Use | operator",
+        "__radd__": "Use + operator",
+        "__rsub__": "Use - operator",
+        "__rmul__": "Use * operator",
+        "__rmatmul__": "Use @ operator",
+        "__rtruediv__": "Use / operator",
+        "__rfloordiv__": "Use // operator",
+        "__rmod__": "Use % operator",
+        "__rdivmod__": "Use divmod built-in function",
+        "__rpow__": "Use ** operator or pow built-in function",
+        "__rlshift__": "Use << operator",
+        "__rrshift__": "Use >> operator",
+        "__rand__": "Use & operator",
+        "__rxor__": "Use ^ operator",
+        "__ror__": "Use | operator",
+        "__iadd__": "Use += operator",
+        "__isub__": "Use -= operator",
+        "__imul__": "Use *= operator",
+        "__imatmul__": "Use @= operator",
+        "__itruediv__": "Use /= operator",
+        "__ifloordiv__": "Use //= operator",
+        "__imod__": "Use %= operator",
+        "__ipow__": "Use **= operator",
+        "__ilshift__": "Use <<= operator",
+        "__irshift__": "Use >>= operator",
+        "__iand__": "Use &= operator",
+        "__ixor__": "Use ^= operator",
+        "__ior__": "Use |= operator",
+        "__neg__": "Multiply by -1 instead",
+        "__pos__": "Multiply by +1 instead",
+        "__abs__": "Use abs built-in function",
+        "__invert__": "Use ~ operator",
+        "__complex__": "Use complex built-in function",
+        "__int__": "Use int built-in function",
+        "__float__": "Use float built-in function",
+        "__round__": "Use round built-in function",
+        "__trunc__": "Use math.trunc function",
+        "__floor__": "Use math.floor function",
+        "__ceil__": "Use math.ceil function",
+        "__enter__": "Invoke context manager directly",
+        "__aenter__": "Invoke context manager directly",
+        "__copy__": "Use copy.copy function",
+        "__deepcopy__": "Use copy.deepcopy function",
+        "__fspath__": "Use os.fspath function instead",
+    },
+    (3, 10): {
+        "__aiter__": "Use aiter built-in function",
+        "__anext__": "Use anext built-in function",
+    },
+}
+
+EXTRA_DUNDER_METHODS = [
+    "__new__",
+    "__subclasses__",
+    "__init_subclass__",
+    "__set_name__",
+    "__class_getitem__",
+    "__missing__",
+    "__exit__",
+    "__await__",
+    "__aexit__",
+    "__getnewargs_ex__",
+    "__getnewargs__",
+    "__getstate__",
+    "__index__",
+    "__setstate__",
+    "__reduce__",
+    "__reduce_ex__",
+    "__post_init__",  # part of `dataclasses` module
+]
+
+DUNDER_PROPERTIES = [
+    "__class__",
+    "__dict__",
+    "__doc__",
+    "__format__",
+    "__module__",
+    "__sizeof__",
+    "__subclasshook__",
+    "__weakref__",
+]
+
+# C2801 rule exceptions as their corresponding function/method/operator
+# is not valid python syntax in a lambda definition
+UNNECESSARY_DUNDER_CALL_LAMBDA_EXCEPTIONS = [
+    "__init__",
+    "__del__",
+    "__delattr__",
+    "__set__",
+    "__delete__",
+    "__setitem__",
+    "__delitem__",
+    "__iadd__",
+    "__isub__",
+    "__imul__",
+    "__imatmul__",
+    "__itruediv__",
+    "__ifloordiv__",
+    "__imod__",
+    "__ipow__",
+    "__ilshift__",
+    "__irshift__",
+    "__iand__",
+    "__ixor__",
+    "__ior__",
+]
+
 MAX_NUMBER_OF_IMPORT_SHOWN = 6
diff --git a/pylint/exceptions.py b/pylint/exceptions.py
index 2721a6072..2bfbfa8cc 100644
--- a/pylint/exceptions.py
+++ b/pylint/exceptions.py
@@ -1,3 +1,7 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Exception classes raised by various operations within pylint."""


@@ -17,7 +21,7 @@ class DeletedMessageError(UnknownMessageError):
     def __init__(self, msgid_or_symbol: str, removal_explanation: str):
         super().__init__(
             f"'{msgid_or_symbol}' was removed from pylint, see {removal_explanation}."
-            )
+        )


 class MessageBecameExtensionError(UnknownMessageError):
@@ -28,7 +32,7 @@ class MessageBecameExtensionError(UnknownMessageError):
     def __init__(self, msgid_or_symbol: str, moved_explanation: str):
         super().__init__(
             f"'{msgid_or_symbol}' was moved to an optional extension, see {moved_explanation}."
-            )
+        )


 class EmptyReportError(Exception):
diff --git a/pylint/extensions/_check_docs_utils.py b/pylint/extensions/_check_docs_utils.py
index 4e60e6372..9b4b3e0db 100644
--- a/pylint/extensions/_check_docs_utils.py
+++ b/pylint/extensions/_check_docs_utils.py
@@ -1,15 +1,23 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Utility methods for docstring checking."""
+
 from __future__ import annotations
+
 import itertools
 import re
 from collections.abc import Iterable
+
 import astroid
 from astroid import nodes
 from astroid.util import UninferableBase
+
 from pylint.checkers import utils


-def space_indentation(s: str) ->int:
+def space_indentation(s: str) -> int:
     """The number of leading spaces in a string.

     :param str s: input string
@@ -17,10 +25,10 @@ def space_indentation(s: str) ->int:
     :rtype: int
     :return: number of leading spaces
     """
-    pass
+    return len(s) - len(s.lstrip(" "))


-def get_setters_property_name(node: nodes.FunctionDef) ->(str | None):
+def get_setters_property_name(node: nodes.FunctionDef) -> str | None:
     """Get the name of the property that the given node is a setter for.

     :param node: The node to get the property name for.
@@ -30,10 +38,18 @@ def get_setters_property_name(node: nodes.FunctionDef) ->(str | None):
     :returns: The name of the property that the node is a setter for,
         or None if one could not be found.
     """
-    pass
-
-
-def get_setters_property(node: nodes.FunctionDef) ->(nodes.FunctionDef | None):
+    decorators = node.decorators.nodes if node.decorators else []
+    for decorator in decorators:
+        if (
+            isinstance(decorator, nodes.Attribute)
+            and decorator.attrname == "setter"
+            and isinstance(decorator.expr, nodes.Name)
+        ):
+            return decorator.expr.name  # type: ignore[no-any-return]
+    return None
+
+
+def get_setters_property(node: nodes.FunctionDef) -> nodes.FunctionDef | None:
     """Get the property node for the given setter node.

     :param node: The node to get the property for.
@@ -43,10 +59,21 @@ def get_setters_property(node: nodes.FunctionDef) ->(nodes.FunctionDef | None):
     :returns: The node relating to the property of the given setter node,
         or None if one could not be found.
     """
-    pass
+    property_ = None
+
+    property_name = get_setters_property_name(node)
+    class_node = utils.node_frame_class(node)
+    if property_name and class_node:
+        class_attrs: list[nodes.FunctionDef] = class_node.getattr(node.name)
+        for attr in class_attrs:
+            if utils.decorated_with_property(attr):
+                property_ = attr
+                break
+
+    return property_


-def returns_something(return_node: nodes.Return) ->bool:
+def returns_something(return_node: nodes.Return) -> bool:
     """Check if a return node returns a value other than None.

     :param return_node: The return node to check.
@@ -56,10 +83,28 @@ def returns_something(return_node: nodes.Return) ->bool:
     :return: True if the return node returns a value other than None,
         False otherwise.
     """
-    pass
+    returns = return_node.value

+    if returns is None:
+        return False

-def possible_exc_types(node: nodes.NodeNG) ->set[nodes.ClassDef]:
+    return not (isinstance(returns, nodes.Const) and returns.value is None)
+
+
+def _get_raise_target(node: nodes.NodeNG) -> nodes.NodeNG | UninferableBase | None:
+    if isinstance(node.exc, nodes.Call):
+        func = node.exc.func
+        if isinstance(func, (nodes.Name, nodes.Attribute)):
+            return utils.safe_infer(func)
+    return None
+
+
+def _split_multiple_exc_types(target: str) -> list[str]:
+    delimiters = r"(\s*,(?:\s*or\s)?\s*|\s+or\s+)"
+    return re.split(delimiters, target)
+
+
+def possible_exc_types(node: nodes.NodeNG) -> set[nodes.ClassDef]:
     """Gets all the possible raised exception types for the given raise node.

     .. note::
@@ -70,10 +115,69 @@ def possible_exc_types(node: nodes.NodeNG) ->set[nodes.ClassDef]:

     :returns: A list of exception types possibly raised by :param:`node`.
     """
-    pass
-
-
-def _annotations_list(args_node: nodes.Arguments) ->list[nodes.NodeNG]:
+    exceptions = []
+    if isinstance(node.exc, nodes.Name):
+        inferred = utils.safe_infer(node.exc)
+        if inferred:
+            exceptions = [inferred]
+    elif node.exc is None:
+        handler = node.parent
+        while handler and not isinstance(handler, nodes.ExceptHandler):
+            handler = handler.parent
+
+        if handler and handler.type:
+            try:
+                for exception in astroid.unpack_infer(handler.type):
+                    if not isinstance(exception, UninferableBase):
+                        exceptions.append(exception)
+            except astroid.InferenceError:
+                pass
+    else:
+        target = _get_raise_target(node)
+        if isinstance(target, nodes.ClassDef):
+            exceptions = [target]
+        elif isinstance(target, nodes.FunctionDef):
+            for ret in target.nodes_of_class(nodes.Return):
+                if ret.value is None:
+                    continue
+                if ret.frame() != target:
+                    # return from inner function - ignore it
+                    continue
+
+                val = utils.safe_infer(ret.value)
+                if val and utils.inherit_from_std_ex(val):
+                    if isinstance(val, nodes.ClassDef):
+                        exceptions.append(val)
+                    elif isinstance(val, astroid.Instance):
+                        exceptions.append(val.getattr("__class__")[0])
+
+    try:
+        return {
+            exc
+            for exc in exceptions
+            if not utils.node_ignores_exception(node, exc.name)
+        }
+    except astroid.InferenceError:
+        return set()
+
+
+def _is_ellipsis(node: nodes.NodeNG) -> bool:
+    return isinstance(node, nodes.Const) and node.value == Ellipsis
+
+
+def _merge_annotations(
+    annotations: Iterable[nodes.NodeNG], comment_annotations: Iterable[nodes.NodeNG]
+) -> Iterable[nodes.NodeNG | None]:
+    for ann, comment_ann in itertools.zip_longest(annotations, comment_annotations):
+        if ann and not _is_ellipsis(ann):
+            yield ann
+        elif comment_ann and not _is_ellipsis(comment_ann):
+            yield comment_ann
+        else:
+            yield None
+
+
+def _annotations_list(args_node: nodes.Arguments) -> list[nodes.NodeNG]:
     """Get a merged list of annotations.

     The annotations can come from:
@@ -85,103 +189,298 @@ def _annotations_list(args_node: nodes.Arguments) ->list[nodes.NodeNG]:
     :param args_node: The node to get the annotations for.
     :returns: The annotations.
     """
-    pass
+    plain_annotations = args_node.annotations or ()
+    func_comment_annotations = args_node.parent.type_comment_args or ()
+    comment_annotations = args_node.type_comment_posonlyargs
+    comment_annotations += args_node.type_comment_args or []
+    comment_annotations += args_node.type_comment_kwonlyargs
+    return list(
+        _merge_annotations(
+            plain_annotations,
+            _merge_annotations(func_comment_annotations, comment_annotations),
+        )
+    )
+
+
+def args_with_annotation(args_node: nodes.Arguments) -> set[str]:
+    result = set()
+    annotations = _annotations_list(args_node)
+    annotation_offset = 0
+
+    if args_node.posonlyargs:
+        posonlyargs_annotations = args_node.posonlyargs_annotations
+        if not any(args_node.posonlyargs_annotations):
+            num_args = len(args_node.posonlyargs)
+            posonlyargs_annotations = annotations[
+                annotation_offset : annotation_offset + num_args
+            ]
+            annotation_offset += num_args
+
+        for arg, annotation in zip(args_node.posonlyargs, posonlyargs_annotations):
+            if annotation:
+                result.add(arg.name)
+
+    if args_node.args:
+        num_args = len(args_node.args)
+        for arg, annotation in zip(
+            args_node.args,
+            annotations[annotation_offset : annotation_offset + num_args],
+        ):
+            if annotation:
+                result.add(arg.name)
+
+        annotation_offset += num_args
+
+    if args_node.vararg:
+        if args_node.varargannotation:
+            result.add(args_node.vararg)
+        elif len(annotations) > annotation_offset and annotations[annotation_offset]:
+            result.add(args_node.vararg)
+            annotation_offset += 1
+
+    if args_node.kwonlyargs:
+        kwonlyargs_annotations = args_node.kwonlyargs_annotations
+        if not any(args_node.kwonlyargs_annotations):
+            num_args = len(args_node.kwonlyargs)
+            kwonlyargs_annotations = annotations[
+                annotation_offset : annotation_offset + num_args
+            ]
+            annotation_offset += num_args
+
+        for arg, annotation in zip(args_node.kwonlyargs, kwonlyargs_annotations):
+            if annotation:
+                result.add(arg.name)
+
+    if args_node.kwarg:
+        if args_node.kwargannotation:
+            result.add(args_node.kwarg)
+        elif len(annotations) > annotation_offset and annotations[annotation_offset]:
+            result.add(args_node.kwarg)
+            annotation_offset += 1
+
+    return result
+
+
+def docstringify(
+    docstring: nodes.Const | None, default_type: str = "default"
+) -> Docstring:
+    best_match = (0, DOCSTRING_TYPES.get(default_type, Docstring)(docstring))
+    for docstring_type in (
+        SphinxDocstring,
+        EpytextDocstring,
+        GoogleDocstring,
+        NumpyDocstring,
+    ):
+        instance = docstring_type(docstring)
+        matching_sections = instance.matching_sections()
+        if matching_sections > best_match[0]:
+            best_match = (matching_sections, instance)
+
+    return best_match[1]


 class Docstring:
     re_for_parameters_see = re.compile(
-        """
-        For\\s+the\\s+(other)?\\s*parameters\\s*,\\s+see
-        """
-        , re.X | re.S)
+        r"""
+        For\s+the\s+(other)?\s*parameters\s*,\s+see
+        """,
+        re.X | re.S,
+    )
+
     supports_yields: bool = False
     """True if the docstring supports a "yield" section.

     False if the docstring uses the returns section to document generators.
     """

-    def __init__(self, doc: (nodes.Const | None)) ->None:
-        docstring: str = doc.value if doc else ''
+    # These methods are designed to be overridden
+    def __init__(self, doc: nodes.Const | None) -> None:
+        docstring: str = doc.value if doc else ""
         self.doc = docstring.expandtabs()

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return f"<{self.__class__.__name__}:'''{self.doc}'''>"

-    def matching_sections(self) ->int:
+    def matching_sections(self) -> int:
         """Returns the number of matching docstring sections."""
-        pass
+        return 0
+
+    def exceptions(self) -> set[str]:
+        return set()
+
+    def has_params(self) -> bool:
+        return False
+
+    def has_returns(self) -> bool:
+        return False
+
+    def has_rtype(self) -> bool:
+        return False
+
+    def has_property_returns(self) -> bool:
+        return False
+
+    def has_property_type(self) -> bool:
+        return False
+
+    def has_yields(self) -> bool:
+        return False
+
+    def has_yields_type(self) -> bool:
+        return False
+
+    def match_param_docs(self) -> tuple[set[str], set[str]]:
+        return set(), set()
+
+    def params_documented_elsewhere(self) -> bool:
+        return self.re_for_parameters_see.search(self.doc) is not None


 class SphinxDocstring(Docstring):
-    re_type = """
+    re_type = r"""
         [~!.]?               # Optional link style prefix
-        \\w(?:\\w|\\.[^\\.])*    # Valid python name
+        \w(?:\w|\.[^\.])*    # Valid python name
         """
-    re_simple_container_type = f"""
+
+    re_simple_container_type = rf"""
         {re_type}                     # a container type
-        [\\(\\[] [^\\n\\s]+ [\\)\\]]        # with the contents of the container
+        [\(\[] [^\n\s]+ [\)\]]        # with the contents of the container
     """
-    re_multiple_simple_type = f"""
+
+    re_multiple_simple_type = rf"""
         (?:{re_simple_container_type}|{re_type})
-        (?:(?:\\s+(?:of|or)\\s+|\\s*,\\s*|\\s+\\|\\s+)(?:{re_simple_container_type}|{re_type}))*
+        (?:(?:\s+(?:of|or)\s+|\s*,\s*|\s+\|\s+)(?:{re_simple_container_type}|{re_type}))*
     """
-    re_xref = f"""
-        (?::\\w+:)?                    # optional tag
+
+    re_xref = rf"""
+        (?::\w+:)?                    # optional tag
         `{re_type}`                   # what to reference
         """
-    re_param_raw = f"""
+
+    re_param_raw = rf"""
         :                       # initial colon
         (?:                     # Sphinx keywords
         param|parameter|
         arg|argument|
         key|keyword
         )
-        \\s+                     # whitespace
+        \s+                     # whitespace

         (?:                     # optional type declaration
         ({re_type}|{re_simple_container_type})
-        \\s+
+        \s+
         )?

-        ((\\\\\\*{{0,2}}\\w+)|(\\w+))  # Parameter name with potential asterisks
-        \\s*                       # whitespace
+        ((\\\*{{0,2}}\w+)|(\w+))  # Parameter name with potential asterisks
+        \s*                       # whitespace
         :                         # final colon
         """
     re_param_in_docstring = re.compile(re_param_raw, re.X | re.S)
-    re_type_raw = f"""
+
+    re_type_raw = rf"""
         :type                           # Sphinx keyword
-        \\s+                             # whitespace
+        \s+                             # whitespace
         ({re_multiple_simple_type})     # Parameter name
-        \\s*                             # whitespace
+        \s*                             # whitespace
         :                               # final colon
         """
     re_type_in_docstring = re.compile(re_type_raw, re.X | re.S)
-    re_property_type_raw = f"""
+
+    re_property_type_raw = rf"""
         :type:                      # Sphinx keyword
-        \\s+                         # whitespace
+        \s+                         # whitespace
         {re_multiple_simple_type}   # type declaration
         """
-    re_property_type_in_docstring = re.compile(re_property_type_raw, re.X |
-        re.S)
-    re_raise_raw = f"""
+    re_property_type_in_docstring = re.compile(re_property_type_raw, re.X | re.S)
+
+    re_raise_raw = rf"""
         :                               # initial colon
         (?:                             # Sphinx keyword
         raises?|
         except|exception
         )
-        \\s+                             # whitespace
+        \s+                             # whitespace
         ({re_multiple_simple_type})     # exception type
-        \\s*                             # whitespace
+        \s*                             # whitespace
         :                               # final colon
         """
     re_raise_in_docstring = re.compile(re_raise_raw, re.X | re.S)
-    re_rtype_in_docstring = re.compile(':rtype:')
-    re_returns_in_docstring = re.compile(':returns?:')
+
+    re_rtype_in_docstring = re.compile(r":rtype:")
+
+    re_returns_in_docstring = re.compile(r":returns?:")
+
     supports_yields = False

-    def matching_sections(self) ->int:
+    def matching_sections(self) -> int:
         """Returns the number of matching docstring sections."""
-        pass
+        return sum(
+            bool(i)
+            for i in (
+                self.re_param_in_docstring.search(self.doc),
+                self.re_raise_in_docstring.search(self.doc),
+                self.re_rtype_in_docstring.search(self.doc),
+                self.re_returns_in_docstring.search(self.doc),
+                self.re_property_type_in_docstring.search(self.doc),
+            )
+        )
+
+    def exceptions(self) -> set[str]:
+        types: set[str] = set()
+
+        for match in re.finditer(self.re_raise_in_docstring, self.doc):
+            raise_type = match.group(1)
+            types.update(_split_multiple_exc_types(raise_type))
+
+        return types
+
+    def has_params(self) -> bool:
+        if not self.doc:
+            return False
+
+        return self.re_param_in_docstring.search(self.doc) is not None
+
+    def has_returns(self) -> bool:
+        if not self.doc:
+            return False
+
+        return bool(self.re_returns_in_docstring.search(self.doc))
+
+    def has_rtype(self) -> bool:
+        if not self.doc:
+            return False
+
+        return bool(self.re_rtype_in_docstring.search(self.doc))
+
+    def has_property_returns(self) -> bool:
+        if not self.doc:
+            return False
+
+        # The summary line is the return doc,
+        # so the first line must not be a known directive.
+        return not self.doc.lstrip().startswith(":")
+
+    def has_property_type(self) -> bool:
+        if not self.doc:
+            return False
+
+        return bool(self.re_property_type_in_docstring.search(self.doc))
+
+    def match_param_docs(self) -> tuple[set[str], set[str]]:
+        params_with_doc = set()
+        params_with_type = set()
+
+        for match in re.finditer(self.re_param_in_docstring, self.doc):
+            name = match.group(2)
+            # Remove escape characters necessary for asterisks
+            name = name.replace("\\", "")
+            params_with_doc.add(name)
+            param_type = match.group(1)
+            if param_type is not None:
+                params_with_type.add(name)
+
+        params_with_type.update(re.findall(self.re_type_in_docstring, self.doc))
+        return params_with_doc, params_with_type


 class EpytextDocstring(SphinxDocstring):
@@ -195,144 +494,452 @@ class EpytextDocstring(SphinxDocstring):
         https://www.jetbrains.com/help/pycharm/2016.1/creating-documentation-comments.html#d848203e314
         https://www.jetbrains.com/help/pycharm/2016.1/using-docstrings-to-specify-types.html
     """
-    re_param_in_docstring = re.compile(SphinxDocstring.re_param_raw.replace
-        (':', '@', 1), re.X | re.S)
-    re_type_in_docstring = re.compile(SphinxDocstring.re_type_raw.replace(
-        ':', '@', 1), re.X | re.S)
-    re_property_type_in_docstring = re.compile(SphinxDocstring.
-        re_property_type_raw.replace(':', '@', 1), re.X | re.S)
-    re_raise_in_docstring = re.compile(SphinxDocstring.re_raise_raw.replace
-        (':', '@', 1), re.X | re.S)
+
+    re_param_in_docstring = re.compile(
+        SphinxDocstring.re_param_raw.replace(":", "@", 1), re.X | re.S
+    )
+
+    re_type_in_docstring = re.compile(
+        SphinxDocstring.re_type_raw.replace(":", "@", 1), re.X | re.S
+    )
+
+    re_property_type_in_docstring = re.compile(
+        SphinxDocstring.re_property_type_raw.replace(":", "@", 1), re.X | re.S
+    )
+
+    re_raise_in_docstring = re.compile(
+        SphinxDocstring.re_raise_raw.replace(":", "@", 1), re.X | re.S
+    )
+
     re_rtype_in_docstring = re.compile(
-        """
+        r"""
         @                       # initial "at" symbol
         (?:                     # Epytext keyword
         rtype|returntype
         )
         :                       # final colon
-        """
-        , re.X | re.S)
-    re_returns_in_docstring = re.compile('@returns?:')
+        """,
+        re.X | re.S,
+    )
+
+    re_returns_in_docstring = re.compile(r"@returns?:")
+
+    def has_property_returns(self) -> bool:
+        if not self.doc:
+            return False
+
+        # If this is a property docstring, the summary is the return doc.
+        if self.has_property_type():
+            # The summary line is the return doc,
+            # so the first line must not be a known directive.
+            return not self.doc.lstrip().startswith("@")
+
+        return False


 class GoogleDocstring(Docstring):
     re_type = SphinxDocstring.re_type
+
     re_xref = SphinxDocstring.re_xref
-    re_container_type = f"""
+
+    re_container_type = rf"""
         (?:{re_type}|{re_xref})       # a container type
-        [\\(\\[] [^\\n]+ [\\)\\]]          # with the contents of the container
+        [\(\[] [^\n]+ [\)\]]          # with the contents of the container
     """
-    re_multiple_type = f"""
+
+    re_multiple_type = rf"""
         (?:{re_container_type}|{re_type}|{re_xref})
-        (?:(?:\\s+(?:of|or)\\s+|\\s*,\\s*|\\s+\\|\\s+)(?:{re_container_type}|{re_type}|{re_xref}))*
+        (?:(?:\s+(?:of|or)\s+|\s*,\s*|\s+\|\s+)(?:{re_container_type}|{re_type}|{re_xref}))*
     """
-    _re_section_template = """
-        ^([ ]*)   {0} \\s*:   \\s*$     # Google parameter header
+
+    _re_section_template = r"""
+        ^([ ]*)   {0} \s*:   \s*$     # Google parameter header
         (  .* )                       # section
         """
-    re_param_section = re.compile(_re_section_template.format(
-        '(?:Args|Arguments|Parameters)'), re.X | re.S | re.M)
-    re_keyword_param_section = re.compile(_re_section_template.format(
-        'Keyword\\s(?:Args|Arguments|Parameters)'), re.X | re.S | re.M)
+
+    re_param_section = re.compile(
+        _re_section_template.format(r"(?:Args|Arguments|Parameters)"),
+        re.X | re.S | re.M,
+    )
+
+    re_keyword_param_section = re.compile(
+        _re_section_template.format(r"Keyword\s(?:Args|Arguments|Parameters)"),
+        re.X | re.S | re.M,
+    )
+
     re_param_line = re.compile(
-        f"""
-        \\s*  ((?:\\\\?\\*{{0,2}})?[\\w\\\\]+) # identifier potentially with asterisks or escaped `\\`
-        \\s*  ( [(]
+        rf"""
+        \s*  ((?:\\?\*{{0,2}})?[\w\\]+) # identifier potentially with asterisks or escaped `\`
+        \s*  ( [(]
             {re_multiple_type}
-            (?:,\\s+optional)?
-            [)] )? \\s* :                # optional type declaration
-        \\s*  (.*)                       # beginning of optional description
-    """
-        , re.X | re.S | re.M)
-    re_raise_section = re.compile(_re_section_template.format('Raises'), re
-        .X | re.S | re.M)
+            (?:,\s+optional)?
+            [)] )? \s* :                # optional type declaration
+        \s*  (.*)                       # beginning of optional description
+    """,
+        re.X | re.S | re.M,
+    )
+
+    re_raise_section = re.compile(
+        _re_section_template.format(r"Raises"), re.X | re.S | re.M
+    )
+
     re_raise_line = re.compile(
-        f"""
-        \\s*  ({re_multiple_type}) \\s* :  # identifier
-        \\s*  (.*)                        # beginning of optional description
-    """
-        , re.X | re.S | re.M)
-    re_returns_section = re.compile(_re_section_template.format('Returns?'),
-        re.X | re.S | re.M)
+        rf"""
+        \s*  ({re_multiple_type}) \s* :  # identifier
+        \s*  (.*)                        # beginning of optional description
+    """,
+        re.X | re.S | re.M,
+    )
+
+    re_returns_section = re.compile(
+        _re_section_template.format(r"Returns?"), re.X | re.S | re.M
+    )
+
     re_returns_line = re.compile(
-        f"""
-        \\s* ({re_multiple_type}:)?        # identifier
-        \\s* (.*)                          # beginning of description
-    """
-        , re.X | re.S | re.M)
+        rf"""
+        \s* ({re_multiple_type}:)?        # identifier
+        \s* (.*)                          # beginning of description
+    """,
+        re.X | re.S | re.M,
+    )
+
     re_property_returns_line = re.compile(
-        f"""
+        rf"""
         ^{re_multiple_type}:           # identifier
-        \\s* (.*)                       # Summary line / description
-    """
-        , re.X | re.S | re.M)
-    re_yields_section = re.compile(_re_section_template.format('Yields?'), 
-        re.X | re.S | re.M)
+        \s* (.*)                       # Summary line / description
+    """,
+        re.X | re.S | re.M,
+    )
+
+    re_yields_section = re.compile(
+        _re_section_template.format(r"Yields?"), re.X | re.S | re.M
+    )
+
     re_yields_line = re_returns_line
+
     supports_yields = True

-    def matching_sections(self) ->int:
+    def matching_sections(self) -> int:
         """Returns the number of matching docstring sections."""
-        pass
+        return sum(
+            bool(i)
+            for i in (
+                self.re_param_section.search(self.doc),
+                self.re_raise_section.search(self.doc),
+                self.re_returns_section.search(self.doc),
+                self.re_yields_section.search(self.doc),
+                self.re_property_returns_line.search(self._first_line()),
+            )
+        )
+
+    def has_params(self) -> bool:
+        if not self.doc:
+            return False
+
+        return self.re_param_section.search(self.doc) is not None
+
+    def has_returns(self) -> bool:
+        if not self.doc:
+            return False
+
+        entries = self._parse_section(self.re_returns_section)
+        for entry in entries:
+            match = self.re_returns_line.match(entry)
+            if not match:
+                continue
+
+            return_desc = match.group(2)
+            if return_desc:
+                return True
+
+        return False
+
+    def has_rtype(self) -> bool:
+        if not self.doc:
+            return False
+
+        entries = self._parse_section(self.re_returns_section)
+        for entry in entries:
+            match = self.re_returns_line.match(entry)
+            if not match:
+                continue
+
+            return_type = match.group(1)
+            if return_type:
+                return True
+
+        return False
+
+    def has_property_returns(self) -> bool:
+        # The summary line is the return doc,
+        # so the first line must not be a known directive.
+        first_line = self._first_line()
+        return not bool(
+            self.re_param_section.search(first_line)
+            or self.re_raise_section.search(first_line)
+            or self.re_returns_section.search(first_line)
+            or self.re_yields_section.search(first_line)
+        )
+
+    def has_property_type(self) -> bool:
+        if not self.doc:
+            return False
+
+        return bool(self.re_property_returns_line.match(self._first_line()))
+
+    def has_yields(self) -> bool:
+        if not self.doc:
+            return False
+
+        entries = self._parse_section(self.re_yields_section)
+        for entry in entries:
+            match = self.re_yields_line.match(entry)
+            if not match:
+                continue
+
+            yield_desc = match.group(2)
+            if yield_desc:
+                return True
+
+        return False
+
+    def has_yields_type(self) -> bool:
+        if not self.doc:
+            return False
+
+        entries = self._parse_section(self.re_yields_section)
+        for entry in entries:
+            match = self.re_yields_line.match(entry)
+            if not match:
+                continue
+
+            yield_type = match.group(1)
+            if yield_type:
+                return True
+
+        return False
+
+    def exceptions(self) -> set[str]:
+        types: set[str] = set()
+
+        entries = self._parse_section(self.re_raise_section)
+        for entry in entries:
+            match = self.re_raise_line.match(entry)
+            if not match:
+                continue
+
+            exc_type = match.group(1)
+            exc_desc = match.group(2)
+            if exc_desc:
+                types.update(_split_multiple_exc_types(exc_type))
+
+        return types
+
+    def match_param_docs(self) -> tuple[set[str], set[str]]:
+        params_with_doc: set[str] = set()
+        params_with_type: set[str] = set()
+
+        entries = self._parse_section(self.re_param_section)
+        entries.extend(self._parse_section(self.re_keyword_param_section))
+        for entry in entries:
+            match = self.re_param_line.match(entry)
+            if not match:
+                continue
+
+            param_name = match.group(1)
+            # Remove escape characters necessary for asterisks
+            param_name = param_name.replace("\\", "")
+
+            param_type = match.group(2)
+            param_desc = match.group(3)
+
+            if param_type:
+                params_with_type.add(param_name)
+
+            if param_desc:
+                params_with_doc.add(param_name)
+
+        return params_with_doc, params_with_type
+
+    def _first_line(self) -> str:
+        return self.doc.lstrip().split("\n", 1)[0]
+
+    @staticmethod
+    def min_section_indent(section_match: re.Match[str]) -> int:
+        return len(section_match.group(1)) + 1
+
+    @staticmethod
+    def _is_section_header(_: str) -> bool:
+        # Google parsing does not need to detect section headers,
+        # because it works off of indentation level only
+        return False
+
+    def _parse_section(self, section_re: re.Pattern[str]) -> list[str]:
+        section_match = section_re.search(self.doc)
+        if section_match is None:
+            return []
+
+        min_indentation = self.min_section_indent(section_match)
+
+        entries: list[str] = []
+        entry: list[str] = []
+        is_first = True
+        for line in section_match.group(2).splitlines():
+            if not line.strip():
+                continue
+            indentation = space_indentation(line)
+            if indentation < min_indentation:
+                break
+
+            # The first line after the header defines the minimum
+            # indentation.
+            if is_first:
+                min_indentation = indentation
+                is_first = False
+
+            if indentation == min_indentation:
+                if self._is_section_header(line):
+                    break
+                # Lines with minimum indentation must contain the beginning
+                # of a new parameter documentation.
+                if entry:
+                    entries.append("\n".join(entry))
+                    entry = []
+
+            entry.append(line)
+
+        if entry:
+            entries.append("\n".join(entry))
+
+        return entries


 class NumpyDocstring(GoogleDocstring):
-    _re_section_template = """
-        ^([ ]*)   {0}   \\s*?$          # Numpy parameters header
-        \\s*     [-=]+   \\s*?$          # underline
+    _re_section_template = r"""
+        ^([ ]*)   {0}   \s*?$          # Numpy parameters header
+        \s*     [-=]+   \s*?$          # underline
         (  .* )                        # section
     """
-    re_param_section = re.compile(_re_section_template.format(
-        '(?:Args|Arguments|Parameters)'), re.X | re.S | re.M)
-    re_default_value = '(([\'"]\\w+\\s*[\'"])|(\\d+)|(True)|(False)|(None))'
+
+    re_param_section = re.compile(
+        _re_section_template.format(r"(?:Args|Arguments|Parameters)"),
+        re.X | re.S | re.M,
+    )
+
+    re_default_value = r"""((['"]\w+\s*['"])|(\d+)|(True)|(False)|(None))"""
+
     re_param_line = re.compile(
-        f"""
-        \\s*  (?P<param_name>\\*{{0,2}}\\w+)(\\s?(:|\\n)) # identifier with potential asterisks
-        \\s*
+        rf"""
+        \s*  (?P<param_name>\*{{0,2}}\w+)(\s?(:|\n)) # identifier with potential asterisks
+        \s*
         (?P<param_type>
          (
           ({GoogleDocstring.re_multiple_type})      # default type declaration
-          (,\\s+optional)?                           # optional 'optional' indication
+          (,\s+optional)?                           # optional 'optional' indication
          )?
          (
-          {{({re_default_value},?\\s*)+}}            # set of default values
+          {{({re_default_value},?\s*)+}}            # set of default values
          )?
-         (?:$|\\n)
+         (?:$|\n)
         )?
         (
-         \\s* (?P<param_desc>.*)                     # optional description
+         \s* (?P<param_desc>.*)                     # optional description
         )?
-    """
-        , re.X | re.S)
-    re_raise_section = re.compile(_re_section_template.format('Raises'), re
-        .X | re.S | re.M)
+    """,
+        re.X | re.S,
+    )
+
+    re_raise_section = re.compile(
+        _re_section_template.format(r"Raises"), re.X | re.S | re.M
+    )
+
     re_raise_line = re.compile(
-        f"""
-        \\s* ({GoogleDocstring.re_type})$   # type declaration
-        \\s* (.*)                           # optional description
-    """
-        , re.X | re.S | re.M)
-    re_returns_section = re.compile(_re_section_template.format('Returns?'),
-        re.X | re.S | re.M)
+        rf"""
+        \s* ({GoogleDocstring.re_type})$   # type declaration
+        \s* (.*)                           # optional description
+    """,
+        re.X | re.S | re.M,
+    )
+
+    re_returns_section = re.compile(
+        _re_section_template.format(r"Returns?"), re.X | re.S | re.M
+    )
+
     re_returns_line = re.compile(
-        f"""
-        \\s* (?:\\w+\\s+:\\s+)? # optional name
+        rf"""
+        \s* (?:\w+\s+:\s+)? # optional name
         ({GoogleDocstring.re_multiple_type})$   # type declaration
-        \\s* (.*)                                # optional description
-    """
-        , re.X | re.S | re.M)
-    re_yields_section = re.compile(_re_section_template.format('Yields?'), 
-        re.X | re.S | re.M)
+        \s* (.*)                                # optional description
+    """,
+        re.X | re.S | re.M,
+    )
+
+    re_yields_section = re.compile(
+        _re_section_template.format(r"Yields?"), re.X | re.S | re.M
+    )
+
     re_yields_line = re_returns_line
+
     supports_yields = True

-    def match_param_docs(self) ->tuple[set[str], set[str]]:
+    def match_param_docs(self) -> tuple[set[str], set[str]]:
         """Matches parameter documentation section to parameter documentation rules."""
-        pass
-
-
-DOCSTRING_TYPES = {'sphinx': SphinxDocstring, 'epytext': EpytextDocstring,
-    'google': GoogleDocstring, 'numpy': NumpyDocstring, 'default': Docstring}
+        params_with_doc = set()
+        params_with_type = set()
+
+        entries = self._parse_section(self.re_param_section)
+        entries.extend(self._parse_section(self.re_keyword_param_section))
+        for entry in entries:
+            match = self.re_param_line.match(entry)
+            if not match:
+                continue
+
+            # check if parameter has description only
+            re_only_desc = re.match(r"\s*(\*{0,2}\w+)\s*:?\n\s*\w*$", entry)
+            if re_only_desc:
+                param_name = match.group("param_name")
+                param_desc = match.group("param_type")
+                param_type = None
+            else:
+                param_name = match.group("param_name")
+                param_type = match.group("param_type")
+                param_desc = match.group("param_desc")
+                # The re_param_line pattern needs to match multi-line which removes the ability
+                # to match a single line description like 'arg : a number type.'
+                # We are not trying to determine whether 'a number type' is correct typing
+                # but we do accept it as typing as it is in the place where typing
+                # should be
+                if param_type is None and re.match(r"\s*(\*{0,2}\w+)\s*:.+$", entry):
+                    param_type = param_desc
+                # If the description is "" but we have a type description
+                # we consider the description to be the type
+                if not param_desc and param_type:
+                    param_desc = param_type
+
+            if param_type:
+                params_with_type.add(param_name)
+
+            if param_desc:
+                params_with_doc.add(param_name)
+
+        return params_with_doc, params_with_type
+
+    @staticmethod
+    def min_section_indent(section_match: re.Match[str]) -> int:
+        return len(section_match.group(1))
+
+    @staticmethod
+    def _is_section_header(line: str) -> bool:
+        return bool(re.match(r"\s*-+$", line))
+
+
+DOCSTRING_TYPES = {
+    "sphinx": SphinxDocstring,
+    "epytext": EpytextDocstring,
+    "google": GoogleDocstring,
+    "numpy": NumpyDocstring,
+    "default": Docstring,
+}
 """A map of the name of the docstring type to its class.

 :type: dict(str, type)
diff --git a/pylint/extensions/bad_builtin.py b/pylint/extensions/bad_builtin.py
index 89b47f852..79cc5d9f4 100644
--- a/pylint/extensions/bad_builtin.py
+++ b/pylint/extensions/bad_builtin.py
@@ -1,22 +1,65 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Checker for deprecated builtins."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.checkers.utils import only_required_for_messages
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-BAD_FUNCTIONS = ['map', 'filter']
-LIST_COMP_MSG = 'Using a list comprehension can be clearer.'
-BUILTIN_HINTS = {'map': LIST_COMP_MSG, 'filter': LIST_COMP_MSG}
+
+BAD_FUNCTIONS = ["map", "filter"]
+# Some hints regarding the use of bad builtins.
+LIST_COMP_MSG = "Using a list comprehension can be clearer."
+BUILTIN_HINTS = {"map": LIST_COMP_MSG, "filter": LIST_COMP_MSG}


 class BadBuiltinChecker(BaseChecker):
-    name = 'deprecated_builtins'
-    msgs = {'W0141': ('Used builtin function %s', 'bad-builtin',
-        'Used when a disallowed builtin function is used (see the bad-function option). Usual disallowed functions are the ones like map, or filter , where Python offers now some cleaner alternative like list comprehension.'
-        )}
-    options = ('bad-functions', {'default': BAD_FUNCTIONS, 'type': 'csv',
-        'metavar': '<builtin function names>', 'help':
-        'List of builtins function names that should not be used, separated by a comma'
-        }),
+    name = "deprecated_builtins"
+    msgs = {
+        "W0141": (
+            "Used builtin function %s",
+            "bad-builtin",
+            "Used when a disallowed builtin function is used (see the "
+            "bad-function option). Usual disallowed functions are the ones "
+            "like map, or filter , where Python offers now some cleaner "
+            "alternative like list comprehension.",
+        )
+    }
+
+    options = (
+        (
+            "bad-functions",
+            {
+                "default": BAD_FUNCTIONS,
+                "type": "csv",
+                "metavar": "<builtin function names>",
+                "help": "List of builtins function names that should not be "
+                "used, separated by a comma",
+            },
+        ),
+    )
+
+    @only_required_for_messages("bad-builtin")
+    def visit_call(self, node: nodes.Call) -> None:
+        if isinstance(node.func, nodes.Name):
+            name = node.func.name
+            # ignore the name if it's not a builtin (i.e. not defined in the
+            # locals nor globals scope)
+            if not (name in node.frame() or name in node.root()):
+                if name in self.linter.config.bad_functions:
+                    hint = BUILTIN_HINTS.get(name)
+                    args = f"{name!r}. {hint}" if hint else repr(name)
+                    self.add_message("bad-builtin", node=node, args=args)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(BadBuiltinChecker(linter))
diff --git a/pylint/extensions/broad_try_clause.py b/pylint/extensions/broad_try_clause.py
index 554ff6a38..90168909a 100644
--- a/pylint/extensions/broad_try_clause.py
+++ b/pylint/extensions/broad_try_clause.py
@@ -1,8 +1,17 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Looks for try/except statements with too much code in the try clause."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint import checkers
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter

@@ -14,9 +23,51 @@ class BroadTryClauseChecker(checkers.BaseChecker):
     amount of code. This checker enforces a maximum number of statements within
     ``try`` clauses.
     """
-    name = 'broad_try_clause'
-    msgs = {'W0717': ('%s', 'too-many-try-statements',
-        'Try clause contains too many statements.')}
-    options = ('max-try-statements', {'default': 1, 'type': 'int',
-        'metavar': '<int>', 'help':
-        'Maximum number of statements allowed in a try clause'}),
+
+    # configuration section name
+    name = "broad_try_clause"
+    msgs = {
+        "W0717": (
+            "%s",
+            "too-many-try-statements",
+            "Try clause contains too many statements.",
+        )
+    }
+
+    options = (
+        (
+            "max-try-statements",
+            {
+                "default": 1,
+                "type": "int",
+                "metavar": "<int>",
+                "help": "Maximum number of statements allowed in a try clause",
+            },
+        ),
+    )
+
+    def _count_statements(
+        self, node: nodes.For | nodes.If | nodes.Try | nodes.While | nodes.With
+    ) -> int:
+        statement_count = len(node.body)
+
+        for body_node in node.body:
+            if isinstance(body_node, (nodes.For, nodes.If, nodes.While, nodes.With)):
+                statement_count += self._count_statements(body_node)
+
+        return statement_count
+
+    def visit_try(self, node: nodes.Try) -> None:
+        try_clause_statements = self._count_statements(node)
+        if try_clause_statements > self.linter.config.max_try_statements:
+            msg = (
+                f"try clause contains {try_clause_statements} statements, expected at"
+                f" most {self.linter.config.max_try_statements}"
+            )
+            self.add_message(
+                "too-many-try-statements", node.lineno, node=node, args=msg
+            )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(BroadTryClauseChecker(linter))
diff --git a/pylint/extensions/check_elif.py b/pylint/extensions/check_elif.py
index b9e5190a4..de20ed8ec 100644
--- a/pylint/extensions/check_elif.py
+++ b/pylint/extensions/check_elif.py
@@ -1,33 +1,64 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import tokenize
 from tokenize import TokenInfo
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseTokenChecker
 from pylint.checkers.utils import only_required_for_messages
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 class ElseifUsedChecker(BaseTokenChecker):
     """Checks for use of "else if" when an "elif" could be used."""
-    name = 'else_if_used'
-    msgs = {'R5501': (
-        'Consider using "elif" instead of "else" then "if" to remove one indentation level'
-        , 'else-if-used',
-        'Used when an else statement is immediately followed by an if statement and does not contain statements that would be unrelated to it.'
-        )}
-
-    def __init__(self, linter: PyLinter) ->None:
+
+    name = "else_if_used"
+    msgs = {
+        "R5501": (
+            'Consider using "elif" instead of "else" then "if" to remove one indentation level',
+            "else-if-used",
+            "Used when an else statement is immediately followed by "
+            "an if statement and does not contain statements that "
+            "would be unrelated to it.",
+        )
+    }
+
+    def __init__(self, linter: PyLinter) -> None:
         super().__init__(linter)
         self._init()

-    def process_tokens(self, tokens: list[TokenInfo]) ->None:
+    def _init(self) -> None:
+        self._elifs: dict[tokenize._Position, str] = {}
+
+    def process_tokens(self, tokens: list[TokenInfo]) -> None:
         """Process tokens and look for 'if' or 'elif'."""
-        pass
+        self._elifs = {
+            begin: token for _, token, begin, _, _ in tokens if token in {"elif", "if"}
+        }
+
+    def leave_module(self, _: nodes.Module) -> None:
+        self._init()

-    @only_required_for_messages('else-if-used')
-    def visit_if(self, node: nodes.If) ->None:
+    @only_required_for_messages("else-if-used")
+    def visit_if(self, node: nodes.If) -> None:
         """Current if node must directly follow an 'else'."""
-        pass
+        if (
+            isinstance(node.parent, nodes.If)
+            and node.parent.orelse == [node]
+            and (node.lineno, node.col_offset) in self._elifs
+            and self._elifs[(node.lineno, node.col_offset)] == "if"
+        ):
+            self.add_message("else-if-used", node=node, confidence=HIGH)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(ElseifUsedChecker(linter))
diff --git a/pylint/extensions/code_style.py b/pylint/extensions/code_style.py
index cc8634462..622601c75 100644
--- a/pylint/extensions/code_style.py
+++ b/pylint/extensions/code_style.py
@@ -1,12 +1,21 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import sys
 from typing import TYPE_CHECKING, Tuple, Type, cast
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker, utils
 from pylint.checkers.utils import only_required_for_messages, safe_infer
 from pylint.interfaces import INFERENCE
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
+
 if sys.version_info >= (3, 10):
     from typing import TypeGuard
 else:
@@ -28,37 +37,177 @@ class CodeStyleChecker(BaseChecker):
        and therefore preferred => this is the right place
     3. Everything else should go into another extension
     """
-    name = 'code_style'
-    msgs = {'R6101': (
-        'Consider using namedtuple or dataclass for dictionary values',
-        'consider-using-namedtuple-or-dataclass',
-        'Emitted when dictionary values can be replaced by namedtuples or dataclass instances.'
-        ), 'R6102': ('Consider using an in-place tuple instead of list',
-        'consider-using-tuple',
-        'Only for style consistency! Emitted where an in-place defined ``list`` can be replaced by a ``tuple``. Due to optimizations by CPython, there is no performance benefit from it.'
-        ), 'R6103': ("Use '%s' instead", 'consider-using-assignment-expr',
-        'Emitted when an if assignment is directly followed by an if statement and both can be combined by using an assignment expression ``:=``. Requires Python 3.8 and ``py-version >= 3.8``.'
-        ), 'R6104': ("Use '%s' to do an augmented assign directly",
-        'consider-using-augmented-assign',
-        """Emitted when an assignment is referring to the object that it is assigning to. This can be changed to be an augmented assign.
-Disabled by default!"""
-        , {'default_enabled': False}), 'R6105': (
-        "Prefer 'typing.NamedTuple' over 'collections.namedtuple'",
-        'prefer-typing-namedtuple',
-        """'typing.NamedTuple' uses the well-known 'class' keyword with type-hints for readability (it's also faster as it avoids an internal exec call).
-Disabled by default!"""
-        , {'default_enabled': False})}
-    options = ('max-line-length-suggestions', {'type': 'int', 'default': 0,
-        'metavar': '<int>', 'help':
-        'Max line length for which to sill emit suggestions. Used to prevent optional suggestions which would get split by a code formatter (e.g., black). Will default to the setting for ``max-line-length``.'
-        }),
-
-    def _check_dict_consider_namedtuple_dataclass(self, node: nodes.Dict
-        ) ->None:
+
+    name = "code_style"
+    msgs = {
+        "R6101": (
+            "Consider using namedtuple or dataclass for dictionary values",
+            "consider-using-namedtuple-or-dataclass",
+            "Emitted when dictionary values can be replaced by namedtuples or dataclass instances.",
+        ),
+        "R6102": (
+            "Consider using an in-place tuple instead of list",
+            "consider-using-tuple",
+            "Only for style consistency! "
+            "Emitted where an in-place defined ``list`` can be replaced by a ``tuple``. "
+            "Due to optimizations by CPython, there is no performance benefit from it.",
+        ),
+        "R6103": (
+            "Use '%s' instead",
+            "consider-using-assignment-expr",
+            "Emitted when an if assignment is directly followed by an if statement and "
+            "both can be combined by using an assignment expression ``:=``. "
+            "Requires Python 3.8 and ``py-version >= 3.8``.",
+        ),
+        "R6104": (
+            "Use '%s' to do an augmented assign directly",
+            "consider-using-augmented-assign",
+            "Emitted when an assignment is referring to the object that it is assigning "
+            "to. This can be changed to be an augmented assign.\n"
+            "Disabled by default!",
+            {
+                "default_enabled": False,
+            },
+        ),
+        "R6105": (
+            "Prefer 'typing.NamedTuple' over 'collections.namedtuple'",
+            "prefer-typing-namedtuple",
+            "'typing.NamedTuple' uses the well-known 'class' keyword "
+            "with type-hints for readability (it's also faster as it avoids "
+            "an internal exec call).\n"
+            "Disabled by default!",
+            {
+                "default_enabled": False,
+            },
+        ),
+    }
+    options = (
+        (
+            "max-line-length-suggestions",
+            {
+                "type": "int",
+                "default": 0,
+                "metavar": "<int>",
+                "help": (
+                    "Max line length for which to still emit suggestions. "
+                    "Used to prevent optional suggestions which would get split "
+                    "by a code formatter (e.g., black). "
+                    "Will default to the setting for ``max-line-length``."
+                ),
+            },
+        ),
+    )
+
+    def open(self) -> None:
+        py_version = self.linter.config.py_version
+        self._py36_plus = py_version >= (3, 6)
+        self._py38_plus = py_version >= (3, 8)
+        self._max_length: int = (
+            self.linter.config.max_line_length_suggestions
+            or self.linter.config.max_line_length
+        )
+
+    @only_required_for_messages("prefer-typing-namedtuple")
+    def visit_call(self, node: nodes.Call) -> None:
+        if self._py36_plus:
+            called = safe_infer(node.func)
+            if called and called.qname() == "collections.namedtuple":
+                self.add_message(
+                    "prefer-typing-namedtuple", node=node, confidence=INFERENCE
+                )
+
+    @only_required_for_messages("consider-using-namedtuple-or-dataclass")
+    def visit_dict(self, node: nodes.Dict) -> None:
+        self._check_dict_consider_namedtuple_dataclass(node)
+
+    @only_required_for_messages("consider-using-tuple")
+    def visit_for(self, node: nodes.For) -> None:
+        if isinstance(node.iter, nodes.List):
+            self.add_message("consider-using-tuple", node=node.iter)
+
+    @only_required_for_messages("consider-using-tuple")
+    def visit_comprehension(self, node: nodes.Comprehension) -> None:
+        if isinstance(node.iter, nodes.List):
+            self.add_message("consider-using-tuple", node=node.iter)
+
+    @only_required_for_messages("consider-using-assignment-expr")
+    def visit_if(self, node: nodes.If) -> None:
+        if self._py38_plus:
+            self._check_consider_using_assignment_expr(node)
+
+    def _check_dict_consider_namedtuple_dataclass(self, node: nodes.Dict) -> None:
         """Check if dictionary values can be replaced by Namedtuple or Dataclass."""
-        pass
+        if not (
+            isinstance(node.parent, (nodes.Assign, nodes.AnnAssign))
+            and isinstance(node.parent.parent, nodes.Module)
+            or isinstance(node.parent, nodes.AnnAssign)
+            and isinstance(node.parent.target, nodes.AssignName)
+            and utils.is_assign_name_annotated_with(node.parent.target, "Final")
+        ):
+            # If dict is not part of an 'Assign' or 'AnnAssign' node in
+            # a module context OR 'AnnAssign' with 'Final' annotation, skip check.
+            return
+
+        # All dict_values are itself dict nodes
+        if len(node.items) > 1 and all(
+            isinstance(dict_value, nodes.Dict) for _, dict_value in node.items
+        ):
+            KeyTupleT = Tuple[Type[nodes.NodeNG], str]

-    def _check_consider_using_assignment_expr(self, node: nodes.If) ->None:
+            # Makes sure all keys are 'Const' string nodes
+            keys_checked: set[KeyTupleT] = set()
+            for _, dict_value in node.items:
+                dict_value = cast(nodes.Dict, dict_value)
+                for key, _ in dict_value.items:
+                    key_tuple = (type(key), key.as_string())
+                    if key_tuple in keys_checked:
+                        continue
+                    inferred = safe_infer(key)
+                    if not (
+                        isinstance(inferred, nodes.Const)
+                        and inferred.pytype() == "builtins.str"
+                    ):
+                        return
+                    keys_checked.add(key_tuple)
+
+            # Makes sure all subdicts have at least 1 common key
+            key_tuples: list[tuple[KeyTupleT, ...]] = []
+            for _, dict_value in node.items:
+                dict_value = cast(nodes.Dict, dict_value)
+                key_tuples.append(
+                    tuple((type(key), key.as_string()) for key, _ in dict_value.items)
+                )
+            keys_intersection: set[KeyTupleT] = set(key_tuples[0])
+            for sub_key_tuples in key_tuples[1:]:
+                keys_intersection.intersection_update(sub_key_tuples)
+            if not keys_intersection:
+                return
+
+            self.add_message("consider-using-namedtuple-or-dataclass", node=node)
+            return
+
+        # All dict_values are itself either list or tuple nodes
+        if len(node.items) > 1 and all(
+            isinstance(dict_value, (nodes.List, nodes.Tuple))
+            for _, dict_value in node.items
+        ):
+            # Make sure all sublists have the same length > 0
+            list_length = len(node.items[0][1].elts)
+            if list_length == 0:
+                return
+            for _, dict_value in node.items[1:]:
+                if len(dict_value.elts) != list_length:
+                    return
+
+            # Make sure at least one list entry isn't a dict
+            for _, dict_value in node.items:
+                if all(isinstance(entry, nodes.Dict) for entry in dict_value.elts):
+                    return
+
+            self.add_message("consider-using-namedtuple-or-dataclass", node=node)
+            return
+
+    def _check_consider_using_assignment_expr(self, node: nodes.If) -> None:
         """Check if an assignment expression (walrus operator) can be used.

         For example if an assignment is directly followed by an if statement:
@@ -72,23 +221,130 @@ Disabled by default!"""

         Note: Assignment expressions were added in Python 3.8
         """
-        pass
+        # Check if `node.test` contains a `Name` node
+        node_name: nodes.Name | None = None
+        if isinstance(node.test, nodes.Name):
+            node_name = node.test
+        elif (
+            isinstance(node.test, nodes.UnaryOp)
+            and node.test.op == "not"
+            and isinstance(node.test.operand, nodes.Name)
+        ):
+            node_name = node.test.operand
+        elif (
+            isinstance(node.test, nodes.Compare)
+            and isinstance(node.test.left, nodes.Name)
+            and len(node.test.ops) == 1
+        ):
+            node_name = node.test.left
+        else:
+            return
+
+        # Make sure the previous node is an assignment to the same name
+        # used in `node.test`. Furthermore, ignore if assignment spans multiple lines.
+        prev_sibling = node.previous_sibling()
+        if CodeStyleChecker._check_prev_sibling_to_if_stmt(
+            prev_sibling, node_name.name
+        ):
+            # Check if match statement would be a better fit.
+            # I.e. multiple ifs that test the same name.
+            if CodeStyleChecker._check_ignore_assignment_expr_suggestion(
+                node, node_name.name
+            ):
+                return
+
+            # Build suggestion string. Check length of suggestion
+            # does not exceed max-line-length-suggestions
+            test_str = node.test.as_string().replace(
+                node_name.name,
+                f"({node_name.name} := {prev_sibling.value.as_string()})",
+                1,
+            )
+            suggestion = f"if {test_str}:"
+            if (
+                node.col_offset is not None
+                and len(suggestion) + node.col_offset > self._max_length
+                or len(suggestion) > self._max_length
+            ):
+                return
+
+            self.add_message(
+                "consider-using-assignment-expr",
+                node=node_name,
+                args=(suggestion,),
+            )

     @staticmethod
-    def _check_prev_sibling_to_if_stmt(prev_sibling: (nodes.NodeNG | None),
-        name: (str | None)) ->TypeGuard[nodes.Assign | nodes.AnnAssign]:
+    def _check_prev_sibling_to_if_stmt(
+        prev_sibling: nodes.NodeNG | None, name: str | None
+    ) -> TypeGuard[nodes.Assign | nodes.AnnAssign]:
         """Check if previous sibling is an assignment with the same name.

         Ignore statements which span multiple lines.
         """
-        pass
+        if prev_sibling is None or prev_sibling.tolineno - prev_sibling.fromlineno != 0:
+            return False
+
+        if (
+            isinstance(prev_sibling, nodes.Assign)
+            and len(prev_sibling.targets) == 1
+            and isinstance(prev_sibling.targets[0], nodes.AssignName)
+            and prev_sibling.targets[0].name == name
+        ):
+            return True
+        if (
+            isinstance(prev_sibling, nodes.AnnAssign)
+            and isinstance(prev_sibling.target, nodes.AssignName)
+            and prev_sibling.target.name == name
+        ):
+            return True
+        return False

     @staticmethod
-    def _check_ignore_assignment_expr_suggestion(node: nodes.If, name: (str |
-        None)) ->bool:
+    def _check_ignore_assignment_expr_suggestion(
+        node: nodes.If, name: str | None
+    ) -> bool:
         """Return True if suggestion for assignment expr should be ignored.

         E.g., in cases where a match statement would be a better fit
         (multiple conditions).
         """
-        pass
+        if isinstance(node.test, nodes.Compare):
+            next_if_node: nodes.If | None = None
+            next_sibling = node.next_sibling()
+            if len(node.orelse) == 1 and isinstance(node.orelse[0], nodes.If):
+                # elif block
+                next_if_node = node.orelse[0]
+            elif isinstance(next_sibling, nodes.If):
+                # separate if block
+                next_if_node = next_sibling
+
+            if (  # pylint: disable=too-many-boolean-expressions
+                next_if_node is not None
+                and (
+                    isinstance(next_if_node.test, nodes.Compare)
+                    and isinstance(next_if_node.test.left, nodes.Name)
+                    and next_if_node.test.left.name == name
+                    or isinstance(next_if_node.test, nodes.Name)
+                    and next_if_node.test.name == name
+                )
+            ):
+                return True
+        return False
+
+    @only_required_for_messages("consider-using-augmented-assign")
+    def visit_assign(self, node: nodes.Assign) -> None:
+        is_aug, op = utils.is_augmented_assign(node)
+        if is_aug:
+            self.add_message(
+                "consider-using-augmented-assign",
+                args=f"{op}=",
+                node=node,
+                line=node.lineno,
+                col_offset=node.col_offset,
+                confidence=INFERENCE,
+            )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(CodeStyleChecker(linter))
diff --git a/pylint/extensions/comparison_placement.py b/pylint/extensions/comparison_placement.py
index 205610473..f7ecceae3 100644
--- a/pylint/extensions/comparison_placement.py
+++ b/pylint/extensions/comparison_placement.py
@@ -1,21 +1,69 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Checks for yoda comparisons (variable before constant)
 See https://en.wikipedia.org/wiki/Yoda_conditions.
 """
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker, utils
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-REVERSED_COMPS = {'<': '>', '<=': '>=', '>': '<', '>=': '<='}
-COMPARISON_OPERATORS = frozenset(('==', '!=', '<', '>', '<=', '>='))
+
+REVERSED_COMPS = {"<": ">", "<=": ">=", ">": "<", ">=": "<="}
+COMPARISON_OPERATORS = frozenset(("==", "!=", "<", ">", "<=", ">="))


 class MisplacedComparisonConstantChecker(BaseChecker):
     """Checks the placement of constants in comparisons."""
-    name = 'comparison-placement'
-    msgs = {'C2201': ('Comparison should be %s',
-        'misplaced-comparison-constant',
-        'Used when the constant is placed on the left side of a comparison. It is usually clearer in intent to place it in the right hand side of the comparison.'
-        , {'old_names': [('C0122', 'old-misplaced-comparison-constant')]})}
+
+    # configuration section name
+    name = "comparison-placement"
+    msgs = {
+        "C2201": (
+            "Comparison should be %s",
+            "misplaced-comparison-constant",
+            "Used when the constant is placed on the left side "
+            "of a comparison. It is usually clearer in intent to "
+            "place it in the right hand side of the comparison.",
+            {"old_names": [("C0122", "old-misplaced-comparison-constant")]},
+        )
+    }
+
     options = ()
+
+    def _check_misplaced_constant(
+        self,
+        node: nodes.Compare,
+        left: nodes.NodeNG,
+        right: nodes.NodeNG,
+        operator: str,
+    ) -> None:
+        if isinstance(right, nodes.Const):
+            return
+        operator = REVERSED_COMPS.get(operator, operator)
+        suggestion = f"{right.as_string()} {operator} {left.value!r}"
+        self.add_message("misplaced-comparison-constant", node=node, args=(suggestion,))
+
+    @utils.only_required_for_messages("misplaced-comparison-constant")
+    def visit_compare(self, node: nodes.Compare) -> None:
+        # NOTE: this checker only works with binary comparisons like 'x == 42'
+        # but not 'x == y == 42'
+        if len(node.ops) != 1:
+            return
+
+        left = node.left
+        operator, right = node.ops[0]
+        if operator in COMPARISON_OPERATORS and isinstance(left, nodes.Const):
+            self._check_misplaced_constant(node, left, right, operator)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(MisplacedComparisonConstantChecker(linter))
diff --git a/pylint/extensions/confusing_elif.py b/pylint/extensions/confusing_elif.py
index d45d9ec2c..287547eaa 100644
--- a/pylint/extensions/confusing_elif.py
+++ b/pylint/extensions/confusing_elif.py
@@ -1,8 +1,16 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.checkers.utils import only_required_for_messages
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter

@@ -11,9 +19,37 @@ class ConfusingConsecutiveElifChecker(BaseChecker):
     """Checks if "elif" is used right after an indented block that finishes with "if" or
     "elif" itself.
     """
-    name = 'confusing_elif'
-    msgs = {'R5601': (
-        'Consecutive elif with differing indentation level, consider creating a function to separate the inner elif'
-        , 'confusing-consecutive-elif',
-        'Used when an elif statement follows right after an indented block which itself ends with if or elif. It may not be obvious if the elif statement was willingly or mistakenly unindented. Extracting the indented if statement into a separate function might avoid confusion and prevent errors.'
-        )}
+
+    name = "confusing_elif"
+    msgs = {
+        "R5601": (
+            "Consecutive elif with differing indentation level, consider creating a function to separate the inner"
+            " elif",
+            "confusing-consecutive-elif",
+            "Used when an elif statement follows right after an indented block which itself ends with if or elif. "
+            "It may not be obvious if the elif statement was willingly or mistakenly unindented. "
+            "Extracting the indented if statement into a separate function might avoid confusion and prevent "
+            "errors.",
+        )
+    }
+
+    @only_required_for_messages("confusing-consecutive-elif")
+    def visit_if(self, node: nodes.If) -> None:
+        body_ends_with_if = isinstance(
+            node.body[-1], nodes.If
+        ) and self._has_no_else_clause(node.body[-1])
+        if node.has_elif_block() and body_ends_with_if:
+            self.add_message("confusing-consecutive-elif", node=node.orelse[0])
+
+    @staticmethod
+    def _has_no_else_clause(node: nodes.If) -> bool:
+        orelse = node.orelse
+        while orelse and isinstance(orelse[0], nodes.If):
+            orelse = orelse[0].orelse
+        if not orelse or isinstance(orelse[0], nodes.If):
+            return True
+        return False
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(ConfusingConsecutiveElifChecker(linter))
diff --git a/pylint/extensions/consider_refactoring_into_while_condition.py b/pylint/extensions/consider_refactoring_into_while_condition.py
index c2135c90d..b7e905e8a 100644
--- a/pylint/extensions/consider_refactoring_into_while_condition.py
+++ b/pylint/extensions/consider_refactoring_into_while_condition.py
@@ -1,10 +1,19 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Looks for try/except statements with too much code in the try clause."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint import checkers
 from pylint.checkers import utils
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter

@@ -20,13 +29,65 @@ class ConsiderRefactorIntoWhileConditionChecker(checkers.BaseChecker):

     The if statement(s) can be refactored into the while loop.
     """
-    name = 'consider_refactoring_into_while'
-    msgs = {'R3501': (
-        "Consider using 'while %s' instead of 'while %s:' an 'if', and a 'break'"
-        , 'consider-refactoring-into-while-condition',
-        'Emitted when `while True:` loop is used and the first statement is a break condition. The ``if / break`` construct can be removed if the check is inverted and moved to the ``while`` statement.'
-        )}
-
-    def _check_breaking_after_while_true(self, node: nodes.While) ->None:
+
+    name = "consider_refactoring_into_while"
+    msgs = {
+        "R3501": (
+            "Consider using 'while %s' instead of 'while %s:' an 'if', and a 'break'",
+            "consider-refactoring-into-while-condition",
+            "Emitted when `while True:` loop is used and the first statement is a break condition. "
+            "The ``if / break`` construct can be removed if the check is inverted and moved to "
+            "the ``while`` statement.",
+        ),
+    }
+
+    @utils.only_required_for_messages("consider-refactoring-into-while-condition")
+    def visit_while(self, node: nodes.While) -> None:
+        self._check_breaking_after_while_true(node)
+
+    def _check_breaking_after_while_true(self, node: nodes.While) -> None:
         """Check that any loop with an ``if`` clause has a break statement."""
-        pass
+        if not isinstance(node.test, nodes.Const) or not node.test.bool_value():
+            return
+        pri_candidates: list[nodes.If] = []
+        for n in node.body:
+            if not isinstance(n, nodes.If):
+                break
+            pri_candidates.append(n)
+        candidates = []
+        tainted = False
+        for c in pri_candidates:
+            if tainted or not isinstance(c.body[0], nodes.Break):
+                break
+            candidates.append(c)
+            orelse = c.orelse
+            while orelse:
+                orelse_node = orelse[0]
+                if not isinstance(orelse_node, nodes.If):
+                    tainted = True
+                else:
+                    candidates.append(orelse_node)
+                if not isinstance(orelse_node, nodes.If):
+                    break
+                orelse = orelse_node.orelse
+
+        candidates = [n for n in candidates if isinstance(n.body[0], nodes.Break)]
+        msg = " and ".join(
+            [f"({utils.not_condition_as_string(c.test)})" for c in candidates]
+        )
+        if len(candidates) == 1:
+            msg = utils.not_condition_as_string(candidates[0].test)
+        if not msg:
+            return
+
+        self.add_message(
+            "consider-refactoring-into-while-condition",
+            node=node,
+            line=node.lineno,
+            args=(msg, node.test.as_string()),
+            confidence=HIGH,
+        )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(ConsiderRefactorIntoWhileConditionChecker(linter))
diff --git a/pylint/extensions/consider_ternary_expression.py b/pylint/extensions/consider_ternary_expression.py
index ffe0b05dc..83046ce38 100644
--- a/pylint/extensions/consider_ternary_expression.py
+++ b/pylint/extensions/consider_ternary_expression.py
@@ -1,15 +1,56 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Check for if / assign blocks that can be rewritten with if-expressions."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 class ConsiderTernaryExpressionChecker(BaseChecker):
-    name = 'consider_ternary_expression'
-    msgs = {'W0160': ('Consider rewriting as a ternary expression',
-        'consider-ternary-expression',
-        'Multiple assign statements spread across if/else blocks can be rewritten with a single assignment and ternary expression'
-        )}
+    name = "consider_ternary_expression"
+    msgs = {
+        "W0160": (
+            "Consider rewriting as a ternary expression",
+            "consider-ternary-expression",
+            "Multiple assign statements spread across if/else blocks can be "
+            "rewritten with a single assignment and ternary expression",
+        )
+    }
+
+    def visit_if(self, node: nodes.If) -> None:
+        if isinstance(node.parent, nodes.If):
+            return
+
+        if len(node.body) != 1 or len(node.orelse) != 1:
+            return
+
+        bst = node.body[0]
+        ost = node.orelse[0]
+
+        if not isinstance(bst, nodes.Assign) or not isinstance(ost, nodes.Assign):
+            return
+
+        for bname, oname in zip(bst.targets, ost.targets):
+            if not isinstance(bname, nodes.AssignName) or not isinstance(
+                oname, nodes.AssignName
+            ):
+                return
+
+            if bname.name != oname.name:
+                return
+
+        self.add_message("consider-ternary-expression", node=node)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(ConsiderTernaryExpressionChecker(linter))
diff --git a/pylint/extensions/dict_init_mutate.py b/pylint/extensions/dict_init_mutate.py
index e54d20198..4977e234b 100644
--- a/pylint/extensions/dict_init_mutate.py
+++ b/pylint/extensions/dict_init_mutate.py
@@ -1,27 +1,66 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Check for use of dictionary mutation after initialization."""
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.checkers.utils import only_required_for_messages
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter


 class DictInitMutateChecker(BaseChecker):
-    name = 'dict-init-mutate'
-    msgs = {'C3401': (
-        'Declare all known key/values when initializing the dictionary.',
-        'dict-init-mutate',
-        'Dictionaries can be initialized with a single statement using dictionary literal syntax.'
-        )}
-
-    @only_required_for_messages('dict-init-mutate')
-    def visit_assign(self, node: nodes.Assign) ->None:
+    name = "dict-init-mutate"
+    msgs = {
+        "C3401": (
+            "Declare all known key/values when initializing the dictionary.",
+            "dict-init-mutate",
+            "Dictionaries can be initialized with a single statement "
+            "using dictionary literal syntax.",
+        )
+    }
+
+    @only_required_for_messages("dict-init-mutate")
+    def visit_assign(self, node: nodes.Assign) -> None:
         """
         Detect dictionary mutation immediately after initialization.

         At this time, detecting nested mutation is not supported.
         """
-        pass
+        if not isinstance(node.value, nodes.Dict):
+            return
+
+        dict_name = node.targets[0]
+        if len(node.targets) != 1 or not isinstance(dict_name, nodes.AssignName):
+            return
+
+        first_sibling = node.next_sibling()
+        if (
+            not first_sibling
+            or not isinstance(first_sibling, nodes.Assign)
+            or len(first_sibling.targets) != 1
+        ):
+            return
+
+        sibling_target = first_sibling.targets[0]
+        if not isinstance(sibling_target, nodes.Subscript):
+            return
+
+        sibling_name = sibling_target.value
+        if not isinstance(sibling_name, nodes.Name):
+            return
+
+        if sibling_name.name == dict_name.name:
+            self.add_message("dict-init-mutate", node=node, confidence=HIGH)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(DictInitMutateChecker(linter))
diff --git a/pylint/extensions/docparams.py b/pylint/extensions/docparams.py
index 2c5c09d6b..5d672131a 100644
--- a/pylint/extensions/docparams.py
+++ b/pylint/extensions/docparams.py
@@ -1,14 +1,23 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Pylint plugin for checking in Sphinx, Google, or Numpy style docstrings."""
+
 from __future__ import annotations
+
 import re
 from typing import TYPE_CHECKING
+
 import astroid
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.checkers import utils as checker_utils
 from pylint.extensions import _check_docs_utils as utils
 from pylint.extensions._check_docs_utils import Docstring
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter

@@ -33,80 +42,361 @@ class DocstringParameterChecker(BaseChecker):

     to the ``MAIN`` section of your ``.pylintrc``.
     """
-    name = 'parameter_documentation'
-    msgs = {'W9005': (
-        '"%s" has constructor parameters documented in class and __init__',
-        'multiple-constructor-doc',
-        'Please remove parameter declarations in the class or constructor.'
-        ), 'W9006': ('"%s" not documented as being raised',
-        'missing-raises-doc',
-        'Please document exceptions for all raised exception types.'),
-        'W9008': ('Redundant returns documentation',
-        'redundant-returns-doc',
-        'Please remove the return/rtype documentation from this method.'),
-        'W9010': ('Redundant yields documentation', 'redundant-yields-doc',
-        'Please remove the yields documentation from this method.'),
-        'W9011': ('Missing return documentation', 'missing-return-doc',
-        'Please add documentation about what this method returns.', {
-        'old_names': [('W9007', 'old-missing-returns-doc')]}), 'W9012': (
-        'Missing return type documentation', 'missing-return-type-doc',
-        'Please document the type returned by this method.'), 'W9013': (
-        'Missing yield documentation', 'missing-yield-doc',
-        'Please add documentation about what this generator yields.', {
-        'old_names': [('W9009', 'old-missing-yields-doc')]}), 'W9014': (
-        'Missing yield type documentation', 'missing-yield-type-doc',
-        'Please document the type yielded by this method.'), 'W9015': (
-        '"%s" missing in parameter documentation', 'missing-param-doc',
-        'Please add parameter declarations for all parameters.', {
-        'old_names': [('W9003', 'old-missing-param-doc')]}), 'W9016': (
-        '"%s" missing in parameter type documentation', 'missing-type-doc',
-        'Please add parameter type declarations for all parameters.', {
-        'old_names': [('W9004', 'old-missing-type-doc')]}), 'W9017': (
-        '"%s" differing in parameter documentation', 'differing-param-doc',
-        'Please check parameter names in declarations.'), 'W9018': (
-        '"%s" differing in parameter type documentation',
-        'differing-type-doc',
-        'Please check parameter names in type declarations.'), 'W9019': (
-        '"%s" useless ignored parameter documentation', 'useless-param-doc',
-        'Please remove the ignored parameter documentation.'), 'W9020': (
-        '"%s" useless ignored parameter type documentation',
-        'useless-type-doc',
-        'Please remove the ignored parameter type documentation.'), 'W9021':
-        ('Missing any documentation in "%s"', 'missing-any-param-doc',
-        'Please add parameter and/or type documentation.')}
-    options = ('accept-no-param-doc', {'default': True, 'type': 'yn',
-        'metavar': '<y or n>', 'help':
-        'Whether to accept totally missing parameter documentation in the docstring of a function that has parameters.'
-        }), ('accept-no-raise-doc', {'default': True, 'type': 'yn',
-        'metavar': '<y or n>', 'help':
-        'Whether to accept totally missing raises documentation in the docstring of a function that raises an exception.'
-        }), ('accept-no-return-doc', {'default': True, 'type': 'yn',
-        'metavar': '<y or n>', 'help':
-        'Whether to accept totally missing return documentation in the docstring of a function that returns a statement.'
-        }), ('accept-no-yields-doc', {'default': True, 'type': 'yn',
-        'metavar': '<y or n>', 'help':
-        'Whether to accept totally missing yields documentation in the docstring of a generator.'
-        }), ('default-docstring-type', {'type': 'choice', 'default':
-        'default', 'metavar': '<docstring type>', 'choices': list(utils.
-        DOCSTRING_TYPES), 'help':
-        'If the docstring type cannot be guessed the specified docstring type will be used.'
-        })
-    constructor_names = {'__init__', '__new__'}
-    not_needed_param_in_docstring = {'self', 'cls'}
-
-    def visit_functiondef(self, node: nodes.FunctionDef) ->None:
+
+    name = "parameter_documentation"
+    msgs = {
+        "W9005": (
+            '"%s" has constructor parameters documented in class and __init__',
+            "multiple-constructor-doc",
+            "Please remove parameter declarations in the class or constructor.",
+        ),
+        "W9006": (
+            '"%s" not documented as being raised',
+            "missing-raises-doc",
+            "Please document exceptions for all raised exception types.",
+        ),
+        "W9008": (
+            "Redundant returns documentation",
+            "redundant-returns-doc",
+            "Please remove the return/rtype documentation from this method.",
+        ),
+        "W9010": (
+            "Redundant yields documentation",
+            "redundant-yields-doc",
+            "Please remove the yields documentation from this method.",
+        ),
+        "W9011": (
+            "Missing return documentation",
+            "missing-return-doc",
+            "Please add documentation about what this method returns.",
+            {"old_names": [("W9007", "old-missing-returns-doc")]},
+        ),
+        "W9012": (
+            "Missing return type documentation",
+            "missing-return-type-doc",
+            "Please document the type returned by this method.",
+            # we can't use the same old_name for two different warnings
+            # {'old_names': [('W9007', 'missing-returns-doc')]},
+        ),
+        "W9013": (
+            "Missing yield documentation",
+            "missing-yield-doc",
+            "Please add documentation about what this generator yields.",
+            {"old_names": [("W9009", "old-missing-yields-doc")]},
+        ),
+        "W9014": (
+            "Missing yield type documentation",
+            "missing-yield-type-doc",
+            "Please document the type yielded by this method.",
+            # we can't use the same old_name for two different warnings
+            # {'old_names': [('W9009', 'missing-yields-doc')]},
+        ),
+        "W9015": (
+            '"%s" missing in parameter documentation',
+            "missing-param-doc",
+            "Please add parameter declarations for all parameters.",
+            {"old_names": [("W9003", "old-missing-param-doc")]},
+        ),
+        "W9016": (
+            '"%s" missing in parameter type documentation',
+            "missing-type-doc",
+            "Please add parameter type declarations for all parameters.",
+            {"old_names": [("W9004", "old-missing-type-doc")]},
+        ),
+        "W9017": (
+            '"%s" differing in parameter documentation',
+            "differing-param-doc",
+            "Please check parameter names in declarations.",
+        ),
+        "W9018": (
+            '"%s" differing in parameter type documentation',
+            "differing-type-doc",
+            "Please check parameter names in type declarations.",
+        ),
+        "W9019": (
+            '"%s" useless ignored parameter documentation',
+            "useless-param-doc",
+            "Please remove the ignored parameter documentation.",
+        ),
+        "W9020": (
+            '"%s" useless ignored parameter type documentation',
+            "useless-type-doc",
+            "Please remove the ignored parameter type documentation.",
+        ),
+        "W9021": (
+            'Missing any documentation in "%s"',
+            "missing-any-param-doc",
+            "Please add parameter and/or type documentation.",
+        ),
+    }
+
+    options = (
+        (
+            "accept-no-param-doc",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Whether to accept totally missing parameter "
+                "documentation in the docstring of a function that has "
+                "parameters.",
+            },
+        ),
+        (
+            "accept-no-raise-doc",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Whether to accept totally missing raises "
+                "documentation in the docstring of a function that "
+                "raises an exception.",
+            },
+        ),
+        (
+            "accept-no-return-doc",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Whether to accept totally missing return "
+                "documentation in the docstring of a function that "
+                "returns a statement.",
+            },
+        ),
+        (
+            "accept-no-yields-doc",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Whether to accept totally missing yields "
+                "documentation in the docstring of a generator.",
+            },
+        ),
+        (
+            "default-docstring-type",
+            {
+                "type": "choice",
+                "default": "default",
+                "metavar": "<docstring type>",
+                "choices": list(utils.DOCSTRING_TYPES),
+                "help": "If the docstring type cannot be guessed "
+                "the specified docstring type will be used.",
+            },
+        ),
+    )
+
+    constructor_names = {"__init__", "__new__"}
+    not_needed_param_in_docstring = {"self", "cls"}
+
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
         """Called for function and method definitions (def).

         :param node: Node for a function or method definition in the AST
         :type node: :class:`astroid.scoped_nodes.Function`
         """
-        pass
+        if checker_utils.is_overload_stub(node):
+            return
+
+        node_doc = utils.docstringify(
+            node.doc_node, self.linter.config.default_docstring_type
+        )
+
+        # skip functions that match the 'no-docstring-rgx' config option
+        no_docstring_rgx = self.linter.config.no_docstring_rgx
+        if no_docstring_rgx and re.match(no_docstring_rgx, node.name):
+            return
+
+        # skip functions smaller than 'docstring-min-length'
+        lines = checker_utils.get_node_last_lineno(node) - node.lineno
+        max_lines = self.linter.config.docstring_min_length
+        if max_lines > -1 and lines < max_lines:
+            return
+
+        self.check_functiondef_params(node, node_doc)
+        self.check_functiondef_returns(node, node_doc)
+        self.check_functiondef_yields(node, node_doc)
+
     visit_asyncfunctiondef = visit_functiondef
+
+    def check_functiondef_params(
+        self, node: nodes.FunctionDef, node_doc: Docstring
+    ) -> None:
+        node_allow_no_param = None
+        if node.name in self.constructor_names:
+            class_node = checker_utils.node_frame_class(node)
+            if class_node is not None:
+                class_doc = utils.docstringify(
+                    class_node.doc_node, self.linter.config.default_docstring_type
+                )
+                self.check_single_constructor_params(class_doc, node_doc, class_node)
+
+                # __init__ or class docstrings can have no parameters documented
+                # as long as the other documents them.
+                node_allow_no_param = (
+                    class_doc.has_params()
+                    or class_doc.params_documented_elsewhere()
+                    or None
+                )
+                class_allow_no_param = (
+                    node_doc.has_params()
+                    or node_doc.params_documented_elsewhere()
+                    or None
+                )
+
+                self.check_arguments_in_docstring(
+                    class_doc, node.args, class_node, class_allow_no_param
+                )
+
+        self.check_arguments_in_docstring(
+            node_doc, node.args, node, node_allow_no_param
+        )
+
+    def check_functiondef_returns(
+        self, node: nodes.FunctionDef, node_doc: Docstring
+    ) -> None:
+        if (not node_doc.supports_yields and node.is_generator()) or node.is_abstract():
+            return
+
+        return_nodes = node.nodes_of_class(astroid.Return)
+        if (node_doc.has_returns() or node_doc.has_rtype()) and not any(
+            utils.returns_something(ret_node) for ret_node in return_nodes
+        ):
+            self.add_message("redundant-returns-doc", node=node, confidence=HIGH)
+
+    def check_functiondef_yields(
+        self, node: nodes.FunctionDef, node_doc: Docstring
+    ) -> None:
+        if not node_doc.supports_yields or node.is_abstract():
+            return
+
+        if (
+            node_doc.has_yields() or node_doc.has_yields_type()
+        ) and not node.is_generator():
+            self.add_message("redundant-yields-doc", node=node)
+
+    def visit_raise(self, node: nodes.Raise) -> None:
+        func_node = node.frame()
+        if not isinstance(func_node, astroid.FunctionDef):
+            return
+
+        # skip functions that match the 'no-docstring-rgx' config option
+        no_docstring_rgx = self.linter.config.no_docstring_rgx
+        if no_docstring_rgx and re.match(no_docstring_rgx, func_node.name):
+            return
+
+        expected_excs = utils.possible_exc_types(node)
+
+        if not expected_excs:
+            return
+
+        if not func_node.doc_node:
+            # If this is a property setter,
+            # the property should have the docstring instead.
+            property_ = utils.get_setters_property(func_node)
+            if property_:
+                func_node = property_
+
+        doc = utils.docstringify(
+            func_node.doc_node, self.linter.config.default_docstring_type
+        )
+
+        if self.linter.config.accept_no_raise_doc and not doc.exceptions():
+            return
+
+        if not doc.matching_sections():
+            if doc.doc:
+                missing = {exc.name for exc in expected_excs}
+                self._add_raise_message(missing, func_node)
+            return
+
+        found_excs_full_names = doc.exceptions()
+
+        # Extract just the class name, e.g. "error" from "re.error"
+        found_excs_class_names = {exc.split(".")[-1] for exc in found_excs_full_names}
+
+        missing_excs = set()
+        for expected in expected_excs:
+            for found_exc in found_excs_class_names:
+                if found_exc == expected.name:
+                    break
+                if any(found_exc == ancestor.name for ancestor in expected.ancestors()):
+                    break
+            else:
+                missing_excs.add(expected.name)
+
+        self._add_raise_message(missing_excs, func_node)
+
+    def visit_return(self, node: nodes.Return) -> None:
+        if not utils.returns_something(node):
+            return
+
+        if self.linter.config.accept_no_return_doc:
+            return
+
+        func_node: astroid.FunctionDef = node.frame()
+
+        # skip functions that match the 'no-docstring-rgx' config option
+        no_docstring_rgx = self.linter.config.no_docstring_rgx
+        if no_docstring_rgx and re.match(no_docstring_rgx, func_node.name):
+            return
+
+        doc = utils.docstringify(
+            func_node.doc_node, self.linter.config.default_docstring_type
+        )
+
+        is_property = checker_utils.decorated_with_property(func_node)
+
+        if not (doc.has_returns() or (doc.has_property_returns() and is_property)):
+            self.add_message("missing-return-doc", node=func_node, confidence=HIGH)
+
+        if func_node.returns or func_node.type_comment_returns:
+            return
+
+        if not (doc.has_rtype() or (doc.has_property_type() and is_property)):
+            self.add_message("missing-return-type-doc", node=func_node, confidence=HIGH)
+
+    def visit_yield(self, node: nodes.Yield | nodes.YieldFrom) -> None:
+        if self.linter.config.accept_no_yields_doc:
+            return
+
+        func_node: astroid.FunctionDef = node.frame()
+
+        # skip functions that match the 'no-docstring-rgx' config option
+        no_docstring_rgx = self.linter.config.no_docstring_rgx
+        if no_docstring_rgx and re.match(no_docstring_rgx, func_node.name):
+            return
+
+        doc = utils.docstringify(
+            func_node.doc_node, self.linter.config.default_docstring_type
+        )
+
+        if doc.supports_yields:
+            doc_has_yields = doc.has_yields()
+            doc_has_yields_type = doc.has_yields_type()
+        else:
+            doc_has_yields = doc.has_returns()
+            doc_has_yields_type = doc.has_rtype()
+
+        if not doc_has_yields:
+            self.add_message("missing-yield-doc", node=func_node, confidence=HIGH)
+
+        if not (
+            doc_has_yields_type or func_node.returns or func_node.type_comment_returns
+        ):
+            self.add_message("missing-yield-type-doc", node=func_node, confidence=HIGH)
+
     visit_yieldfrom = visit_yield

-    def _compare_missing_args(self, found_argument_names: set[str],
-        message_id: str, not_needed_names: set[str],
-        expected_argument_names: set[str], warning_node: nodes.NodeNG) ->None:
+    def _compare_missing_args(
+        self,
+        found_argument_names: set[str],
+        message_id: str,
+        not_needed_names: set[str],
+        expected_argument_names: set[str],
+        warning_node: nodes.NodeNG,
+    ) -> None:
         """Compare the found argument names with the expected ones and
         generate a message if there are arguments missing.

@@ -120,11 +410,33 @@ class DocstringParameterChecker(BaseChecker):

         :param warning_node: The node to be analyzed
         """
-        pass
+        potential_missing_argument_names = (
+            expected_argument_names - found_argument_names
+        ) - not_needed_names
+
+        # Handle variadic and keyword args without asterisks
+        missing_argument_names = set()
+        for name in potential_missing_argument_names:
+            if name.replace("*", "") in found_argument_names:
+                continue
+            missing_argument_names.add(name)

-    def _compare_different_args(self, found_argument_names: set[str],
-        message_id: str, not_needed_names: set[str],
-        expected_argument_names: set[str], warning_node: nodes.NodeNG) ->None:
+        if missing_argument_names:
+            self.add_message(
+                message_id,
+                args=(", ".join(sorted(missing_argument_names)),),
+                node=warning_node,
+                confidence=HIGH,
+            )
+
+    def _compare_different_args(
+        self,
+        found_argument_names: set[str],
+        message_id: str,
+        not_needed_names: set[str],
+        expected_argument_names: set[str],
+        warning_node: nodes.NodeNG,
+    ) -> None:
         """Compare the found argument names with the expected ones and
         generate a message if there are extra arguments found.

@@ -138,11 +450,35 @@ class DocstringParameterChecker(BaseChecker):

         :param warning_node: The node to be analyzed
         """
-        pass
+        # Handle variadic and keyword args without asterisks
+        modified_expected_argument_names: set[str] = set()
+        for name in expected_argument_names:
+            if name.replace("*", "") in found_argument_names:
+                modified_expected_argument_names.add(name.replace("*", ""))
+            else:
+                modified_expected_argument_names.add(name)
+
+        differing_argument_names = (
+            (modified_expected_argument_names ^ found_argument_names)
+            - not_needed_names
+            - expected_argument_names
+        )

-    def _compare_ignored_args(self, found_argument_names: set[str],
-        message_id: str, ignored_argument_names: set[str], warning_node:
-        nodes.NodeNG) ->None:
+        if differing_argument_names:
+            self.add_message(
+                message_id,
+                args=(", ".join(sorted(differing_argument_names)),),
+                node=warning_node,
+                confidence=HIGH,
+            )
+
+    def _compare_ignored_args(  # pylint: disable=useless-param-doc
+        self,
+        found_argument_names: set[str],
+        message_id: str,
+        ignored_argument_names: set[str],
+        warning_node: nodes.NodeNG,
+    ) -> None:
         """Compare the found argument names with the ignored ones and
         generate a message if there are ignored arguments found.

@@ -151,11 +487,23 @@ class DocstringParameterChecker(BaseChecker):
         :param ignored_argument_names: Expected argument names
         :param warning_node: The node to be analyzed
         """
-        pass
+        existing_ignored_argument_names = ignored_argument_names & found_argument_names
+
+        if existing_ignored_argument_names:
+            self.add_message(
+                message_id,
+                args=(", ".join(sorted(existing_ignored_argument_names)),),
+                node=warning_node,
+                confidence=HIGH,
+            )

-    def check_arguments_in_docstring(self, doc: Docstring, arguments_node:
-        astroid.Arguments, warning_node: astroid.NodeNG,
-        accept_no_param_doc: (bool | None)=None) ->None:
+    def check_arguments_in_docstring(
+        self,
+        doc: Docstring,
+        arguments_node: astroid.Arguments,
+        warning_node: astroid.NodeNG,
+        accept_no_param_doc: bool | None = None,
+    ) -> None:
         """Check that all parameters are consistent with the parameters mentioned
         in the parameter documentation (e.g. the Sphinx tags 'param' and 'type').

@@ -186,13 +534,140 @@ class DocstringParameterChecker(BaseChecker):
             documented. If None then this value is read from the configuration.
         :type accept_no_param_doc: bool or None
         """
-        pass
+        # Tolerate missing param or type declarations if there is a link to
+        # another method carrying the same name.
+        if not doc.doc:
+            return
+
+        if accept_no_param_doc is None:
+            accept_no_param_doc = self.linter.config.accept_no_param_doc
+        tolerate_missing_params = doc.params_documented_elsewhere()
+
+        # Collect the function arguments.
+        expected_argument_names = {arg.name for arg in arguments_node.args}
+        expected_argument_names.update(
+            a.name for a in arguments_node.posonlyargs + arguments_node.kwonlyargs
+        )
+        not_needed_type_in_docstring = self.not_needed_param_in_docstring.copy()

-    def _add_raise_message(self, missing_exceptions: set[str], node: nodes.
-        FunctionDef) ->None:
+        expected_but_ignored_argument_names = set()
+        ignored_argument_names = self.linter.config.ignored_argument_names
+        if ignored_argument_names:
+            expected_but_ignored_argument_names = {
+                arg
+                for arg in expected_argument_names
+                if ignored_argument_names.match(arg)
+            }
+
+        if arguments_node.vararg is not None:
+            expected_argument_names.add(f"*{arguments_node.vararg}")
+            not_needed_type_in_docstring.add(f"*{arguments_node.vararg}")
+        if arguments_node.kwarg is not None:
+            expected_argument_names.add(f"**{arguments_node.kwarg}")
+            not_needed_type_in_docstring.add(f"**{arguments_node.kwarg}")
+        params_with_doc, params_with_type = doc.match_param_docs()
+        # Tolerate no parameter documentation at all.
+        if not params_with_doc and not params_with_type and accept_no_param_doc:
+            tolerate_missing_params = True
+
+        # This is before the update of params_with_type because this must check only
+        # the type documented in a docstring, not the one using pep484
+        # See #4117 and #4593
+        self._compare_ignored_args(
+            params_with_type,
+            "useless-type-doc",
+            expected_but_ignored_argument_names,
+            warning_node,
+        )
+        params_with_type |= utils.args_with_annotation(arguments_node)
+
+        if not tolerate_missing_params:
+            missing_param_doc = (expected_argument_names - params_with_doc) - (
+                self.not_needed_param_in_docstring | expected_but_ignored_argument_names
+            )
+            missing_type_doc = (expected_argument_names - params_with_type) - (
+                not_needed_type_in_docstring | expected_but_ignored_argument_names
+            )
+            if (
+                missing_param_doc == expected_argument_names == missing_type_doc
+                and len(expected_argument_names) != 0
+            ):
+                self.add_message(
+                    "missing-any-param-doc",
+                    args=(warning_node.name,),
+                    node=warning_node,
+                    confidence=HIGH,
+                )
+            else:
+                self._compare_missing_args(
+                    params_with_doc,
+                    "missing-param-doc",
+                    self.not_needed_param_in_docstring
+                    | expected_but_ignored_argument_names,
+                    expected_argument_names,
+                    warning_node,
+                )
+                self._compare_missing_args(
+                    params_with_type,
+                    "missing-type-doc",
+                    not_needed_type_in_docstring | expected_but_ignored_argument_names,
+                    expected_argument_names,
+                    warning_node,
+                )
+
+        self._compare_different_args(
+            params_with_doc,
+            "differing-param-doc",
+            self.not_needed_param_in_docstring,
+            expected_argument_names,
+            warning_node,
+        )
+        self._compare_different_args(
+            params_with_type,
+            "differing-type-doc",
+            not_needed_type_in_docstring,
+            expected_argument_names,
+            warning_node,
+        )
+        self._compare_ignored_args(
+            params_with_doc,
+            "useless-param-doc",
+            expected_but_ignored_argument_names,
+            warning_node,
+        )
+
+    def check_single_constructor_params(
+        self, class_doc: Docstring, init_doc: Docstring, class_node: nodes.ClassDef
+    ) -> None:
+        if class_doc.has_params() and init_doc.has_params():
+            self.add_message(
+                "multiple-constructor-doc",
+                args=(class_node.name,),
+                node=class_node,
+                confidence=HIGH,
+            )
+
+    def _add_raise_message(
+        self, missing_exceptions: set[str], node: nodes.FunctionDef
+    ) -> None:
         """Adds a message on :param:`node` for the missing exception type.

         :param missing_exceptions: A list of missing exception types.
         :param node: The node show the message on.
         """
-        pass
+        if node.is_abstract():
+            try:
+                missing_exceptions.remove("NotImplementedError")
+            except KeyError:
+                pass
+        if missing_exceptions:
+            self.add_message(
+                "missing-raises-doc",
+                args=(", ".join(sorted(missing_exceptions)),),
+                node=node,
+                confidence=HIGH,
+            )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(DocstringParameterChecker(linter))
diff --git a/pylint/extensions/docstyle.py b/pylint/extensions/docstyle.py
index c00ca565d..c54ab93b2 100644
--- a/pylint/extensions/docstyle.py
+++ b/pylint/extensions/docstyle.py
@@ -1,21 +1,89 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import linecache
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint import checkers
 from pylint.checkers.utils import only_required_for_messages
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 class DocStringStyleChecker(checkers.BaseChecker):
     """Checks format of docstrings based on PEP 0257."""
-    name = 'docstyle'
-    msgs = {'C0198': ('Bad docstring quotes in %s, expected """, given %s',
-        'bad-docstring-quotes',
-        'Used when a docstring does not have triple double quotes.'),
-        'C0199': ('First line empty in %s docstring',
-        'docstring-first-line-empty',
-        'Used when a blank line is found at the beginning of a docstring.')}
+
+    name = "docstyle"
+
+    msgs = {
+        "C0198": (
+            'Bad docstring quotes in %s, expected """, given %s',
+            "bad-docstring-quotes",
+            "Used when a docstring does not have triple double quotes.",
+        ),
+        "C0199": (
+            "First line empty in %s docstring",
+            "docstring-first-line-empty",
+            "Used when a blank line is found at the beginning of a docstring.",
+        ),
+    }
+
+    @only_required_for_messages("docstring-first-line-empty", "bad-docstring-quotes")
+    def visit_module(self, node: nodes.Module) -> None:
+        self._check_docstring("module", node)
+
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
+        self._check_docstring("class", node)
+
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
+        ftype = "method" if node.is_method() else "function"
+        self._check_docstring(ftype, node)
+
     visit_asyncfunctiondef = visit_functiondef
+
+    def _check_docstring(
+        self, node_type: str, node: nodes.Module | nodes.ClassDef | nodes.FunctionDef
+    ) -> None:
+        docstring = node.doc_node.value if node.doc_node else None
+        if docstring and docstring[0] == "\n":
+            self.add_message(
+                "docstring-first-line-empty",
+                node=node,
+                args=(node_type,),
+                confidence=HIGH,
+            )
+
+        # Use "linecache", instead of node.as_string(), because the latter
+        # loses the original form of the docstrings.
+
+        if docstring:
+            lineno = node.fromlineno + 1
+            line = linecache.getline(node.root().file, lineno).lstrip()
+            if line and line.find('"""') == 0:
+                return
+            if line and "'''" in line:
+                quotes = "'''"
+            elif line and line[0] == '"':
+                quotes = '"'
+            elif line and line[0] == "'":
+                quotes = "'"
+            else:
+                quotes = ""
+            if quotes:
+                self.add_message(
+                    "bad-docstring-quotes",
+                    node=node,
+                    args=(node_type, quotes),
+                    confidence=HIGH,
+                )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(DocStringStyleChecker(linter))
diff --git a/pylint/extensions/dunder.py b/pylint/extensions/dunder.py
index 40e8f7fe8..1683f8147 100644
--- a/pylint/extensions/dunder.py
+++ b/pylint/extensions/dunder.py
@@ -1,26 +1,76 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.constants import DUNDER_METHODS, DUNDER_PROPERTIES, EXTRA_DUNDER_METHODS
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 class DunderChecker(BaseChecker):
     """Checks related to dunder methods."""
-    name = 'dunder'
-    msgs = {'W3201': ('Bad or misspelled dunder method name %s.',
-        'bad-dunder-name',
-        'Used when a dunder method is misspelled or defined with a name not within the predefined list of dunder names.'
-        )}
-    options = ('good-dunder-names', {'default': [], 'type': 'csv',
-        'metavar': '<comma-separated names>', 'help':
-        'Good dunder names which should always be accepted.'}),
-
-    def visit_functiondef(self, node: nodes.FunctionDef) ->None:
+
+    name = "dunder"
+    msgs = {
+        "W3201": (
+            "Bad or misspelled dunder method name %s.",
+            "bad-dunder-name",
+            "Used when a dunder method is misspelled or defined with a name "
+            "not within the predefined list of dunder names.",
+        ),
+    }
+    options = (
+        (
+            "good-dunder-names",
+            {
+                "default": [],
+                "type": "csv",
+                "metavar": "<comma-separated names>",
+                "help": "Good dunder names which should always be accepted.",
+            },
+        ),
+    )
+
+    def open(self) -> None:
+        self._dunder_methods = (
+            EXTRA_DUNDER_METHODS
+            + DUNDER_PROPERTIES
+            + self.linter.config.good_dunder_names
+        )
+        for since_vers, dunder_methods in DUNDER_METHODS.items():
+            if since_vers <= self.linter.config.py_version:
+                self._dunder_methods.extend(list(dunder_methods.keys()))
+
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
         """Check if known dunder method is misspelled or dunder name is not one
         of the pre-defined names.
         """
-        pass
+        # ignore module-level functions
+        if not node.is_method():
+            return
+
+        # Detect something that could be a bad dunder method
+        if (
+            node.name.startswith("_")
+            and node.name.endswith("_")
+            and node.name not in self._dunder_methods
+        ):
+            self.add_message(
+                "bad-dunder-name",
+                node=node,
+                args=(node.name),
+                confidence=HIGH,
+            )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(DunderChecker(linter))
diff --git a/pylint/extensions/empty_comment.py b/pylint/extensions/empty_comment.py
index 54f595eb6..7f54322ae 100644
--- a/pylint/extensions/empty_comment.py
+++ b/pylint/extensions/empty_comment.py
@@ -1,24 +1,63 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseRawFileChecker
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


-def is_line_commented(line: bytes) ->bool:
+def is_line_commented(line: bytes) -> bool:
     """Checks if a `# symbol that is not part of a string was found in line."""
-    pass
+    comment_idx = line.find(b"#")
+    if comment_idx == -1:
+        return False
+    if comment_part_of_string(line, comment_idx):
+        return is_line_commented(line[:comment_idx] + line[comment_idx + 1 :])
+    return True


-def comment_part_of_string(line: bytes, comment_idx: int) ->bool:
+def comment_part_of_string(line: bytes, comment_idx: int) -> bool:
     """Checks if the symbol at comment_idx is part of a string."""
-    pass
+    if (
+        line[:comment_idx].count(b"'") % 2 == 1
+        and line[comment_idx:].count(b"'") % 2 == 1
+    ) or (
+        line[:comment_idx].count(b'"') % 2 == 1
+        and line[comment_idx:].count(b'"') % 2 == 1
+    ):
+        return True
+    return False


 class CommentChecker(BaseRawFileChecker):
-    name = 'empty-comment'
-    msgs = {'R2044': ('Line with empty comment', 'empty-comment',
-        'Used when a # symbol appears on a line not followed by an actual comment'
-        )}
+    name = "empty-comment"
+    msgs = {
+        "R2044": (
+            "Line with empty comment",
+            "empty-comment",
+            (
+                "Used when a # symbol appears on a line not followed by an actual comment"
+            ),
+        )
+    }
     options = ()
+
+    def process_module(self, node: nodes.Module) -> None:
+        with node.stream() as stream:
+            for line_num, line in enumerate(stream):
+                line = line.rstrip()
+                if line.endswith(b"#"):
+                    if not is_line_commented(line[:-1]):
+                        self.add_message("empty-comment", line=line_num + 1)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(CommentChecker(linter))
diff --git a/pylint/extensions/eq_without_hash.py b/pylint/extensions/eq_without_hash.py
index 39c430d15..5f39dfa3e 100644
--- a/pylint/extensions/eq_without_hash.py
+++ b/pylint/extensions/eq_without_hash.py
@@ -1,19 +1,39 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """This is the remnant of the python3 checker.

 It was removed because the transition from python 2 to python3 is
 behind us, but some checks are still useful in python3 after all.
 See https://github.com/pylint-dev/pylint/issues/5025
 """
+
 from astroid import nodes
+
 from pylint import checkers, interfaces
 from pylint.checkers import utils
 from pylint.lint import PyLinter


 class EqWithoutHash(checkers.BaseChecker):
-    name = 'eq-without-hash'
-    msgs = {'W1641': (
-        'Implementing __eq__ without also implementing __hash__',
-        'eq-without-hash',
-        'Used when a class implements __eq__ but not __hash__. Objects get None as their default __hash__ implementation if they also implement __eq__.'
-        )}
+    name = "eq-without-hash"
+
+    msgs = {
+        "W1641": (
+            "Implementing __eq__ without also implementing __hash__",
+            "eq-without-hash",
+            "Used when a class implements __eq__ but not __hash__. Objects get "
+            "None as their default __hash__ implementation if they also implement __eq__.",
+        ),
+    }
+
+    @utils.only_required_for_messages("eq-without-hash")
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
+        locals_and_methods = set(node.locals).union(x.name for x in node.mymethods())
+        if "__eq__" in locals_and_methods and "__hash__" not in locals_and_methods:
+            self.add_message("eq-without-hash", node=node, confidence=interfaces.HIGH)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(EqWithoutHash(linter))
diff --git a/pylint/extensions/for_any_all.py b/pylint/extensions/for_any_all.py
index 8176cac2e..2369a595d 100644
--- a/pylint/extensions/for_any_all.py
+++ b/pylint/extensions/for_any_all.py
@@ -1,24 +1,78 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Check for use of for loops that only check for a condition."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
-from pylint.checkers.utils import assigned_bool, only_required_for_messages, returns_bool
+from pylint.checkers.utils import (
+    assigned_bool,
+    only_required_for_messages,
+    returns_bool,
+)
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter


 class ConsiderUsingAnyOrAllChecker(BaseChecker):
-    name = 'consider-using-any-or-all'
-    msgs = {'C0501': ('`for` loop could be `%s`',
-        'consider-using-any-or-all',
-        'A for loop that checks for a condition and return a bool can be replaced with any or all.'
-        )}
+    name = "consider-using-any-or-all"
+    msgs = {
+        "C0501": (
+            "`for` loop could be `%s`",
+            "consider-using-any-or-all",
+            "A for loop that checks for a condition and return a bool can be replaced with any or all.",
+        )
+    }
+
+    @only_required_for_messages("consider-using-any-or-all")
+    def visit_for(self, node: nodes.For) -> None:
+        if len(node.body) != 1:  # Only If node with no Else
+            return
+        if not isinstance(node.body[0], nodes.If):
+            return
+
+        if_children = list(node.body[0].get_children())
+        if any(isinstance(child, nodes.If) for child in if_children):
+            # an if node within the if-children indicates an elif clause,
+            # suggesting complex logic.
+            return
+
+        node_after_loop = node.next_sibling()
+
+        if self._assigned_reassigned_returned(node, if_children, node_after_loop):
+            final_return_bool = node_after_loop.value.name
+            suggested_string = self._build_suggested_string(node, final_return_bool)
+            self.add_message(
+                "consider-using-any-or-all",
+                node=node,
+                args=suggested_string,
+                confidence=HIGH,
+            )
+            return
+
+        if self._if_statement_returns_bool(if_children, node_after_loop):
+            final_return_bool = node_after_loop.value.value
+            suggested_string = self._build_suggested_string(node, final_return_bool)
+            self.add_message(
+                "consider-using-any-or-all",
+                node=node,
+                args=suggested_string,
+                confidence=HIGH,
+            )
+            return

     @staticmethod
-    def _if_statement_returns_bool(if_children: list[nodes.NodeNG],
-        node_after_loop: nodes.NodeNG) ->bool:
+    def _if_statement_returns_bool(
+        if_children: list[nodes.NodeNG], node_after_loop: nodes.NodeNG
+    ) -> bool:
         """Detect for-loop, if-statement, return pattern:

         Ex:
@@ -28,11 +82,19 @@ class ConsiderUsingAnyOrAllChecker(BaseChecker):
                         return True
                 return False
         """
-        pass
+        if not len(if_children) == 2:
+            # The If node has only a comparison and return
+            return False
+        if not returns_bool(if_children[1]):
+            return False
+
+        # Check for terminating boolean return right after the loop
+        return returns_bool(node_after_loop)

     @staticmethod
-    def _assigned_reassigned_returned(node: nodes.For, if_children: list[
-        nodes.NodeNG], node_after_loop: nodes.NodeNG) ->bool:
+    def _assigned_reassigned_returned(
+        node: nodes.For, if_children: list[nodes.NodeNG], node_after_loop: nodes.NodeNG
+    ) -> bool:
         """Detect boolean-assign, for-loop, re-assign, return pattern:

         Ex:
@@ -44,15 +106,57 @@ class ConsiderUsingAnyOrAllChecker(BaseChecker):
                     # no elif / else statement
                 return long_line
         """
-        pass
+        node_before_loop = node.previous_sibling()
+
+        if not assigned_bool(node_before_loop):
+            # node before loop isn't assigning to boolean
+            return False
+
+        assign_children = [x for x in if_children if isinstance(x, nodes.Assign)]
+        if not assign_children:
+            # if-nodes inside loop aren't assignments
+            return False
+
+        # We only care for the first assign node of the if-children. Otherwise it breaks the pattern.
+        first_target = assign_children[0].targets[0]
+        target_before_loop = node_before_loop.targets[0]
+
+        if not (
+            isinstance(first_target, nodes.AssignName)
+            and isinstance(target_before_loop, nodes.AssignName)
+        ):
+            return False
+
+        node_before_loop_name = node_before_loop.targets[0].name
+        return (
+            first_target.name == node_before_loop_name
+            and isinstance(node_after_loop, nodes.Return)
+            and isinstance(node_after_loop.value, nodes.Name)
+            and node_after_loop.value.name == node_before_loop_name
+        )

     @staticmethod
-    def _build_suggested_string(node: nodes.For, final_return_bool: bool
-        ) ->str:
+    def _build_suggested_string(node: nodes.For, final_return_bool: bool) -> str:
         """When a nodes.For node can be rewritten as an any/all statement, return a
         suggestion for that statement.

         'final_return_bool' is the boolean literal returned after the for loop if all
         conditions fail.
         """
-        pass
+        loop_var = node.target.as_string()
+        loop_iter = node.iter.as_string()
+        test_node = next(node.body[0].get_children())
+
+        if isinstance(test_node, nodes.UnaryOp) and test_node.op == "not":
+            # The condition is negated. Advance the node to the operand and modify the suggestion
+            test_node = test_node.operand
+            suggested_function = "all" if final_return_bool else "not all"
+        else:
+            suggested_function = "not any" if final_return_bool else "any"
+
+        test = test_node.as_string()
+        return f"{suggested_function}({test} for {loop_var} in {loop_iter})"
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(ConsiderUsingAnyOrAllChecker(linter))
diff --git a/pylint/extensions/magic_value.py b/pylint/extensions/magic_value.py
index 285fb59f4..fd18476ae 100644
--- a/pylint/extensions/magic_value.py
+++ b/pylint/extensions/magic_value.py
@@ -1,35 +1,119 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Checks for magic values instead of literals."""
+
 from __future__ import annotations
+
 from re import match as regex_match
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker, utils
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 class MagicValueChecker(BaseChecker):
     """Checks for constants in comparisons."""
-    name = 'magic-value'
-    msgs = {'R2004': (
-        "Consider using a named constant or an enum instead of '%s'.",
-        'magic-value-comparison',
-        'Using named constants instead of magic values helps improve readability and maintainability of your code, try to avoid them in comparisons.'
-        )}
-    options = ('valid-magic-values', {'default': (0, -1, 1, '', '__main__'),
-        'type': 'csv', 'metavar': '<argument names>', 'help':
-        "List of valid magic values that `magic-value-compare` will not detect. Supports integers, floats, negative numbers, for empty string enter ``''``, for backslash values just use one backslash e.g \\n."
-        }),
-
-    def __init__(self, linter: PyLinter) ->None:
+
+    name = "magic-value"
+    msgs = {
+        "R2004": (
+            "Consider using a named constant or an enum instead of '%s'.",
+            "magic-value-comparison",
+            "Using named constants instead of magic values helps improve readability and maintainability of your"
+            " code, try to avoid them in comparisons.",
+        )
+    }
+
+    options = (
+        (
+            "valid-magic-values",
+            {
+                "default": (0, -1, 1, "", "__main__"),
+                "type": "csv",
+                "metavar": "<argument names>",
+                "help": "List of valid magic values that `magic-value-compare` will not detect. "
+                "Supports integers, floats, negative numbers, for empty string enter ``''``,"
+                " for backslash values just use one backslash e.g \\n.",
+            },
+        ),
+    )
+
+    def __init__(self, linter: PyLinter) -> None:
         """Initialize checker instance."""
         super().__init__(linter=linter)
         self.valid_magic_vals: tuple[float | str, ...] = ()

-    def _check_constants_comparison(self, node: nodes.Compare) ->None:
+    def open(self) -> None:
+        # Extra manipulation is needed in case of using external configuration like an rcfile
+        if self._magic_vals_ext_configured():
+            self.valid_magic_vals = tuple(
+                self._parse_rcfile_magic_numbers(value)
+                for value in self.linter.config.valid_magic_values
+            )
+        else:
+            self.valid_magic_vals = self.linter.config.valid_magic_values
+
+    def _magic_vals_ext_configured(self) -> bool:
+        return not isinstance(self.linter.config.valid_magic_values, tuple)
+
+    def _check_constants_comparison(self, node: nodes.Compare) -> None:
         """
         Magic values in any side of the comparison should be avoided,
         Detects comparisons that `comparison-of-constants` core checker cannot detect.
         """
-        pass
+        const_operands = []
+        LEFT_OPERAND = 0
+        RIGHT_OPERAND = 1
+
+        left_operand = node.left
+        const_operands.append(isinstance(left_operand, nodes.Const))
+
+        right_operand = node.ops[0][1]
+        const_operands.append(isinstance(right_operand, nodes.Const))
+
+        if all(const_operands):
+            # `comparison-of-constants` avoided
+            return
+
+        operand_value = None
+        if const_operands[LEFT_OPERAND] and self._is_magic_value(left_operand):
+            operand_value = left_operand.as_string()
+        elif const_operands[RIGHT_OPERAND] and self._is_magic_value(right_operand):
+            operand_value = right_operand.as_string()
+        if operand_value is not None:
+            self.add_message(
+                "magic-value-comparison",
+                node=node,
+                args=(operand_value),
+                confidence=HIGH,
+            )
+
+    def _is_magic_value(self, node: nodes.Const) -> bool:
+        return (not utils.is_singleton_const(node)) and (
+            node.value not in (self.valid_magic_vals)
+        )
+
+    @staticmethod
+    def _parse_rcfile_magic_numbers(parsed_val: str) -> float | str:
+        parsed_val = parsed_val.encode().decode("unicode_escape")
+
+        if parsed_val.startswith("'") and parsed_val.endswith("'"):
+            return parsed_val[1:-1]
+
+        is_number = regex_match(r"[-+]?\d+(\.0*)?$", parsed_val)
+        return float(parsed_val) if is_number else parsed_val
+
+    @utils.only_required_for_messages("magic-comparison")
+    def visit_compare(self, node: nodes.Compare) -> None:
+        self._check_constants_comparison(node)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(MagicValueChecker(linter))
diff --git a/pylint/extensions/mccabe.py b/pylint/extensions/mccabe.py
index 789246edf..9489f24d6 100644
--- a/pylint/extensions/mccabe.py
+++ b/pylint/extensions/mccabe.py
@@ -1,71 +1,212 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Module to add McCabe checker class for pylint."""
+
 from __future__ import annotations
+
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any, TypeVar, Union
+
 from astroid import nodes
 from mccabe import PathGraph as Mccabe_PathGraph
 from mccabe import PathGraphingAstVisitor as Mccabe_PathGraphingAstVisitor
+
 from pylint import checkers
 from pylint.checkers.utils import only_required_for_messages
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
-_StatementNodes = Union[nodes.Assert, nodes.Assign, nodes.AugAssign, nodes.
-    Delete, nodes.Raise, nodes.Yield, nodes.Import, nodes.Call, nodes.
-    Subscript, nodes.Pass, nodes.Continue, nodes.Break, nodes.Global, nodes
-    .Return, nodes.Expr, nodes.Await]
-_SubGraphNodes = Union[nodes.If, nodes.Try, nodes.For, nodes.While]
-_AppendableNodeT = TypeVar('_AppendableNodeT', bound=Union[_StatementNodes,
-    nodes.While, nodes.FunctionDef])

+_StatementNodes = Union[
+    nodes.Assert,
+    nodes.Assign,
+    nodes.AugAssign,
+    nodes.Delete,
+    nodes.Raise,
+    nodes.Yield,
+    nodes.Import,
+    nodes.Call,
+    nodes.Subscript,
+    nodes.Pass,
+    nodes.Continue,
+    nodes.Break,
+    nodes.Global,
+    nodes.Return,
+    nodes.Expr,
+    nodes.Await,
+]

-class PathGraph(Mccabe_PathGraph):
+_SubGraphNodes = Union[nodes.If, nodes.Try, nodes.For, nodes.While]
+_AppendableNodeT = TypeVar(
+    "_AppendableNodeT", bound=Union[_StatementNodes, nodes.While, nodes.FunctionDef]
+)

-    def __init__(self, node: (_SubGraphNodes | nodes.FunctionDef)):
-        super().__init__(name='', entity='', lineno=1)
-        self.root = node

+class PathGraph(Mccabe_PathGraph):  # type: ignore[misc]
+    def __init__(self, node: _SubGraphNodes | nodes.FunctionDef):
+        super().__init__(name="", entity="", lineno=1)
+        self.root = node

-class PathGraphingAstVisitor(Mccabe_PathGraphingAstVisitor):

-    def __init__(self) ->None:
+class PathGraphingAstVisitor(Mccabe_PathGraphingAstVisitor):  # type: ignore[misc]
+    def __init__(self) -> None:
         super().__init__()
         self._bottom_counter = 0
         self.graph: PathGraph | None = None
+
+    def default(self, node: nodes.NodeNG, *args: Any) -> None:
+        for child in node.get_children():
+            self.dispatch(child, *args)
+
+    def dispatch(self, node: nodes.NodeNG, *args: Any) -> Any:
+        self.node = node
+        klass = node.__class__
+        meth = self._cache.get(klass)
+        if meth is None:
+            class_name = klass.__name__
+            meth = getattr(self.visitor, "visit" + class_name, self.default)
+            self._cache[klass] = meth
+        return meth(node, *args)
+
+    def visitFunctionDef(self, node: nodes.FunctionDef) -> None:
+        if self.graph is not None:
+            # closure
+            pathnode = self._append_node(node)
+            self.tail = pathnode
+            self.dispatch_list(node.body)
+            bottom = f"{self._bottom_counter}"
+            self._bottom_counter += 1
+            self.graph.connect(self.tail, bottom)
+            self.graph.connect(node, bottom)
+            self.tail = bottom
+        else:
+            self.graph = PathGraph(node)
+            self.tail = node
+            self.dispatch_list(node.body)
+            self.graphs[f"{self.classname}{node.name}"] = self.graph
+            self.reset()
+
     visitAsyncFunctionDef = visitFunctionDef
-    (visitAssert) = (visitAssign) = (visitAugAssign) = (visitDelete) = (
-        visitRaise) = (visitYield) = (visitImport) = (visitCall) = (
-        visitSubscript) = (visitPass) = (visitContinue) = (visitBreak) = (
-        visitGlobal) = (visitReturn) = (visitExpr) = (visitAwait
-        ) = visitSimpleStatement
+
+    def visitSimpleStatement(self, node: _StatementNodes) -> None:
+        self._append_node(node)
+
+    visitAssert = visitAssign = visitAugAssign = visitDelete = visitRaise = (
+        visitYield
+    ) = visitImport = visitCall = visitSubscript = visitPass = visitContinue = (
+        visitBreak
+    ) = visitGlobal = visitReturn = visitExpr = visitAwait = visitSimpleStatement
+
+    def visitWith(self, node: nodes.With) -> None:
+        self._append_node(node)
+        self.dispatch_list(node.body)
+
     visitAsyncWith = visitWith

-    def _subgraph(self, node: _SubGraphNodes, name: str, extra_blocks:
-        Sequence[nodes.ExceptHandler]=()) ->None:
+    def _append_node(self, node: _AppendableNodeT) -> _AppendableNodeT | None:
+        if not self.tail or not self.graph:
+            return None
+        self.graph.connect(self.tail, node)
+        self.tail = node
+        return node
+
+    def _subgraph(
+        self,
+        node: _SubGraphNodes,
+        name: str,
+        extra_blocks: Sequence[nodes.ExceptHandler] = (),
+    ) -> None:
         """Create the subgraphs representing any `if` and `for` statements."""
-        pass
+        if self.graph is None:
+            # global loop
+            self.graph = PathGraph(node)
+            self._subgraph_parse(node, node, extra_blocks)
+            self.graphs[f"{self.classname}{name}"] = self.graph
+            self.reset()
+        else:
+            self._append_node(node)
+            self._subgraph_parse(node, node, extra_blocks)

-    def _subgraph_parse(self, node: _SubGraphNodes, pathnode:
-        _SubGraphNodes, extra_blocks: Sequence[nodes.ExceptHandler]) ->None:
+    def _subgraph_parse(
+        self,
+        node: _SubGraphNodes,
+        pathnode: _SubGraphNodes,
+        extra_blocks: Sequence[nodes.ExceptHandler],
+    ) -> None:
         """Parse the body and any `else` block of `if` and `for` statements."""
-        pass
+        loose_ends = []
+        self.tail = node
+        self.dispatch_list(node.body)
+        loose_ends.append(self.tail)
+        for extra in extra_blocks:
+            self.tail = node
+            self.dispatch_list(extra.body)
+            loose_ends.append(self.tail)
+        if node.orelse:
+            self.tail = node
+            self.dispatch_list(node.orelse)
+            loose_ends.append(self.tail)
+        else:
+            loose_ends.append(node)
+        if node and self.graph:
+            bottom = f"{self._bottom_counter}"
+            self._bottom_counter += 1
+            for end in loose_ends:
+                self.graph.connect(end, bottom)
+            self.tail = bottom


 class McCabeMethodChecker(checkers.BaseChecker):
     """Checks McCabe complexity cyclomatic threshold in methods and functions
     to validate a too complex code.
     """
-    name = 'design'
-    msgs = {'R1260': ('%s is too complex. The McCabe rating is %d',
-        'too-complex',
-        'Used when a method or function is too complex based on McCabe Complexity Cyclomatic'
-        )}
-    options = ('max-complexity', {'default': 10, 'type': 'int', 'metavar':
-        '<int>', 'help': 'McCabe complexity cyclomatic threshold'}),
-
-    @only_required_for_messages('too-complex')
-    def visit_module(self, node: nodes.Module) ->None:
+
+    name = "design"
+
+    msgs = {
+        "R1260": (
+            "%s is too complex. The McCabe rating is %d",
+            "too-complex",
+            "Used when a method or function is too complex based on "
+            "McCabe Complexity Cyclomatic",
+        )
+    }
+    options = (
+        (
+            "max-complexity",
+            {
+                "default": 10,
+                "type": "int",
+                "metavar": "<int>",
+                "help": "McCabe complexity cyclomatic threshold",
+            },
+        ),
+    )
+
+    @only_required_for_messages("too-complex")
+    def visit_module(self, node: nodes.Module) -> None:
         """Visit an astroid.Module node to check too complex rating and
         add message if is greater than max_complexity stored from options.
         """
-        pass
+        visitor = PathGraphingAstVisitor()
+        for child in node.body:
+            visitor.preorder(child, visitor)
+        for graph in visitor.graphs.values():
+            complexity = graph.complexity()
+            node = graph.root
+            if hasattr(node, "name"):
+                node_name = f"'{node.name}'"
+            else:
+                node_name = f"This '{node.__class__.__name__.lower()}'"
+            if complexity <= self.linter.config.max_complexity:
+                continue
+            self.add_message(
+                "too-complex", node=node, confidence=HIGH, args=(node_name, complexity)
+            )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(McCabeMethodChecker(linter))
diff --git a/pylint/extensions/no_self_use.py b/pylint/extensions/no_self_use.py
index 2f2372c39..28a6620a8 100644
--- a/pylint/extensions/no_self_use.py
+++ b/pylint/extensions/no_self_use.py
@@ -1,40 +1,111 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
-from pylint.checkers.utils import PYMETHODS, decorated_with_property, is_overload_stub, is_protocol_class, overrides_a_method
+from pylint.checkers.utils import (
+    PYMETHODS,
+    decorated_with_property,
+    is_overload_stub,
+    is_protocol_class,
+    overrides_a_method,
+)
 from pylint.interfaces import INFERENCE
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter


 class NoSelfUseChecker(BaseChecker):
-    name = 'no_self_use'
-    msgs = {'R6301': ('Method could be a function', 'no-self-use',
-        "Used when a method doesn't use its bound instance, and so could be written as a function."
-        , {'old_names': [('R0201', 'old-no-self-use')]})}
+    name = "no_self_use"
+    msgs = {
+        "R6301": (
+            "Method could be a function",
+            "no-self-use",
+            "Used when a method doesn't use its bound instance, and so could "
+            "be written as a function.",
+            {"old_names": [("R0201", "old-no-self-use")]},
+        ),
+    }

-    def __init__(self, linter: PyLinter) ->None:
+    def __init__(self, linter: PyLinter) -> None:
         super().__init__(linter)
         self._first_attrs: list[str | None] = []
         self._meth_could_be_func: bool | None = None

-    def visit_name(self, node: nodes.Name) ->None:
+    def visit_name(self, node: nodes.Name) -> None:
         """Check if the name handle an access to a class member
         if so, register it.
         """
-        pass
+        if self._first_attrs and (
+            node.name == self._first_attrs[-1] or not self._first_attrs[-1]
+        ):
+            self._meth_could_be_func = False
+
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
+        if not node.is_method():
+            return
+        self._meth_could_be_func = True
+        self._check_first_arg_for_type(node)
+
     visit_asyncfunctiondef = visit_functiondef

-    def _check_first_arg_for_type(self, node: nodes.FunctionDef) ->None:
+    def _check_first_arg_for_type(self, node: nodes.FunctionDef) -> None:
         """Check the name of first argument."""
-        pass
+        # pylint: disable=duplicate-code
+        if node.args.posonlyargs:
+            first_arg = node.args.posonlyargs[0].name
+        elif node.args.args:
+            first_arg = node.argnames()[0]
+        else:
+            first_arg = None
+        self._first_attrs.append(first_arg)
+        # static method
+        if node.type == "staticmethod":
+            self._first_attrs[-1] = None

-    def leave_functiondef(self, node: nodes.FunctionDef) ->None:
+    def leave_functiondef(self, node: nodes.FunctionDef) -> None:
         """On method node, check if this method couldn't be a function.

         ignore class, static and abstract methods, initializer,
         methods overridden from a parent class.
         """
-        pass
+        if node.is_method():
+            first = self._first_attrs.pop()
+            if first is None:
+                return
+            class_node = node.parent.frame()
+            if (
+                self._meth_could_be_func
+                and node.type == "method"
+                and node.name not in PYMETHODS
+                and not (
+                    node.is_abstract()
+                    or overrides_a_method(class_node, node.name)
+                    or decorated_with_property(node)
+                    or _has_bare_super_call(node)
+                    or is_protocol_class(class_node)
+                    or is_overload_stub(node)
+                )
+            ):
+                self.add_message("no-self-use", node=node, confidence=INFERENCE)
+
     leave_asyncfunctiondef = leave_functiondef
+
+
+def _has_bare_super_call(fundef_node: nodes.FunctionDef) -> bool:
+    for call in fundef_node.nodes_of_class(nodes.Call):
+        func = call.func
+        if isinstance(func, nodes.Name) and func.name == "super" and not call.args:
+            return True
+    return False
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(NoSelfUseChecker(linter))
diff --git a/pylint/extensions/overlapping_exceptions.py b/pylint/extensions/overlapping_exceptions.py
index 518cd8c09..8d35e4ce3 100644
--- a/pylint/extensions/overlapping_exceptions.py
+++ b/pylint/extensions/overlapping_exceptions.py
@@ -1,11 +1,20 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Looks for overlapping exceptions."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING, Any
+
 import astroid
 from astroid import nodes, util
+
 from pylint import checkers
 from pylint.checkers import utils
 from pylint.checkers.exceptions import _annotated_unpack_infer
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter

@@ -16,12 +25,66 @@ class OverlappingExceptionsChecker(checkers.BaseChecker):

     (i.e. overlapping).
     """
-    name = 'overlap-except'
-    msgs = {'W0714': ('Overlapping exceptions (%s)', 'overlapping-except',
-        'Used when exceptions in handler overlap or are identical')}
+
+    name = "overlap-except"
+    msgs = {
+        "W0714": (
+            "Overlapping exceptions (%s)",
+            "overlapping-except",
+            "Used when exceptions in handler overlap or are identical",
+        )
+    }
     options = ()

-    @utils.only_required_for_messages('overlapping-except')
-    def visit_try(self, node: nodes.Try) ->None:
+    @utils.only_required_for_messages("overlapping-except")
+    def visit_try(self, node: nodes.Try) -> None:
         """Check for empty except."""
-        pass
+        for handler in node.handlers:
+            if handler.type is None:
+                continue
+            if isinstance(handler.type, astroid.BoolOp):
+                continue
+            try:
+                excs = list(_annotated_unpack_infer(handler.type))
+            except astroid.InferenceError:
+                continue
+
+            handled_in_clause: list[tuple[Any, Any]] = []
+            for part, exc in excs:
+                if isinstance(exc, util.UninferableBase):
+                    continue
+                if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(exc):
+                    exc = exc._proxied
+
+                if not isinstance(exc, astroid.ClassDef):
+                    continue
+
+                exc_ancestors = [
+                    anc for anc in exc.ancestors() if isinstance(anc, astroid.ClassDef)
+                ]
+
+                for prev_part, prev_exc in handled_in_clause:
+                    prev_exc_ancestors = [
+                        anc
+                        for anc in prev_exc.ancestors()
+                        if isinstance(anc, astroid.ClassDef)
+                    ]
+                    if exc == prev_exc:
+                        self.add_message(
+                            "overlapping-except",
+                            node=handler.type,
+                            args=f"{prev_part.as_string()} and {part.as_string()} are the same",
+                        )
+                    elif prev_exc in exc_ancestors or exc in prev_exc_ancestors:
+                        ancestor = part if exc in prev_exc_ancestors else prev_part
+                        descendant = part if prev_exc in exc_ancestors else prev_part
+                        self.add_message(
+                            "overlapping-except",
+                            node=handler.type,
+                            args=f"{ancestor.as_string()} is an ancestor class of {descendant.as_string()}",
+                        )
+                handled_in_clause += [(part, exc)]
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(OverlappingExceptionsChecker(linter))
diff --git a/pylint/extensions/private_import.py b/pylint/extensions/private_import.py
index 3390b4be5..962bfe1f1 100644
--- a/pylint/extensions/private_import.py
+++ b/pylint/extensions/private_import.py
@@ -1,73 +1,264 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Check for imports on private external modules and names."""
+
 from __future__ import annotations
+
 from pathlib import Path
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker, utils
 from pylint.interfaces import HIGH
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter


 class PrivateImportChecker(BaseChecker):
-    name = 'import-private-name'
-    msgs = {'C2701': ('Imported private %s (%s)', 'import-private-name',
-        'Used when a private module or object prefixed with _ is imported. PEP8 guidance on Naming Conventions states that public attributes with leading underscores should be considered private.'
-        )}
+    name = "import-private-name"
+    msgs = {
+        "C2701": (
+            "Imported private %s (%s)",
+            "import-private-name",
+            "Used when a private module or object prefixed with _ is imported. "
+            "PEP8 guidance on Naming Conventions states that public attributes with "
+            "leading underscores should be considered private.",
+        ),
+    }

-    def __init__(self, linter: PyLinter) ->None:
+    def __init__(self, linter: PyLinter) -> None:
         BaseChecker.__init__(self, linter)
+
+        # A mapping of private names used as a type annotation to whether it is an acceptable import
         self.all_used_type_annotations: dict[str, bool] = {}
         self.populated_annotations = False

-    def _get_private_imports(self, names: list[str]) ->list[str]:
+    @utils.only_required_for_messages("import-private-name")
+    def visit_import(self, node: nodes.Import) -> None:
+        if utils.in_type_checking_block(node):
+            return
+        names = [name[0] for name in node.names]
+        private_names = self._get_private_imports(names)
+        private_names = self._get_type_annotation_names(node, private_names)
+        if private_names:
+            imported_identifier = "modules" if len(private_names) > 1 else "module"
+            private_name_string = ", ".join(private_names)
+            self.add_message(
+                "import-private-name",
+                node=node,
+                args=(imported_identifier, private_name_string),
+                confidence=HIGH,
+            )
+
+    @utils.only_required_for_messages("import-private-name")
+    def visit_importfrom(self, node: nodes.ImportFrom) -> None:
+        if utils.in_type_checking_block(node):
+            return
+        # Only check imported names if the module is external
+        if self.same_root_dir(node, node.modname):
+            return
+
+        names = [n[0] for n in node.names]
+
+        # Check the imported objects first. If they are all valid type annotations,
+        # the package can be private
+        private_names = self._get_type_annotation_names(node, names)
+        if not private_names:
+            return
+
+        # There are invalid imported objects, so check the name of the package
+        private_module_imports = self._get_private_imports([node.modname])
+        private_module_imports = self._get_type_annotation_names(
+            node, private_module_imports
+        )
+        if private_module_imports:
+            self.add_message(
+                "import-private-name",
+                node=node,
+                args=("module", private_module_imports[0]),
+                confidence=HIGH,
+            )
+            return  # Do not emit messages on the objects if the package is private
+
+        private_names = self._get_private_imports(private_names)
+
+        if private_names:
+            imported_identifier = "objects" if len(private_names) > 1 else "object"
+            private_name_string = ", ".join(private_names)
+            self.add_message(
+                "import-private-name",
+                node=node,
+                args=(imported_identifier, private_name_string),
+                confidence=HIGH,
+            )
+
+    def _get_private_imports(self, names: list[str]) -> list[str]:
         """Returns the private names from input names by a simple string check."""
-        pass
+        return [name for name in names if self._name_is_private(name)]

     @staticmethod
-    def _name_is_private(name: str) ->bool:
+    def _name_is_private(name: str) -> bool:
         """Returns true if the name exists, starts with `_`, and if len(name) > 4
         it is not a dunder, i.e. it does not begin and end with two underscores.
         """
-        pass
+        return (
+            bool(name)
+            and name[0] == "_"
+            and (len(name) <= 4 or name[1] != "_" or name[-2:] != "__")
+        )

-    def _get_type_annotation_names(self, node: (nodes.Import | nodes.
-        ImportFrom), names: list[str]) ->list[str]:
+    def _get_type_annotation_names(
+        self, node: nodes.Import | nodes.ImportFrom, names: list[str]
+    ) -> list[str]:
         """Removes from names any names that are used as type annotations with no other
         illegal usages.
         """
-        pass
+        if names and not self.populated_annotations:
+            self._populate_type_annotations(node.root(), self.all_used_type_annotations)
+            self.populated_annotations = True
+
+        return [
+            n
+            for n in names
+            if n not in self.all_used_type_annotations
+            or (
+                n in self.all_used_type_annotations
+                and not self.all_used_type_annotations[n]
+            )
+        ]

-    def _populate_type_annotations(self, node: nodes.LocalsDictNodeNG,
-        all_used_type_annotations: dict[str, bool]) ->None:
+    def _populate_type_annotations(
+        self, node: nodes.LocalsDictNodeNG, all_used_type_annotations: dict[str, bool]
+    ) -> None:
         """Adds to `all_used_type_annotations` all names ever used as a type annotation
         in the node's (nested) scopes and whether they are only used as annotation.
         """
-        pass
+        for name in node.locals:
+            # If we find a private type annotation, make sure we do not mask illegal usages
+            private_name = None
+            # All the assignments using this variable that we might have to check for
+            # illegal usages later
+            name_assignments = []
+            for usage_node in node.locals[name]:
+                if isinstance(usage_node, nodes.AssignName) and isinstance(
+                    usage_node.parent, (nodes.AnnAssign, nodes.Assign)
+                ):
+                    assign_parent = usage_node.parent
+                    if isinstance(assign_parent, nodes.AnnAssign):
+                        name_assignments.append(assign_parent)
+                        private_name = self._populate_type_annotations_annotation(
+                            usage_node.parent.annotation, all_used_type_annotations
+                        )
+                    elif isinstance(assign_parent, nodes.Assign):
+                        name_assignments.append(assign_parent)

-    def _populate_type_annotations_function(self, node: nodes.FunctionDef,
-        all_used_type_annotations: dict[str, bool]) ->None:
+                if isinstance(usage_node, nodes.FunctionDef):
+                    self._populate_type_annotations_function(
+                        usage_node, all_used_type_annotations
+                    )
+                if isinstance(usage_node, nodes.LocalsDictNodeNG):
+                    self._populate_type_annotations(
+                        usage_node, all_used_type_annotations
+                    )
+            if private_name is not None:
+                # Found a new private annotation, make sure we are not accessing it elsewhere
+                all_used_type_annotations[private_name] = (
+                    self._assignments_call_private_name(name_assignments, private_name)
+                )
+
+    def _populate_type_annotations_function(
+        self, node: nodes.FunctionDef, all_used_type_annotations: dict[str, bool]
+    ) -> None:
         """Adds all names used as type annotation in the arguments and return type of
         the function node into the dict `all_used_type_annotations`.
         """
-        pass
+        if node.args and node.args.annotations:
+            for annotation in node.args.annotations:
+                self._populate_type_annotations_annotation(
+                    annotation, all_used_type_annotations
+                )
+        if node.returns:
+            self._populate_type_annotations_annotation(
+                node.returns, all_used_type_annotations
+            )

-    def _populate_type_annotations_annotation(self, node: (nodes.Attribute |
-        nodes.Subscript | nodes.Name | None), all_used_type_annotations:
-        dict[str, bool]) ->(str | None):
+    def _populate_type_annotations_annotation(
+        self,
+        node: nodes.Attribute | nodes.Subscript | nodes.Name | None,
+        all_used_type_annotations: dict[str, bool],
+    ) -> str | None:
         """Handles the possibility of an annotation either being a Name, i.e. just type,
         or a Subscript e.g. `Optional[type]` or an Attribute, e.g. `pylint.lint.linter`.
         """
-        pass
+        if isinstance(node, nodes.Name) and node.name not in all_used_type_annotations:
+            all_used_type_annotations[node.name] = True
+            return node.name  # type: ignore[no-any-return]
+        if isinstance(node, nodes.Subscript):  # e.g. Optional[List[str]]
+            # slice is the next nested type
+            self._populate_type_annotations_annotation(
+                node.slice, all_used_type_annotations
+            )
+            # value is the current type name: could be a Name or Attribute
+            return self._populate_type_annotations_annotation(
+                node.value, all_used_type_annotations
+            )
+        if isinstance(node, nodes.Attribute):
+            # An attribute is a type like `pylint.lint.pylinter`. node.expr is the next level
+            # up, could be another attribute
+            return self._populate_type_annotations_annotation(
+                node.expr, all_used_type_annotations
+            )
+        return None

     @staticmethod
-    def _assignments_call_private_name(assignments: list[nodes.AnnAssign |
-        nodes.Assign], private_name: str) ->bool:
+    def _assignments_call_private_name(
+        assignments: list[nodes.AnnAssign | nodes.Assign], private_name: str
+    ) -> bool:
         """Returns True if no assignments involve accessing `private_name`."""
-        pass
+        if all(not assignment.value for assignment in assignments):
+            # Variable annotated but unassigned is not allowed because there may be
+            # possible illegal access elsewhere
+            return False
+        for assignment in assignments:
+            current_attribute = None
+            if isinstance(assignment.value, nodes.Call):
+                current_attribute = assignment.value.func
+            elif isinstance(assignment.value, nodes.Attribute):
+                current_attribute = assignment.value
+            elif isinstance(assignment.value, nodes.Name):
+                current_attribute = assignment.value.name
+            if not current_attribute:
+                continue
+            while isinstance(current_attribute, (nodes.Attribute, nodes.Call)):
+                if isinstance(current_attribute, nodes.Call):
+                    current_attribute = current_attribute.func
+                if not isinstance(current_attribute, nodes.Name):
+                    current_attribute = current_attribute.expr
+            if (
+                isinstance(current_attribute, nodes.Name)
+                and current_attribute.name == private_name
+            ):
+                return False
+        return True

     @staticmethod
-    def same_root_dir(node: (nodes.Import | nodes.ImportFrom),
-        import_mod_name: str) ->bool:
+    def same_root_dir(
+        node: nodes.Import | nodes.ImportFrom, import_mod_name: str
+    ) -> bool:
         """Does the node's file's path contain the base name of `import_mod_name`?"""
-        pass
+        if not import_mod_name:  # from . import ...
+            return True
+        if node.level:  # from .foo import ..., from ..bar import ...
+            return True
+
+        base_import_package = import_mod_name.split(".")[0]
+
+        return base_import_package in Path(node.root().file).parent.parts
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(PrivateImportChecker(linter))
diff --git a/pylint/extensions/redefined_loop_name.py b/pylint/extensions/redefined_loop_name.py
index 4ab31da68..d03b80be3 100644
--- a/pylint/extensions/redefined_loop_name.py
+++ b/pylint/extensions/redefined_loop_name.py
@@ -1,6 +1,13 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Optional checker to warn when loop variables are overwritten in the loop's body."""
+
 from __future__ import annotations
+
 from astroid import nodes
+
 from pylint import checkers
 from pylint.checkers import utils
 from pylint.interfaces import HIGH
@@ -8,12 +15,74 @@ from pylint.lint import PyLinter


 class RedefinedLoopNameChecker(checkers.BaseChecker):
-    name = 'redefined-loop-name'
-    msgs = {'W2901': ('Redefining %r from loop (line %s)',
-        'redefined-loop-name',
-        'Used when a loop variable is overwritten in the loop body.')}
+    name = "redefined-loop-name"
+
+    msgs = {
+        "W2901": (
+            "Redefining %r from loop (line %s)",
+            "redefined-loop-name",
+            "Used when a loop variable is overwritten in the loop body.",
+        ),
+    }

-    def __init__(self, linter: PyLinter) ->None:
+    def __init__(self, linter: PyLinter) -> None:
         super().__init__(linter)
-        self._loop_variables: list[tuple[nodes.For, list[str], nodes.
-            LocalsDictNodeNG]] = []
+        self._loop_variables: list[
+            tuple[nodes.For, list[str], nodes.LocalsDictNodeNG]
+        ] = []
+
+    @utils.only_required_for_messages("redefined-loop-name")
+    def visit_assignname(self, node: nodes.AssignName) -> None:
+        assign_type = node.assign_type()
+        if not isinstance(assign_type, (nodes.Assign, nodes.AugAssign)):
+            return
+        node_scope = node.scope()
+        for outer_for, outer_variables, outer_for_scope in self._loop_variables:
+            if node_scope is not outer_for_scope:
+                continue
+            if node.name in outer_variables and not utils.in_for_else_branch(
+                outer_for, node
+            ):
+                self.add_message(
+                    "redefined-loop-name",
+                    args=(node.name, outer_for.fromlineno),
+                    node=node,
+                    confidence=HIGH,
+                )
+                break
+
+    @utils.only_required_for_messages("redefined-loop-name")
+    def visit_for(self, node: nodes.For) -> None:
+        assigned_to = [a.name for a in node.target.nodes_of_class(nodes.AssignName)]
+        # Only check variables that are used
+        assigned_to = [
+            var
+            for var in assigned_to
+            if not self.linter.config.dummy_variables_rgx.match(var)
+        ]
+
+        node_scope = node.scope()
+        for variable in assigned_to:
+            for outer_for, outer_variables, outer_for_scope in self._loop_variables:
+                if node_scope is not outer_for_scope:
+                    continue
+                if variable in outer_variables and not utils.in_for_else_branch(
+                    outer_for, node
+                ):
+                    self.add_message(
+                        "redefined-loop-name",
+                        args=(variable, outer_for.fromlineno),
+                        node=node,
+                        confidence=HIGH,
+                    )
+                    break
+
+        self._loop_variables.append((node, assigned_to, node.scope()))
+
+    @utils.only_required_for_messages("redefined-loop-name")
+    def leave_for(self, node: nodes.For) -> None:  # pylint: disable=unused-argument
+        self._loop_variables.pop()
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(RedefinedLoopNameChecker(linter))
diff --git a/pylint/extensions/redefined_variable_type.py b/pylint/extensions/redefined_variable_type.py
index 12432d681..ba5af3136 100644
--- a/pylint/extensions/redefined_variable_type.py
+++ b/pylint/extensions/redefined_variable_type.py
@@ -1,8 +1,16 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.checkers.utils import is_none, node_type, only_required_for_messages
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter

@@ -20,10 +28,81 @@ class MultipleTypesChecker(BaseChecker):
       ifexpr, etc. Also, it would be great to have support for inference on
       str.split()
     """
-    name = 'multiple_types'
-    msgs = {'R0204': ('Redefinition of %s type from %s to %s',
-        'redefined-variable-type',
-        'Used when the type of a variable changes inside a method or a function.'
-        )}
+
+    name = "multiple_types"
+    msgs = {
+        "R0204": (
+            "Redefinition of %s type from %s to %s",
+            "redefined-variable-type",
+            "Used when the type of a variable changes inside a "
+            "method or a function.",
+        )
+    }
+
+    def visit_classdef(self, _: nodes.ClassDef) -> None:
+        self._assigns.append({})
+
+    @only_required_for_messages("redefined-variable-type")
+    def leave_classdef(self, _: nodes.ClassDef) -> None:
+        self._check_and_add_messages()
+
     visit_functiondef = visit_asyncfunctiondef = visit_classdef
     leave_functiondef = leave_asyncfunctiondef = leave_module = leave_classdef
+
+    def visit_module(self, _: nodes.Module) -> None:
+        self._assigns: list[dict[str, list[tuple[nodes.Assign, str]]]] = [{}]
+
+    def _check_and_add_messages(self) -> None:
+        assigns = self._assigns.pop()
+        for name, args in assigns.items():
+            if len(args) <= 1:
+                continue
+            orig_node, orig_type = args[0]
+            # Check if there is a type in the following nodes that would be
+            # different from orig_type.
+            for redef_node, redef_type in args[1:]:
+                if redef_type == orig_type:
+                    continue
+                # if a variable is defined to several types in an if node,
+                # this is not actually redefining.
+                orig_parent = orig_node.parent
+                redef_parent = redef_node.parent
+                if isinstance(orig_parent, nodes.If):
+                    if orig_parent == redef_parent:
+                        if (
+                            redef_node in orig_parent.orelse
+                            and orig_node not in orig_parent.orelse
+                        ):
+                            orig_node, orig_type = redef_node, redef_type
+                            continue
+                    elif isinstance(
+                        redef_parent, nodes.If
+                    ) and redef_parent in orig_parent.nodes_of_class(nodes.If):
+                        orig_node, orig_type = redef_node, redef_type
+                        continue
+                orig_type = orig_type.replace("builtins.", "")
+                redef_type = redef_type.replace("builtins.", "")
+                self.add_message(
+                    "redefined-variable-type",
+                    node=redef_node,
+                    args=(name, orig_type, redef_type),
+                )
+                break
+
+    def visit_assign(self, node: nodes.Assign) -> None:
+        # we don't handle multiple assignment nor slice assignment
+        target = node.targets[0]
+        if isinstance(target, (nodes.Tuple, nodes.Subscript)):
+            return
+        # ignore NoneType
+        if is_none(node):
+            return
+        _type = node_type(node.value)
+        if _type:
+            self._assigns[-1].setdefault(target.as_string(), []).append(
+                (node, _type.pytype())
+            )
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(MultipleTypesChecker(linter))
diff --git a/pylint/extensions/set_membership.py b/pylint/extensions/set_membership.py
index c38482bbd..b72f5aa18 100644
--- a/pylint/extensions/set_membership.py
+++ b/pylint/extensions/set_membership.py
@@ -1,23 +1,52 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.checkers.utils import only_required_for_messages
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 class SetMembershipChecker(BaseChecker):
-    name = 'set_membership'
-    msgs = {'R6201': ('Consider using set for membership test',
-        'use-set-for-membership',
-        'Membership tests are more efficient when performed on a lookup optimized datatype like ``sets``.'
-        )}
+    name = "set_membership"
+    msgs = {
+        "R6201": (
+            "Consider using set for membership test",
+            "use-set-for-membership",
+            "Membership tests are more efficient when performed on "
+            "a lookup optimized datatype like ``sets``.",
+        ),
+    }

-    def __init__(self, linter: PyLinter) ->None:
+    def __init__(self, linter: PyLinter) -> None:
         """Initialize checker instance."""
         super().__init__(linter=linter)

-    def _check_in_comparison(self, comparator: nodes.NodeNG) ->None:
+    @only_required_for_messages("use-set-for-membership")
+    def visit_compare(self, node: nodes.Compare) -> None:
+        for op, comparator in node.ops:
+            if op == "in":
+                self._check_in_comparison(comparator)
+
+    def _check_in_comparison(self, comparator: nodes.NodeNG) -> None:
         """Checks for membership comparisons with in-place container objects."""
-        pass
+        if not isinstance(comparator, nodes.BaseContainer) or isinstance(
+            comparator, nodes.Set
+        ):
+            return
+
+        # Heuristic - We need to be sure all items in set are hashable
+        if all(isinstance(item, nodes.Const) for item in comparator.elts):
+            self.add_message("use-set-for-membership", node=comparator)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(SetMembershipChecker(linter))
diff --git a/pylint/extensions/typing.py b/pylint/extensions/typing.py
index 6458902ba..2956465cf 100644
--- a/pylint/extensions/typing.py
+++ b/pylint/extensions/typing.py
@@ -1,11 +1,25 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING, NamedTuple
+
 import astroid.bases
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
-from pylint.checkers.utils import in_type_checking_block, is_node_in_type_annotation_context, is_postponed_evaluation_enabled, only_required_for_messages, safe_infer
+from pylint.checkers.utils import (
+    in_type_checking_block,
+    is_node_in_type_annotation_context,
+    is_postponed_evaluation_enabled,
+    only_required_for_messages,
+    safe_infer,
+)
 from pylint.constants import TYPING_NORETURN
 from pylint.interfaces import HIGH, INFERENCE
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter

@@ -15,48 +29,53 @@ class TypingAlias(NamedTuple):
     name_collision: bool


-DEPRECATED_TYPING_ALIASES: dict[str, TypingAlias] = {'typing.Tuple':
-    TypingAlias('tuple', False), 'typing.List': TypingAlias('list', False),
-    'typing.Dict': TypingAlias('dict', False), 'typing.Set': TypingAlias(
-    'set', False), 'typing.FrozenSet': TypingAlias('frozenset', False),
-    'typing.Type': TypingAlias('type', False), 'typing.Deque': TypingAlias(
-    'collections.deque', True), 'typing.DefaultDict': TypingAlias(
-    'collections.defaultdict', True), 'typing.OrderedDict': TypingAlias(
-    'collections.OrderedDict', True), 'typing.Counter': TypingAlias(
-    'collections.Counter', True), 'typing.ChainMap': TypingAlias(
-    'collections.ChainMap', True), 'typing.Awaitable': TypingAlias(
-    'collections.abc.Awaitable', True), 'typing.Coroutine': TypingAlias(
-    'collections.abc.Coroutine', True), 'typing.AsyncIterable': TypingAlias
-    ('collections.abc.AsyncIterable', True), 'typing.AsyncIterator':
-    TypingAlias('collections.abc.AsyncIterator', True),
-    'typing.AsyncGenerator': TypingAlias('collections.abc.AsyncGenerator', 
-    True), 'typing.Iterable': TypingAlias('collections.abc.Iterable', True),
-    'typing.Iterator': TypingAlias('collections.abc.Iterator', True),
-    'typing.Generator': TypingAlias('collections.abc.Generator', True),
-    'typing.Reversible': TypingAlias('collections.abc.Reversible', True),
-    'typing.Container': TypingAlias('collections.abc.Container', True),
-    'typing.Collection': TypingAlias('collections.abc.Collection', True),
-    'typing.Callable': TypingAlias('collections.abc.Callable', True),
-    'typing.AbstractSet': TypingAlias('collections.abc.Set', False),
-    'typing.MutableSet': TypingAlias('collections.abc.MutableSet', True),
-    'typing.Mapping': TypingAlias('collections.abc.Mapping', True),
-    'typing.MutableMapping': TypingAlias('collections.abc.MutableMapping', 
-    True), 'typing.Sequence': TypingAlias('collections.abc.Sequence', True),
-    'typing.MutableSequence': TypingAlias('collections.abc.MutableSequence',
-    True), 'typing.ByteString': TypingAlias('collections.abc.ByteString', 
-    True), 'typing.MappingView': TypingAlias('collections.abc.MappingView',
-    True), 'typing.KeysView': TypingAlias('collections.abc.KeysView', True),
-    'typing.ItemsView': TypingAlias('collections.abc.ItemsView', True),
-    'typing.ValuesView': TypingAlias('collections.abc.ValuesView', True),
-    'typing.ContextManager': TypingAlias(
-    'contextlib.AbstractContextManager', False),
-    'typing.AsyncContextManager': TypingAlias(
-    'contextlib.AbstractAsyncContextManager', False), 'typing.Pattern':
-    TypingAlias('re.Pattern', True), 'typing.Match': TypingAlias('re.Match',
-    True), 'typing.Hashable': TypingAlias('collections.abc.Hashable', True),
-    'typing.Sized': TypingAlias('collections.abc.Sized', True)}
-ALIAS_NAMES = frozenset(key.split('.')[1] for key in DEPRECATED_TYPING_ALIASES)
-UNION_NAMES = 'Optional', 'Union'
+DEPRECATED_TYPING_ALIASES: dict[str, TypingAlias] = {
+    "typing.Tuple": TypingAlias("tuple", False),
+    "typing.List": TypingAlias("list", False),
+    "typing.Dict": TypingAlias("dict", False),
+    "typing.Set": TypingAlias("set", False),
+    "typing.FrozenSet": TypingAlias("frozenset", False),
+    "typing.Type": TypingAlias("type", False),
+    "typing.Deque": TypingAlias("collections.deque", True),
+    "typing.DefaultDict": TypingAlias("collections.defaultdict", True),
+    "typing.OrderedDict": TypingAlias("collections.OrderedDict", True),
+    "typing.Counter": TypingAlias("collections.Counter", True),
+    "typing.ChainMap": TypingAlias("collections.ChainMap", True),
+    "typing.Awaitable": TypingAlias("collections.abc.Awaitable", True),
+    "typing.Coroutine": TypingAlias("collections.abc.Coroutine", True),
+    "typing.AsyncIterable": TypingAlias("collections.abc.AsyncIterable", True),
+    "typing.AsyncIterator": TypingAlias("collections.abc.AsyncIterator", True),
+    "typing.AsyncGenerator": TypingAlias("collections.abc.AsyncGenerator", True),
+    "typing.Iterable": TypingAlias("collections.abc.Iterable", True),
+    "typing.Iterator": TypingAlias("collections.abc.Iterator", True),
+    "typing.Generator": TypingAlias("collections.abc.Generator", True),
+    "typing.Reversible": TypingAlias("collections.abc.Reversible", True),
+    "typing.Container": TypingAlias("collections.abc.Container", True),
+    "typing.Collection": TypingAlias("collections.abc.Collection", True),
+    "typing.Callable": TypingAlias("collections.abc.Callable", True),
+    "typing.AbstractSet": TypingAlias("collections.abc.Set", False),
+    "typing.MutableSet": TypingAlias("collections.abc.MutableSet", True),
+    "typing.Mapping": TypingAlias("collections.abc.Mapping", True),
+    "typing.MutableMapping": TypingAlias("collections.abc.MutableMapping", True),
+    "typing.Sequence": TypingAlias("collections.abc.Sequence", True),
+    "typing.MutableSequence": TypingAlias("collections.abc.MutableSequence", True),
+    "typing.ByteString": TypingAlias("collections.abc.ByteString", True),
+    "typing.MappingView": TypingAlias("collections.abc.MappingView", True),
+    "typing.KeysView": TypingAlias("collections.abc.KeysView", True),
+    "typing.ItemsView": TypingAlias("collections.abc.ItemsView", True),
+    "typing.ValuesView": TypingAlias("collections.abc.ValuesView", True),
+    "typing.ContextManager": TypingAlias("contextlib.AbstractContextManager", False),
+    "typing.AsyncContextManager": TypingAlias(
+        "contextlib.AbstractAsyncContextManager", False
+    ),
+    "typing.Pattern": TypingAlias("re.Pattern", True),
+    "typing.Match": TypingAlias("re.Match", True),
+    "typing.Hashable": TypingAlias("collections.abc.Hashable", True),
+    "typing.Sized": TypingAlias("collections.abc.Sized", True),
+}
+
+ALIAS_NAMES = frozenset(key.split(".")[1] for key in DEPRECATED_TYPING_ALIASES)
+UNION_NAMES = ("Optional", "Union")


 class DeprecatedTypingAliasMsg(NamedTuple):
@@ -68,44 +87,81 @@ class DeprecatedTypingAliasMsg(NamedTuple):

 class TypingChecker(BaseChecker):
     """Find issue specifically related to type annotations."""
-    name = 'typing'
-    msgs = {'W6001': ("'%s' is deprecated, use '%s' instead",
-        'deprecated-typing-alias',
-        'Emitted when a deprecated typing alias is used.'), 'R6002': (
-        "'%s' will be deprecated with PY39, consider using '%s' instead%s",
-        'consider-using-alias',
-        "Only emitted if 'runtime-typing=no' and a deprecated typing alias is used in a type annotation context in Python 3.7 or 3.8."
-        ), 'R6003': (
-        "Consider using alternative Union syntax instead of '%s'%s",
-        'consider-alternative-union-syntax',
-        "Emitted when 'typing.Union' or 'typing.Optional' is used instead of the alternative Union syntax 'int | None'."
-        ), 'E6004': (
-        "'NoReturn' inside compound types is broken in 3.7.0 / 3.7.1",
-        'broken-noreturn',
-        "``typing.NoReturn`` inside compound types is broken in Python 3.7.0 and 3.7.1. If not dependent on runtime introspection, use string annotation instead. E.g. ``Callable[..., 'NoReturn']``. https://bugs.python.org/issue34921"
-        ), 'E6005': (
-        "'collections.abc.Callable' inside Optional and Union is broken in 3.9.0 / 3.9.1 (use 'typing.Callable' instead)"
-        , 'broken-collections-callable',
-        '``collections.abc.Callable`` inside Optional and Union is broken in Python 3.9.0 and 3.9.1. Use ``typing.Callable`` for these cases instead. https://bugs.python.org/issue42965'
-        ), 'R6006': (
-        'Type `%s` is used more than once in union type annotation. Remove redundant typehints.'
-        , 'redundant-typehint-argument',
-        'Duplicated type arguments will be skipped by `mypy` tool, therefore should be removed to avoid confusion.'
-        )}
-    options = ('runtime-typing', {'default': True, 'type': 'yn', 'metavar':
-        '<y or n>', 'help':
-        "Set to ``no`` if the app / library does **NOT** need to support runtime introspection of type annotations. If you use type annotations **exclusively** for type checking of an application, you're probably fine. For libraries, evaluate if some users want to access the type hints at runtime first, e.g., through ``typing.get_type_hints``. Applies to Python versions 3.7 - 3.9"
-        }),
+
+    name = "typing"
+    msgs = {
+        "W6001": (
+            "'%s' is deprecated, use '%s' instead",
+            "deprecated-typing-alias",
+            "Emitted when a deprecated typing alias is used.",
+        ),
+        "R6002": (
+            "'%s' will be deprecated with PY39, consider using '%s' instead%s",
+            "consider-using-alias",
+            "Only emitted if 'runtime-typing=no' and a deprecated "
+            "typing alias is used in a type annotation context in "
+            "Python 3.7 or 3.8.",
+        ),
+        "R6003": (
+            "Consider using alternative Union syntax instead of '%s'%s",
+            "consider-alternative-union-syntax",
+            "Emitted when 'typing.Union' or 'typing.Optional' is used "
+            "instead of the alternative Union syntax 'int | None'.",
+        ),
+        "E6004": (
+            "'NoReturn' inside compound types is broken in 3.7.0 / 3.7.1",
+            "broken-noreturn",
+            "``typing.NoReturn`` inside compound types is broken in "
+            "Python 3.7.0 and 3.7.1. If not dependent on runtime introspection, "
+            "use string annotation instead. E.g. "
+            "``Callable[..., 'NoReturn']``. https://bugs.python.org/issue34921",
+        ),
+        "E6005": (
+            "'collections.abc.Callable' inside Optional and Union is broken in "
+            "3.9.0 / 3.9.1 (use 'typing.Callable' instead)",
+            "broken-collections-callable",
+            "``collections.abc.Callable`` inside Optional and Union is broken in "
+            "Python 3.9.0 and 3.9.1. Use ``typing.Callable`` for these cases instead. "
+            "https://bugs.python.org/issue42965",
+        ),
+        "R6006": (
+            "Type `%s` is used more than once in union type annotation. Remove redundant typehints.",
+            "redundant-typehint-argument",
+            "Duplicated type arguments will be skipped by `mypy` tool, therefore should be "
+            "removed to avoid confusion.",
+        ),
+    }
+    options = (
+        (
+            "runtime-typing",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": (
+                    "Set to ``no`` if the app / library does **NOT** need to "
+                    "support runtime introspection of type annotations. "
+                    "If you use type annotations **exclusively** for type checking "
+                    "of an application, you're probably fine. For libraries, "
+                    "evaluate if some users want to access the type hints "
+                    "at runtime first, e.g., through ``typing.get_type_hints``. "
+                    "Applies to Python versions 3.7 - 3.9"
+                ),
+            },
+        ),
+    )
+
     _should_check_typing_alias: bool
     """The use of type aliases (PEP 585) requires Python 3.9
     or Python 3.7+ with postponed evaluation.
     """
+
     _should_check_alternative_union_syntax: bool
     """The use of alternative union syntax (PEP 604) requires Python 3.10
     or Python 3.7+ with postponed evaluation.
     """

-    def __init__(self, linter: PyLinter) ->None:
+    def __init__(self, linter: PyLinter) -> None:
         """Initialize checker instance."""
         super().__init__(linter=linter)
         self._found_broken_callable_location: bool = False
@@ -113,12 +169,140 @@ class TypingChecker(BaseChecker):
         self._deprecated_typing_alias_msgs: list[DeprecatedTypingAliasMsg] = []
         self._consider_using_alias_msgs: list[DeprecatedTypingAliasMsg] = []

-    def _msg_postponed_eval_hint(self, node: nodes.NodeNG) ->str:
+    def open(self) -> None:
+        py_version = self.linter.config.py_version
+        self._py37_plus = py_version >= (3, 7)
+        self._py39_plus = py_version >= (3, 9)
+        self._py310_plus = py_version >= (3, 10)
+
+        self._should_check_typing_alias = self._py39_plus or (
+            self._py37_plus and self.linter.config.runtime_typing is False
+        )
+        self._should_check_alternative_union_syntax = self._py310_plus or (
+            self._py37_plus and self.linter.config.runtime_typing is False
+        )
+
+        self._should_check_noreturn = py_version < (3, 7, 2)
+        self._should_check_callable = py_version < (3, 9, 2)
+
+    def _msg_postponed_eval_hint(self, node: nodes.NodeNG) -> str:
         """Message hint if postponed evaluation isn't enabled."""
-        pass
+        if self._py310_plus or "annotations" in node.root().future_imports:
+            return ""
+        return ". Add 'from __future__ import annotations' as well"
+
+    @only_required_for_messages(
+        "deprecated-typing-alias",
+        "consider-using-alias",
+        "consider-alternative-union-syntax",
+        "broken-noreturn",
+        "broken-collections-callable",
+    )
+    def visit_name(self, node: nodes.Name) -> None:
+        if self._should_check_typing_alias and node.name in ALIAS_NAMES:
+            self._check_for_typing_alias(node)
+        if self._should_check_alternative_union_syntax and node.name in UNION_NAMES:
+            self._check_for_alternative_union_syntax(node, node.name)
+        if self._should_check_noreturn and node.name == "NoReturn":
+            self._check_broken_noreturn(node)
+        if self._should_check_callable and node.name == "Callable":
+            self._check_broken_callable(node)
+
+    @only_required_for_messages(
+        "deprecated-typing-alias",
+        "consider-using-alias",
+        "consider-alternative-union-syntax",
+        "broken-noreturn",
+        "broken-collections-callable",
+    )
+    def visit_attribute(self, node: nodes.Attribute) -> None:
+        if self._should_check_typing_alias and node.attrname in ALIAS_NAMES:
+            self._check_for_typing_alias(node)
+        if self._should_check_alternative_union_syntax and node.attrname in UNION_NAMES:
+            self._check_for_alternative_union_syntax(node, node.attrname)
+        if self._should_check_noreturn and node.attrname == "NoReturn":
+            self._check_broken_noreturn(node)
+        if self._should_check_callable and node.attrname == "Callable":
+            self._check_broken_callable(node)
+
+    @only_required_for_messages("redundant-typehint-argument")
+    def visit_annassign(self, node: nodes.AnnAssign) -> None:
+        annotation = node.annotation
+        if self._is_deprecated_union_annotation(annotation, "Optional"):
+            if self._is_optional_none_annotation(annotation):
+                self.add_message(
+                    "redundant-typehint-argument",
+                    node=annotation,
+                    args="None",
+                    confidence=HIGH,
+                )
+            return
+        if self._is_deprecated_union_annotation(annotation, "Union") and isinstance(
+            annotation.slice, nodes.Tuple
+        ):
+            types = annotation.slice.elts
+        elif self._is_binop_union_annotation(annotation):
+            types = self._parse_binops_typehints(annotation)
+        else:
+            return
+
+        self._check_union_types(types, node)
+
+    @staticmethod
+    def _is_deprecated_union_annotation(
+        annotation: nodes.NodeNG, union_name: str
+    ) -> bool:
+        return (
+            isinstance(annotation, nodes.Subscript)
+            and isinstance(annotation.value, nodes.Name)
+            and annotation.value.name == union_name
+        )

-    def _check_for_alternative_union_syntax(self, node: (nodes.Name | nodes
-        .Attribute), name: str) ->None:
+    def _is_binop_union_annotation(self, annotation: nodes.NodeNG) -> bool:
+        return self._should_check_alternative_union_syntax and isinstance(
+            annotation, nodes.BinOp
+        )
+
+    @staticmethod
+    def _is_optional_none_annotation(annotation: nodes.Subscript) -> bool:
+        return (
+            isinstance(annotation.slice, nodes.Const) and annotation.slice.value is None
+        )
+
+    def _parse_binops_typehints(
+        self, binop_node: nodes.BinOp, typehints_list: list[nodes.NodeNG] | None = None
+    ) -> list[nodes.NodeNG]:
+        typehints_list = typehints_list or []
+        if isinstance(binop_node.left, nodes.BinOp):
+            typehints_list.extend(
+                self._parse_binops_typehints(binop_node.left, typehints_list)
+            )
+        else:
+            typehints_list.append(binop_node.left)
+        typehints_list.append(binop_node.right)
+        return typehints_list
+
+    def _check_union_types(
+        self, types: list[nodes.NodeNG], annotation: nodes.NodeNG
+    ) -> None:
+        types_set = set()
+        for typehint in types:
+            typehint_str = typehint.as_string()
+            if typehint_str in types_set:
+                self.add_message(
+                    "redundant-typehint-argument",
+                    node=annotation,
+                    args=(typehint_str),
+                    confidence=HIGH,
+                )
+            else:
+                types_set.add(typehint_str)
+
+    def _check_for_alternative_union_syntax(
+        self,
+        node: nodes.Name | nodes.Attribute,
+        name: str,
+    ) -> None:
         """Check if alternative union syntax could be used.

         Requires
@@ -126,10 +310,27 @@ class TypingChecker(BaseChecker):
         - OR: Python 3.7+ with postponed evaluation in
               a type annotation context
         """
-        pass
+        inferred = safe_infer(node)
+        if not (
+            isinstance(inferred, nodes.FunctionDef)
+            and inferred.qname() in {"typing.Optional", "typing.Union"}
+            or isinstance(inferred, astroid.bases.Instance)
+            and inferred.qname() == "typing._SpecialForm"
+        ):
+            return
+        if not (self._py310_plus or is_node_in_type_annotation_context(node)):
+            return
+        self.add_message(
+            "consider-alternative-union-syntax",
+            node=node,
+            args=(name, self._msg_postponed_eval_hint(node)),
+            confidence=INFERENCE,
+        )

-    def _check_for_typing_alias(self, node: (nodes.Name | nodes.Attribute)
-        ) ->None:
+    def _check_for_typing_alias(
+        self,
+        node: nodes.Name | nodes.Attribute,
+    ) -> None:
         """Check if typing alias is deprecated or could be replaced.

         Requires
@@ -141,29 +342,163 @@ class TypingChecker(BaseChecker):
             any name collisions, only ever used in a type annotation
             context, and can safely be replaced.
         """
-        pass
+        inferred = safe_infer(node)
+        if not isinstance(inferred, nodes.ClassDef):
+            return
+        alias = DEPRECATED_TYPING_ALIASES.get(inferred.qname(), None)
+        if alias is None:
+            return
+
+        if self._py39_plus:
+            if inferred.qname() == "typing.Callable" and self._broken_callable_location(
+                node
+            ):
+                self._found_broken_callable_location = True
+            self._deprecated_typing_alias_msgs.append(
+                DeprecatedTypingAliasMsg(
+                    node,
+                    inferred.qname(),
+                    alias.name,
+                )
+            )
+            return

-    @only_required_for_messages('consider-using-alias',
-        'deprecated-typing-alias')
-    def leave_module(self, node: nodes.Module) ->None:
+        # For PY37+, check for type annotation context first
+        if not is_node_in_type_annotation_context(node) and isinstance(
+            node.parent, nodes.Subscript
+        ):
+            if alias.name_collision is True:
+                self._alias_name_collisions.add(inferred.qname())
+            return
+        self._consider_using_alias_msgs.append(
+            DeprecatedTypingAliasMsg(
+                node,
+                inferred.qname(),
+                alias.name,
+                isinstance(node.parent, nodes.Subscript),
+            )
+        )
+
+    @only_required_for_messages("consider-using-alias", "deprecated-typing-alias")
+    def leave_module(self, node: nodes.Module) -> None:
         """After parsing of module is complete, add messages for
         'consider-using-alias' check.

         Make sure results are safe to recommend / collision free.
         """
-        pass
+        if self._py39_plus:
+            for msg in self._deprecated_typing_alias_msgs:
+                if (
+                    self._found_broken_callable_location
+                    and msg.qname == "typing.Callable"
+                ):
+                    continue
+                self.add_message(
+                    "deprecated-typing-alias",
+                    node=msg.node,
+                    args=(msg.qname, msg.alias),
+                    confidence=INFERENCE,
+                )
+
+        elif self._py37_plus:
+            msg_future_import = self._msg_postponed_eval_hint(node)
+            for msg in self._consider_using_alias_msgs:
+                if msg.qname in self._alias_name_collisions:
+                    continue
+                self.add_message(
+                    "consider-using-alias",
+                    node=msg.node,
+                    args=(
+                        msg.qname,
+                        msg.alias,
+                        msg_future_import if msg.parent_subscript else "",
+                    ),
+                    confidence=INFERENCE,
+                )

-    def _check_broken_noreturn(self, node: (nodes.Name | nodes.Attribute)
-        ) ->None:
+        # Clear all module cache variables
+        self._found_broken_callable_location = False
+        self._deprecated_typing_alias_msgs.clear()
+        self._alias_name_collisions.clear()
+        self._consider_using_alias_msgs.clear()
+
+    def _check_broken_noreturn(self, node: nodes.Name | nodes.Attribute) -> None:
         """Check for 'NoReturn' inside compound types."""
-        pass
+        if not isinstance(node.parent, nodes.BaseContainer):
+            # NoReturn not part of a Union or Callable type
+            return
+
+        if (
+            in_type_checking_block(node)
+            or is_postponed_evaluation_enabled(node)
+            and is_node_in_type_annotation_context(node)
+        ):
+            return

-    def _check_broken_callable(self, node: (nodes.Name | nodes.Attribute)
-        ) ->None:
+        for inferred in node.infer():
+            # To deal with typing_extensions, don't use safe_infer
+            if (
+                isinstance(inferred, (nodes.FunctionDef, nodes.ClassDef))
+                and inferred.qname() in TYPING_NORETURN
+                # In Python 3.7 - 3.8, NoReturn is alias of '_SpecialForm'
+                or isinstance(inferred, astroid.bases.BaseInstance)
+                and isinstance(inferred._proxied, nodes.ClassDef)
+                and inferred._proxied.qname() == "typing._SpecialForm"
+            ):
+                self.add_message("broken-noreturn", node=node, confidence=INFERENCE)
+                break
+
+    def _check_broken_callable(self, node: nodes.Name | nodes.Attribute) -> None:
         """Check for 'collections.abc.Callable' inside Optional and Union."""
-        pass
+        inferred = safe_infer(node)
+        if not (
+            isinstance(inferred, nodes.ClassDef)
+            and inferred.qname() == "_collections_abc.Callable"
+            and self._broken_callable_location(node)
+        ):
+            return
+
+        self.add_message("broken-collections-callable", node=node, confidence=INFERENCE)

-    def _broken_callable_location(self, node: (nodes.Name | nodes.Attribute)
-        ) ->bool:
+    def _broken_callable_location(self, node: nodes.Name | nodes.Attribute) -> bool:
         """Check if node would be a broken location for collections.abc.Callable."""
-        pass
+        if (
+            in_type_checking_block(node)
+            or is_postponed_evaluation_enabled(node)
+            and is_node_in_type_annotation_context(node)
+        ):
+            return False
+
+        # Check first Callable arg is a list of arguments -> Callable[[int], None]
+        if not (
+            isinstance(node.parent, nodes.Subscript)
+            and isinstance(node.parent.slice, nodes.Tuple)
+            and len(node.parent.slice.elts) == 2
+            and isinstance(node.parent.slice.elts[0], nodes.List)
+        ):
+            return False
+
+        # Check nested inside Optional or Union
+        parent_subscript = node.parent.parent
+        if isinstance(parent_subscript, nodes.BaseContainer):
+            parent_subscript = parent_subscript.parent
+        if not (
+            isinstance(parent_subscript, nodes.Subscript)
+            and isinstance(parent_subscript.value, (nodes.Name, nodes.Attribute))
+        ):
+            return False
+
+        inferred_parent = safe_infer(parent_subscript.value)
+        if not (
+            isinstance(inferred_parent, nodes.FunctionDef)
+            and inferred_parent.qname() in {"typing.Optional", "typing.Union"}
+            or isinstance(inferred_parent, astroid.bases.Instance)
+            and inferred_parent.qname() == "typing._SpecialForm"
+        ):
+            return False
+
+        return True
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(TypingChecker(linter))
diff --git a/pylint/extensions/while_used.py b/pylint/extensions/while_used.py
index f121a899f..da1f9d59c 100644
--- a/pylint/extensions/while_used.py
+++ b/pylint/extensions/while_used.py
@@ -1,15 +1,37 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Check for use of while loops."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.checkers import BaseChecker
 from pylint.checkers.utils import only_required_for_messages
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter


 class WhileChecker(BaseChecker):
-    name = 'while_used'
-    msgs = {'W0149': ('Used `while` loop', 'while-used',
-        'Unbounded `while` loops can often be rewritten as bounded `for` loops. Exceptions can be made for cases such as event loops, listeners, etc.'
-        )}
+    name = "while_used"
+    msgs = {
+        "W0149": (
+            "Used `while` loop",
+            "while-used",
+            "Unbounded `while` loops can often be rewritten as bounded `for` loops. "
+            "Exceptions can be made for cases such as event loops, listeners, etc.",
+        )
+    }
+
+    @only_required_for_messages("while-used")
+    def visit_while(self, node: nodes.While) -> None:
+        self.add_message("while-used", node=node)
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_checker(WhileChecker(linter))
diff --git a/pylint/graph.py b/pylint/graph.py
index a76f38ae0..4112fadfa 100644
--- a/pylint/graph.py
+++ b/pylint/graph.py
@@ -1,8 +1,14 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Graph manipulation utilities.

 (dot generation adapted from pypy/translator/tool/make_dot.py)
 """
+
 from __future__ import annotations
+
 import codecs
 import os
 import shutil
@@ -12,44 +18,63 @@ from collections.abc import Sequence
 from typing import Any


-def target_info_from_filename(filename: str) ->tuple[str, str, str]:
+def target_info_from_filename(filename: str) -> tuple[str, str, str]:
     """Transforms /some/path/foo.png into ('/some/path', 'foo.png', 'png')."""
-    pass
+    basename = os.path.basename(filename)
+    storedir = os.path.dirname(os.path.abspath(filename))
+    target = os.path.splitext(filename)[-1][1:]
+    return storedir, basename, target


 class DotBackend:
     """Dot File back-end."""

-    def __init__(self, graphname: str, rankdir: (str | None)=None, size:
-        Any=None, ratio: Any=None, charset: str='utf-8', renderer: str=
-        'dot', additional_param: (dict[str, Any] | None)=None) ->None:
+    def __init__(
+        self,
+        graphname: str,
+        rankdir: str | None = None,
+        size: Any = None,
+        ratio: Any = None,
+        charset: str = "utf-8",
+        renderer: str = "dot",
+        additional_param: dict[str, Any] | None = None,
+    ) -> None:
         if additional_param is None:
             additional_param = {}
         self.graphname = graphname
         self.renderer = renderer
         self.lines: list[str] = []
         self._source: str | None = None
-        self.emit(f'digraph {normalize_node_id(graphname)} {{')
+        self.emit(f"digraph {normalize_node_id(graphname)} {{")
         if rankdir:
-            self.emit(f'rankdir={rankdir}')
+            self.emit(f"rankdir={rankdir}")
         if ratio:
-            self.emit(f'ratio={ratio}')
+            self.emit(f"ratio={ratio}")
         if size:
             self.emit(f'size="{size}"')
         if charset:
-            assert charset.lower() in {'utf-8', 'iso-8859-1', 'latin1'
-                }, f'unsupported charset {charset}'
+            assert charset.lower() in {
+                "utf-8",
+                "iso-8859-1",
+                "latin1",
+            }, f"unsupported charset {charset}"
             self.emit(f'charset="{charset}"')
         for param in additional_param.items():
-            self.emit('='.join(param))
+            self.emit("=".join(param))

-    def get_source(self) ->str:
+    def get_source(self) -> str:
         """Returns self._source."""
-        pass
+        if self._source is None:
+            self.emit("}\n")
+            self._source = "\n".join(self.lines)
+            del self.lines
+        return self._source
+
     source = property(get_source)

-    def generate(self, outputfile: (str | None)=None, mapfile: (str | None)
-        =None) ->str:
+    def generate(
+        self, outputfile: str | None = None, mapfile: str | None = None
+    ) -> str:
         """Generates a graph file.

         :param str outputfile: filename and path [defaults to graphname.png]
@@ -59,41 +84,129 @@ class DotBackend:
         :return: a path to the generated file
         :raises RuntimeError: if the executable for rendering was not found
         """
-        pass
-
-    def emit(self, line: str) ->None:
+        # pylint: disable=duplicate-code
+        graphviz_extensions = ("dot", "gv")
+        name = self.graphname
+        if outputfile is None:
+            target = "png"
+            pdot, dot_sourcepath = tempfile.mkstemp(".gv", name)
+            ppng, outputfile = tempfile.mkstemp(".png", name)
+            os.close(pdot)
+            os.close(ppng)
+        else:
+            _, _, target = target_info_from_filename(outputfile)
+            if not target:
+                target = "png"
+                outputfile = outputfile + "." + target
+            if target not in graphviz_extensions:
+                pdot, dot_sourcepath = tempfile.mkstemp(".gv", name)
+                os.close(pdot)
+            else:
+                dot_sourcepath = outputfile
+        with codecs.open(dot_sourcepath, "w", encoding="utf8") as file:
+            file.write(self.source)
+        if target not in graphviz_extensions:
+            if shutil.which(self.renderer) is None:
+                raise RuntimeError(
+                    f"Cannot generate `{outputfile}` because '{self.renderer}' "
+                    "executable not found. Install graphviz, or specify a `.gv` "
+                    "outputfile to produce the DOT source code."
+                )
+            if mapfile:
+                subprocess.run(
+                    [
+                        self.renderer,
+                        "-Tcmapx",
+                        "-o",
+                        mapfile,
+                        "-T",
+                        target,
+                        dot_sourcepath,
+                        "-o",
+                        outputfile,
+                    ],
+                    check=True,
+                )
+            else:
+                subprocess.run(
+                    [self.renderer, "-T", target, dot_sourcepath, "-o", outputfile],
+                    check=True,
+                )
+            os.unlink(dot_sourcepath)
+        return outputfile
+
+    def emit(self, line: str) -> None:
         """Adds <line> to final output."""
-        pass
+        self.lines.append(line)

-    def emit_edge(self, name1: str, name2: str, **props: Any) ->None:
+    def emit_edge(self, name1: str, name2: str, **props: Any) -> None:
         """Emit an edge from <name1> to <name2>.

         For edge properties: see https://www.graphviz.org/doc/info/attrs.html
         """
-        pass
+        attrs = [f'{prop}="{value}"' for prop, value in props.items()]
+        n_from, n_to = normalize_node_id(name1), normalize_node_id(name2)
+        self.emit(f"{n_from} -> {n_to} [{', '.join(sorted(attrs))}];")

-    def emit_node(self, name: str, **props: Any) ->None:
+    def emit_node(self, name: str, **props: Any) -> None:
         """Emit a node with given properties.

         For node properties: see https://www.graphviz.org/doc/info/attrs.html
         """
-        pass
+        attrs = [f'{prop}="{value}"' for prop, value in props.items()]
+        self.emit(f"{normalize_node_id(name)} [{', '.join(sorted(attrs))}];")


-def normalize_node_id(nid: str) ->str:
+def normalize_node_id(nid: str) -> str:
     """Returns a suitable DOT node id for `nid`."""
-    pass
+    return f'"{nid}"'


-def get_cycles(graph_dict: dict[str, set[str]], vertices: (list[str] | None
-    )=None) ->Sequence[list[str]]:
+def get_cycles(
+    graph_dict: dict[str, set[str]], vertices: list[str] | None = None
+) -> Sequence[list[str]]:
     """Return a list of detected cycles based on an ordered graph (i.e. keys are
     vertices and values are lists of destination vertices representing edges).
     """
-    pass
-
-
-def _get_cycles(graph_dict: dict[str, set[str]], path: list[str], visited:
-    set[str], result: list[list[str]], vertice: str) ->None:
+    if not graph_dict:
+        return ()
+    result: list[list[str]] = []
+    if vertices is None:
+        vertices = list(graph_dict.keys())
+    for vertice in vertices:
+        _get_cycles(graph_dict, [], set(), result, vertice)
+    return result
+
+
+def _get_cycles(
+    graph_dict: dict[str, set[str]],
+    path: list[str],
+    visited: set[str],
+    result: list[list[str]],
+    vertice: str,
+) -> None:
     """Recursive function doing the real work for get_cycles."""
-    pass
+    if vertice in path:
+        cycle = [vertice]
+        for node in path[::-1]:
+            if node == vertice:
+                break
+            cycle.insert(0, node)
+        # make a canonical representation
+        start_from = min(cycle)
+        index = cycle.index(start_from)
+        cycle = cycle[index:] + cycle[0:index]
+        # append it to result if not already in
+        if cycle not in result:
+            result.append(cycle)
+        return
+    path.append(vertice)
+    try:
+        for node in graph_dict[vertice]:
+            # don't check already visited nodes again
+            if node not in visited:
+                _get_cycles(graph_dict, path, visited, result, node)
+                visited.add(node)
+    except KeyError:
+        pass
+    path.pop()
diff --git a/pylint/interfaces.py b/pylint/interfaces.py
index 73c93e32d..c47e297b4 100644
--- a/pylint/interfaces.py
+++ b/pylint/interfaces.py
@@ -1,7 +1,20 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import NamedTuple
-__all__ = ('HIGH', 'CONTROL_FLOW', 'INFERENCE', 'INFERENCE_FAILURE',
-    'UNDEFINED', 'CONFIDENCE_LEVELS', 'CONFIDENCE_LEVEL_NAMES')
+
+__all__ = (
+    "HIGH",
+    "CONTROL_FLOW",
+    "INFERENCE",
+    "INFERENCE_FAILURE",
+    "UNDEFINED",
+    "CONFIDENCE_LEVELS",
+    "CONFIDENCE_LEVEL_NAMES",
+)


 class Confidence(NamedTuple):
@@ -9,15 +22,17 @@ class Confidence(NamedTuple):
     description: str


-HIGH = Confidence('HIGH', 'Warning that is not based on inference result.')
-CONTROL_FLOW = Confidence('CONTROL_FLOW',
-    'Warning based on assumptions about control flow.')
-INFERENCE = Confidence('INFERENCE', 'Warning based on inference result.')
-INFERENCE_FAILURE = Confidence('INFERENCE_FAILURE',
-    'Warning based on inference with failures.')
-UNDEFINED = Confidence('UNDEFINED',
-    'Warning without any associated confidence level.')
-CONFIDENCE_LEVELS = [HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
-    UNDEFINED]
+# Warning Certainties
+HIGH = Confidence("HIGH", "Warning that is not based on inference result.")
+CONTROL_FLOW = Confidence(
+    "CONTROL_FLOW", "Warning based on assumptions about control flow."
+)
+INFERENCE = Confidence("INFERENCE", "Warning based on inference result.")
+INFERENCE_FAILURE = Confidence(
+    "INFERENCE_FAILURE", "Warning based on inference with failures."
+)
+UNDEFINED = Confidence("UNDEFINED", "Warning without any associated confidence level.")
+
+CONFIDENCE_LEVELS = [HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED]
 CONFIDENCE_LEVEL_NAMES = [i.name for i in CONFIDENCE_LEVELS]
 CONFIDENCE_MAP = {i.name: i for i in CONFIDENCE_LEVELS}
diff --git a/pylint/lint/base_options.py b/pylint/lint/base_options.py
index af170fd04..59a811d9c 100644
--- a/pylint/lint/base_options.py
+++ b/pylint/lint/base_options.py
@@ -1,20 +1,608 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Functions that creates the basic options for the Run and PyLinter classes."""
+
 from __future__ import annotations
+
 import re
 import sys
 from typing import TYPE_CHECKING
+
 from pylint import constants, interfaces
-from pylint.config.callback_actions import _DisableAction, _DoNothingAction, _EnableAction, _ErrorsOnlyModeAction, _FullDocumentationAction, _GenerateConfigFileAction, _GenerateRCFileAction, _ListCheckGroupsAction, _ListConfidenceLevelsAction, _ListExtensionsAction, _ListMessagesAction, _ListMessagesEnabledAction, _LongHelpAction, _MessageHelpAction, _OutputFormatAction
+from pylint.config.callback_actions import (
+    _DisableAction,
+    _DoNothingAction,
+    _EnableAction,
+    _ErrorsOnlyModeAction,
+    _FullDocumentationAction,
+    _GenerateConfigFileAction,
+    _GenerateRCFileAction,
+    _ListCheckGroupsAction,
+    _ListConfidenceLevelsAction,
+    _ListExtensionsAction,
+    _ListMessagesAction,
+    _ListMessagesEnabledAction,
+    _LongHelpAction,
+    _MessageHelpAction,
+    _OutputFormatAction,
+)
 from pylint.typing import Options
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter, Run


-def _make_linter_options(linter: PyLinter) ->Options:
+def _make_linter_options(linter: PyLinter) -> Options:
     """Return the options used in a PyLinter class."""
-    pass
+    return (
+        (
+            "ignore",
+            {
+                "type": "csv",
+                "metavar": "<file>[,<file>...]",
+                "dest": "black_list",
+                "kwargs": {"old_names": ["black_list"]},
+                "default": constants.DEFAULT_IGNORE_LIST,
+                "help": "Files or directories to be skipped. "
+                "They should be base names, not paths.",
+            },
+        ),
+        (
+            "ignore-patterns",
+            {
+                "type": "regexp_csv",
+                "metavar": "<pattern>[,<pattern>...]",
+                "dest": "black_list_re",
+                "default": (re.compile(r"^\.#"),),
+                "help": "Files or directories matching the regular expression patterns are"
+                " skipped. The regex matches against base names, not paths. The default value "
+                "ignores Emacs file locks",
+            },
+        ),
+        (
+            "ignore-paths",
+            {
+                "type": "regexp_paths_csv",
+                "metavar": "<pattern>[,<pattern>...]",
+                "default": [],
+                "help": "Add files or directories matching the regular expressions patterns to the "
+                "ignore-list. The regex matches against paths and can be in "
+                "Posix or Windows format. Because '\\\\' represents the directory delimiter "
+                "on Windows systems, it can't be used as an escape character.",
+            },
+        ),
+        (
+            "persistent",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Pickle collected data for later comparisons.",
+            },
+        ),
+        (
+            "load-plugins",
+            {
+                "type": "csv",
+                "metavar": "<modules>",
+                "default": (),
+                "help": "List of plugins (as comma separated values of "
+                "python module names) to load, usually to register "
+                "additional checkers.",
+            },
+        ),
+        (
+            "output-format",
+            {
+                "default": "text",
+                "action": _OutputFormatAction,
+                "callback": lambda x: x,
+                "metavar": "<format>",
+                "short": "f",
+                "group": "Reports",
+                "help": "Set the output format. Available formats are: text, "
+                "parseable, colorized, json2 (improved json format), json "
+                "(old json format) and msvs (visual studio). "
+                "You can also give a reporter class, e.g. mypackage.mymodule."
+                "MyReporterClass.",
+                "kwargs": {"linter": linter},
+            },
+        ),
+        (
+            "reports",
+            {
+                "default": False,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "short": "r",
+                "group": "Reports",
+                "help": "Tells whether to display a full report or only the "
+                "messages.",
+            },
+        ),
+        (
+            "evaluation",
+            {
+                "type": "string",
+                "metavar": "<python_expression>",
+                "group": "Reports",
+                "default": "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + "
+                "convention) / statement) * 10))",
+                "help": "Python expression which should return a score less "
+                "than or equal to 10. You have access to the variables 'fatal', "
+                "'error', 'warning', 'refactor', 'convention', and 'info' which "
+                "contain the number of messages in each category, as well as "
+                "'statement' which is the total number of statements "
+                "analyzed. This score is used by the global "
+                "evaluation report (RP0004).",
+            },
+        ),
+        (
+            "score",
+            {
+                "default": True,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "short": "s",
+                "group": "Reports",
+                "help": "Activate the evaluation score.",
+            },
+        ),
+        (
+            "fail-under",
+            {
+                "default": 10,
+                "type": "float",
+                "metavar": "<score>",
+                "help": "Specify a score threshold under which the program will exit with error.",
+            },
+        ),
+        (
+            "fail-on",
+            {
+                "default": "",
+                "type": "csv",
+                "metavar": "<msg ids>",
+                "help": "Return non-zero exit code if any of these messages/categories are detected,"
+                " even if score is above --fail-under value. Syntax same as enable."
+                " Messages specified are enabled, while categories only check already-enabled messages.",
+            },
+        ),
+        (
+            "confidence",
+            {
+                "type": "confidence",
+                "metavar": "<levels>",
+                "default": interfaces.CONFIDENCE_LEVEL_NAMES,
+                "group": "Messages control",
+                "help": "Only show warnings with the listed confidence levels."
+                f" Leave empty to show all. Valid levels: {', '.join(interfaces.CONFIDENCE_LEVEL_NAMES)}.",
+            },
+        ),
+        (
+            "enable",
+            {
+                "action": _EnableAction,
+                "callback": lambda x1, x2, x3, x4: x1,
+                "default": (),
+                "metavar": "<msg ids>",
+                "short": "e",
+                "group": "Messages control",
+                "help": "Enable the message, report, category or checker with the "
+                "given id(s). You can either give multiple identifier "
+                "separated by comma (,) or put this option multiple time "
+                "(only on the command line, not in the configuration file "
+                "where it should appear only once). "
+                'See also the "--disable" option for examples.',
+                "kwargs": {"linter": linter},
+            },
+        ),
+        (
+            "disable",
+            {
+                "action": _DisableAction,
+                "callback": lambda x1, x2, x3, x4: x1,
+                "metavar": "<msg ids>",
+                "default": (),
+                "short": "d",
+                "group": "Messages control",
+                "help": "Disable the message, report, category or checker "
+                "with the given id(s). You can either give multiple identifiers "
+                "separated by comma (,) or put this option multiple times "
+                "(only on the command line, not in the configuration file "
+                "where it should appear only once). "
+                'You can also use "--disable=all" to disable everything first '
+                "and then re-enable specific checks. For example, if you want "
+                "to run only the similarities checker, you can use "
+                '"--disable=all --enable=similarities". '
+                "If you want to run only the classes checker, but have no "
+                "Warning level messages displayed, use "
+                '"--disable=all --enable=classes --disable=W".',
+                "kwargs": {"linter": linter},
+            },
+        ),
+        (
+            "msg-template",
+            {
+                "type": "string",
+                "default": "",
+                "metavar": "<template>",
+                "group": "Reports",
+                "help": (
+                    "Template used to display messages. "
+                    "This is a python new-style format string "
+                    "used to format the message information. "
+                    "See doc for all details."
+                ),
+            },
+        ),
+        (
+            "jobs",
+            {
+                "type": "int",
+                "metavar": "<n-processes>",
+                "short": "j",
+                "default": 1,
+                "help": "Use multiple processes to speed up Pylint. Specifying 0 will "
+                "auto-detect the number of processors available to use, and will cap "
+                "the count on Windows to avoid hangs.",
+            },
+        ),
+        (
+            "unsafe-load-any-extension",
+            {
+                "type": "yn",
+                "metavar": "<y or n>",
+                "default": False,
+                "hide": True,
+                "help": (
+                    "Allow loading of arbitrary C extensions. Extensions"
+                    " are imported into the active Python interpreter and"
+                    " may run arbitrary code."
+                ),
+            },
+        ),
+        (
+            "limit-inference-results",
+            {
+                "type": "int",
+                "metavar": "<number-of-results>",
+                "default": 100,
+                "help": (
+                    "Control the amount of potential inferred values when inferring "
+                    "a single object. This can help the performance when dealing with "
+                    "large functions or complex, nested conditions."
+                ),
+            },
+        ),
+        (
+            "extension-pkg-allow-list",
+            {
+                "type": "csv",
+                "metavar": "<pkg[,pkg]>",
+                "default": [],
+                "help": (
+                    "A comma-separated list of package or module names"
+                    " from where C extensions may be loaded. Extensions are"
+                    " loading into the active Python interpreter and may run"
+                    " arbitrary code."
+                ),
+            },
+        ),
+        (
+            "extension-pkg-whitelist",
+            {
+                "type": "csv",
+                "metavar": "<pkg[,pkg]>",
+                "default": [],
+                "help": (
+                    "A comma-separated list of package or module names"
+                    " from where C extensions may be loaded. Extensions are"
+                    " loading into the active Python interpreter and may run"
+                    " arbitrary code. (This is an alternative name to"
+                    " extension-pkg-allow-list for backward compatibility.)"
+                ),
+            },
+        ),
+        (
+            "suggestion-mode",
+            {
+                "type": "yn",
+                "metavar": "<y or n>",
+                "default": True,
+                "help": (
+                    "When enabled, pylint would attempt to guess common "
+                    "misconfiguration and emit user-friendly hints instead "
+                    "of false-positive error messages."
+                ),
+            },
+        ),
+        (
+            "exit-zero",
+            {
+                "action": "store_true",
+                "default": False,
+                "metavar": "<flag>",
+                "help": (
+                    "Always return a 0 (non-error) status code, even if "
+                    "lint errors are found. This is primarily useful in "
+                    "continuous integration scripts."
+                ),
+            },
+        ),
+        (
+            "from-stdin",
+            {
+                "action": "store_true",
+                "default": False,
+                "metavar": "<flag>",
+                "help": (
+                    "Interpret the stdin as a python script, whose filename "
+                    "needs to be passed as the module_or_package argument."
+                ),
+            },
+        ),
+        (
+            "source-roots",
+            {
+                "type": "glob_paths_csv",
+                "metavar": "<path>[,<path>...]",
+                "default": (),
+                "help": "Add paths to the list of the source roots. Supports globbing patterns. "
+                "The source root is an absolute path or a path relative to the current working "
+                "directory used to determine a package namespace for modules located under the "
+                "source root.",
+            },
+        ),
+        (
+            "recursive",
+            {
+                "type": "yn",
+                "metavar": "<yn>",
+                "default": False,
+                "help": "Discover python modules and packages in the file system subtree.",
+            },
+        ),
+        (
+            "py-version",
+            {
+                "default": sys.version_info[:2],
+                "type": "py_version",
+                "metavar": "<py_version>",
+                "help": (
+                    "Minimum Python version to use for version dependent checks. "
+                    "Will default to the version used to run pylint."
+                ),
+            },
+        ),
+        (
+            "ignored-modules",
+            {
+                "default": (),
+                "type": "csv",
+                "metavar": "<module names>",
+                "help": "List of module names for which member attributes "
+                "should not be checked and will not be imported "
+                "(useful for modules/projects "
+                "where namespaces are manipulated during runtime and "
+                "thus existing member attributes cannot be "
+                "deduced by static analysis). It supports qualified "
+                "module names, as well as Unix pattern matching.",
+            },
+        ),
+        (
+            "analyse-fallback-blocks",
+            {
+                "default": False,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Analyse import fallback blocks. This can be used to "
+                "support both Python 2 and 3 compatible code, which "
+                "means that the block might have code that exists "
+                "only in one or another interpreter, leading to false "
+                "positives when analysed.",
+            },
+        ),
+        (
+            "clear-cache-post-run",
+            {
+                "default": False,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Clear in-memory caches upon conclusion of linting. "
+                "Useful if running pylint in a server-like mode.",
+            },
+        ),
+        (
+            "prefer-stubs",
+            {
+                "default": False,
+                "type": "yn",
+                "metavar": "<y or n>",
+                "help": "Resolve imports to .pyi stubs if available. May "
+                "reduce no-member messages and increase not-an-iterable "
+                "messages.",
+            },
+        ),
+    )


-def _make_run_options(self: Run) ->Options:
+def _make_run_options(self: Run) -> Options:
     """Return the options used in a Run class."""
-    pass
+    return (
+        (
+            "rcfile",
+            {
+                "action": _DoNothingAction,
+                "kwargs": {},
+                "group": "Commands",
+                "help": "Specify a configuration file to load.",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "output",
+            {
+                "action": _DoNothingAction,
+                "kwargs": {},
+                "group": "Commands",
+                "help": "Specify an output file.",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "init-hook",
+            {
+                "action": _DoNothingAction,
+                "kwargs": {},
+                "help": "Python code to execute, usually for sys.path "
+                "manipulation such as pygtk.require().",
+            },
+        ),
+        (
+            "help-msg",
+            {
+                "action": _MessageHelpAction,
+                "kwargs": {"Run": self},
+                "group": "Commands",
+                "help": "Display a help message for the given message id and "
+                "exit. The value may be a comma separated list of message ids.",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "list-msgs",
+            {
+                "action": _ListMessagesAction,
+                "kwargs": {"Run": self},
+                "group": "Commands",
+                "help": "Display a list of all pylint's messages divided by whether "
+                "they are emittable with the given interpreter.",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "list-msgs-enabled",
+            {
+                "action": _ListMessagesEnabledAction,
+                "kwargs": {"Run": self},
+                "group": "Commands",
+                "help": "Display a list of what messages are enabled, "
+                "disabled and non-emittable with the given configuration.",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "list-groups",
+            {
+                "action": _ListCheckGroupsAction,
+                "kwargs": {"Run": self},
+                "group": "Commands",
+                "help": "List pylint's message groups.",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "list-conf-levels",
+            {
+                "action": _ListConfidenceLevelsAction,
+                "kwargs": {"Run": self},
+                "group": "Commands",
+                "help": "Generate pylint's confidence levels.",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "list-extensions",
+            {
+                "action": _ListExtensionsAction,
+                "kwargs": {"Run": self},
+                "group": "Commands",
+                "help": "List available extensions.",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "full-documentation",
+            {
+                "action": _FullDocumentationAction,
+                "kwargs": {"Run": self},
+                "group": "Commands",
+                "help": "Generate pylint's full documentation.",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "generate-rcfile",
+            {
+                "action": _GenerateRCFileAction,
+                "kwargs": {"Run": self},
+                "group": "Commands",
+                "help": "Generate a sample configuration file according to "
+                "the current configuration. You can put other options "
+                "before this one to get them in the generated "
+                "configuration.",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "generate-toml-config",
+            {
+                "action": _GenerateConfigFileAction,
+                "kwargs": {"Run": self},
+                "group": "Commands",
+                "help": "Generate a sample configuration file according to "
+                "the current configuration. You can put other options "
+                "before this one to get them in the generated "
+                "configuration. The config is in the .toml format.",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "errors-only",
+            {
+                "action": _ErrorsOnlyModeAction,
+                "kwargs": {"Run": self},
+                "short": "E",
+                "help": "In error mode, messages with a category besides "
+                "ERROR or FATAL are suppressed, and no reports are done by default. "
+                "Error mode is compatible with disabling specific errors. ",
+                "hide_from_config_file": True,
+            },
+        ),
+        (
+            "verbose",
+            {
+                "action": _DoNothingAction,
+                "kwargs": {},
+                "short": "v",
+                "help": "In verbose mode, extra non-checker-related info "
+                "will be displayed.",
+                "hide_from_config_file": True,
+                "metavar": "",
+            },
+        ),
+        (
+            "enable-all-extensions",
+            {
+                "action": _DoNothingAction,
+                "kwargs": {},
+                "help": "Load and enable all available extensions. "
+                "Use --list-extensions to see a list all available extensions.",
+                "hide_from_config_file": True,
+                "metavar": "",
+            },
+        ),
+        (
+            "long-help",
+            {
+                "action": _LongHelpAction,
+                "kwargs": {"Run": self},
+                "help": "Show more verbose help.",
+                "group": "Commands",
+                "hide_from_config_file": True,
+            },
+        ),
+    )
diff --git a/pylint/lint/caching.py b/pylint/lint/caching.py
index c1d94e651..97c4503d1 100644
--- a/pylint/lint/caching.py
+++ b/pylint/lint/caching.py
@@ -1,8 +1,71 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import pickle
 import sys
 import warnings
 from pathlib import Path
+
 from pylint.constants import PYLINT_HOME
 from pylint.utils import LinterStats
+
 PYLINT_HOME_AS_PATH = Path(PYLINT_HOME)
+
+
+def _get_pdata_path(
+    base_name: Path, recurs: int, pylint_home: Path = PYLINT_HOME_AS_PATH
+) -> Path:
+    # We strip all characters that can't be used in a filename. Also strip '/' and
+    # '\\' because we want to create a single file, not sub-directories.
+    underscored_name = "_".join(
+        str(p.replace(":", "_").replace("/", "_").replace("\\", "_"))
+        for p in base_name.parts
+    )
+    return pylint_home / f"{underscored_name}_{recurs}.stats"
+
+
+def load_results(
+    base: str | Path, pylint_home: str | Path = PYLINT_HOME
+) -> LinterStats | None:
+    base = Path(base)
+    pylint_home = Path(pylint_home)
+    data_file = _get_pdata_path(base, 1, pylint_home)
+
+    if not data_file.exists():
+        return None
+
+    try:
+        with open(data_file, "rb") as stream:
+            data = pickle.load(stream)
+            if not isinstance(data, LinterStats):
+                warnings.warn(
+                    "You're using an old pylint cache with invalid data following "
+                    f"an upgrade, please delete '{data_file}'.",
+                    UserWarning,
+                    stacklevel=2,
+                )
+                raise TypeError
+            return data
+    except Exception:  # pylint: disable=broad-except
+        # There's an issue with the cache but we just continue as if it isn't there
+        return None
+
+
+def save_results(
+    results: LinterStats, base: str | Path, pylint_home: str | Path = PYLINT_HOME
+) -> None:
+    base = Path(base)
+    pylint_home = Path(pylint_home)
+    try:
+        pylint_home.mkdir(parents=True, exist_ok=True)
+    except OSError:  # pragma: no cover
+        print(f"Unable to create directory {pylint_home}", file=sys.stderr)
+    data_file = _get_pdata_path(base, 1)
+    try:
+        with open(data_file, "wb") as stream:
+            pickle.dump(results, stream)
+    except OSError as ex:  # pragma: no cover
+        print(f"Unable to create file {data_file}: {ex}", file=sys.stderr)
diff --git a/pylint/lint/expand_modules.py b/pylint/lint/expand_modules.py
index e14142cd7..04e701884 100644
--- a/pylint/lint/expand_modules.py
+++ b/pylint/lint/expand_modules.py
@@ -1,29 +1,171 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import os
 import sys
 from collections.abc import Sequence
 from pathlib import Path
 from re import Pattern
+
 from astroid import modutils
+
 from pylint.typing import ErrorDescriptionDict, ModuleDescriptionDict


-def discover_package_path(modulepath: str, source_roots: Sequence[str]) ->str:
+def _modpath_from_file(filename: str, is_namespace: bool, path: list[str]) -> list[str]:
+    def _is_package_cb(inner_path: str, parts: list[str]) -> bool:
+        return modutils.check_modpath_has_init(inner_path, parts) or is_namespace
+
+    return modutils.modpath_from_file_with_callback(  # type: ignore[no-any-return]
+        filename, path=path, is_package_cb=_is_package_cb
+    )
+
+
+def discover_package_path(modulepath: str, source_roots: Sequence[str]) -> str:
     """Discover package path from one its modules and source roots."""
-    pass
+    dirname = os.path.realpath(os.path.expanduser(modulepath))
+    if not os.path.isdir(dirname):
+        dirname = os.path.dirname(dirname)

+    # Look for a source root that contains the module directory
+    for source_root in source_roots:
+        source_root = os.path.realpath(os.path.expanduser(source_root))
+        if os.path.commonpath([source_root, dirname]) == source_root:
+            return source_root

-def _is_in_ignore_list_re(element: str, ignore_list_re: list[Pattern[str]]
-    ) ->bool:
+    # Fall back to legacy discovery by looking for __init__.py upwards as
+    # it's the only way given that source root was not found or was not provided
+    while True:
+        if not os.path.exists(os.path.join(dirname, "__init__.py")):
+            return dirname
+        old_dirname = dirname
+        dirname = os.path.dirname(dirname)
+        if old_dirname == dirname:
+            return os.getcwd()
+
+
+def _is_in_ignore_list_re(element: str, ignore_list_re: list[Pattern[str]]) -> bool:
     """Determines if the element is matched in a regex ignore-list."""
-    pass
+    return any(file_pattern.match(element) for file_pattern in ignore_list_re)
+

+def _is_ignored_file(
+    element: str,
+    ignore_list: list[str],
+    ignore_list_re: list[Pattern[str]],
+    ignore_list_paths_re: list[Pattern[str]],
+) -> bool:
+    element = os.path.normpath(element)
+    basename = Path(element).absolute().name
+    return (
+        basename in ignore_list
+        or _is_in_ignore_list_re(basename, ignore_list_re)
+        or _is_in_ignore_list_re(element, ignore_list_paths_re)
+    )

-def expand_modules(files_or_modules: Sequence[str], source_roots: Sequence[
-    str], ignore_list: list[str], ignore_list_re: list[Pattern[str]],
-    ignore_list_paths_re: list[Pattern[str]]) ->tuple[dict[str,
-    ModuleDescriptionDict], list[ErrorDescriptionDict]]:
+
+# pylint: disable = too-many-locals, too-many-statements
+def expand_modules(
+    files_or_modules: Sequence[str],
+    source_roots: Sequence[str],
+    ignore_list: list[str],
+    ignore_list_re: list[Pattern[str]],
+    ignore_list_paths_re: list[Pattern[str]],
+) -> tuple[dict[str, ModuleDescriptionDict], list[ErrorDescriptionDict]]:
     """Take a list of files/modules/packages and return the list of tuple
     (file, module name) which have to be actually checked.
     """
-    pass
+    result: dict[str, ModuleDescriptionDict] = {}
+    errors: list[ErrorDescriptionDict] = []
+    path = sys.path.copy()
+
+    for something in files_or_modules:
+        basename = os.path.basename(something)
+        if _is_ignored_file(
+            something, ignore_list, ignore_list_re, ignore_list_paths_re
+        ):
+            continue
+        module_package_path = discover_package_path(something, source_roots)
+        additional_search_path = [".", module_package_path, *path]
+        if os.path.exists(something):
+            # this is a file or a directory
+            try:
+                modname = ".".join(
+                    modutils.modpath_from_file(something, path=additional_search_path)
+                )
+            except ImportError:
+                modname = os.path.splitext(basename)[0]
+            if os.path.isdir(something):
+                filepath = os.path.join(something, "__init__.py")
+            else:
+                filepath = something
+        else:
+            # suppose it's a module or package
+            modname = something
+            try:
+                filepath = modutils.file_from_modpath(
+                    modname.split("."), path=additional_search_path
+                )
+                if filepath is None:
+                    continue
+            except ImportError as ex:
+                errors.append({"key": "fatal", "mod": modname, "ex": ex})
+                continue
+        filepath = os.path.normpath(filepath)
+        modparts = (modname or something).split(".")
+        try:
+            spec = modutils.file_info_from_modpath(
+                modparts, path=additional_search_path
+            )
+        except ImportError:
+            # Might not be acceptable, don't crash.
+            is_namespace = False
+            is_directory = os.path.isdir(something)
+        else:
+            is_namespace = modutils.is_namespace(spec)
+            is_directory = modutils.is_directory(spec)
+        if not is_namespace:
+            if filepath in result:
+                # Always set arg flag if module explicitly given.
+                result[filepath]["isarg"] = True
+            else:
+                result[filepath] = {
+                    "path": filepath,
+                    "name": modname,
+                    "isarg": True,
+                    "basepath": filepath,
+                    "basename": modname,
+                }
+        has_init = (
+            not (modname.endswith(".__init__") or modname == "__init__")
+            and os.path.basename(filepath) == "__init__.py"
+        )
+        if has_init or is_namespace or is_directory:
+            for subfilepath in modutils.get_module_files(
+                os.path.dirname(filepath) or ".", ignore_list, list_all=is_namespace
+            ):
+                subfilepath = os.path.normpath(subfilepath)
+                if filepath == subfilepath:
+                    continue
+                if _is_in_ignore_list_re(
+                    os.path.basename(subfilepath), ignore_list_re
+                ) or _is_in_ignore_list_re(subfilepath, ignore_list_paths_re):
+                    continue
+
+                modpath = _modpath_from_file(
+                    subfilepath, is_namespace, path=additional_search_path
+                )
+                submodname = ".".join(modpath)
+                # Preserve arg flag if module is also explicitly given.
+                isarg = subfilepath in result and result[subfilepath]["isarg"]
+                result[subfilepath] = {
+                    "path": subfilepath,
+                    "name": submodname,
+                    "isarg": isarg,
+                    "basepath": filepath,
+                    "basename": modname,
+                }
+    return result, errors
diff --git a/pylint/lint/message_state_handler.py b/pylint/lint/message_state_handler.py
index db36d7947..2ddd7d4db 100644
--- a/pylint/lint/message_state_handler.py
+++ b/pylint/lint/message_state_handler.py
@@ -1,13 +1,31 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import tokenize
 from collections import defaultdict
 from typing import TYPE_CHECKING, Literal
+
 from pylint import exceptions, interfaces
-from pylint.constants import MSG_STATE_CONFIDENCE, MSG_STATE_SCOPE_CONFIG, MSG_STATE_SCOPE_MODULE, MSG_TYPES, MSG_TYPES_LONG
+from pylint.constants import (
+    MSG_STATE_CONFIDENCE,
+    MSG_STATE_SCOPE_CONFIG,
+    MSG_STATE_SCOPE_MODULE,
+    MSG_TYPES,
+    MSG_TYPES_LONG,
+)
 from pylint.interfaces import HIGH
 from pylint.message import MessageDefinition
 from pylint.typing import ManagedMessage
-from pylint.utils.pragma_parser import OPTION_PO, InvalidPragmaError, UnRecognizedOptionError, parse_pragma
+from pylint.utils.pragma_parser import (
+    OPTION_PO,
+    InvalidPragmaError,
+    UnRecognizedOptionError,
+    parse_pragma,
+)
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter

@@ -17,16 +35,22 @@ class _MessageStateHandler:
     pragma's.
     """

-    def __init__(self, linter: PyLinter) ->None:
+    def __init__(self, linter: PyLinter) -> None:
         self.linter = linter
         self._msgs_state: dict[str, bool] = {}
-        self._options_methods = {'enable': self.enable, 'disable': self.
-            disable, 'disable-next': self.disable_next}
-        self._bw_options_methods = {'disable-msg': self._options_methods[
-            'disable'], 'enable-msg': self._options_methods['enable']}
+        self._options_methods = {
+            "enable": self.enable,
+            "disable": self.disable,
+            "disable-next": self.disable_next,
+        }
+        self._bw_options_methods = {
+            "disable-msg": self._options_methods["disable"],
+            "enable-msg": self._options_methods["enable"],
+        }
         self._pragma_lineno: dict[str, int] = {}
-        self._stashed_messages: defaultdict[tuple[str, str], list[tuple[str |
-            None, str]]] = defaultdict(list)
+        self._stashed_messages: defaultdict[
+            tuple[str, str], list[tuple[str | None, str]]
+        ] = defaultdict(list)
         """Some messages in the options (for --enable and --disable) are encountered
         too early to warn about them.

@@ -34,63 +58,253 @@ class _MessageStateHandler:
         option_value and msg_id needed to (later) emit the messages keyed on module names.
         """

-    def _set_one_msg_status(self, scope: str, msg: MessageDefinition, line:
-        (int | None), enable: bool) ->None:
+    def _set_one_msg_status(
+        self, scope: str, msg: MessageDefinition, line: int | None, enable: bool
+    ) -> None:
         """Set the status of an individual message."""
-        pass
+        if scope in {"module", "line"}:
+            assert isinstance(line, int)  # should always be int inside module scope
+
+            self.linter.file_state.set_msg_status(msg, line, enable, scope)
+            if not enable and msg.symbol != "locally-disabled":
+                self.linter.add_message(
+                    "locally-disabled", line=line, args=(msg.symbol, msg.msgid)
+                )
+        else:
+            msgs = self._msgs_state
+            msgs[msg.msgid] = enable

-    def _get_messages_to_set(self, msgid: str, enable: bool, ignore_unknown:
-        bool=False) ->list[MessageDefinition]:
+    def _get_messages_to_set(
+        self, msgid: str, enable: bool, ignore_unknown: bool = False
+    ) -> list[MessageDefinition]:
         """Do some tests and find the actual messages of which the status should be set."""
-        pass
+        message_definitions: list[MessageDefinition] = []
+        if msgid == "all":
+            for _msgid in MSG_TYPES:
+                message_definitions.extend(
+                    self._get_messages_to_set(_msgid, enable, ignore_unknown)
+                )
+            return message_definitions

-    def _set_msg_status(self, msgid: str, enable: bool, scope: str=
-        'package', line: (int | None)=None, ignore_unknown: bool=False) ->None:
+        # msgid is a category?
+        category_id = msgid.upper()
+        if category_id not in MSG_TYPES:
+            category_id_formatted = MSG_TYPES_LONG.get(category_id)
+        else:
+            category_id_formatted = category_id
+        if category_id_formatted is not None:
+            for _msgid in self.linter.msgs_store._msgs_by_category[
+                category_id_formatted
+            ]:
+                message_definitions.extend(
+                    self._get_messages_to_set(_msgid, enable, ignore_unknown)
+                )
+            return message_definitions
+
+        # msgid is a checker name?
+        if msgid.lower() in self.linter._checkers:
+            for checker in self.linter._checkers[msgid.lower()]:
+                for _msgid in checker.msgs:
+                    message_definitions.extend(
+                        self._get_messages_to_set(_msgid, enable, ignore_unknown)
+                    )
+            return message_definitions
+
+        # msgid is report id?
+        if msgid.lower().startswith("rp"):
+            if enable:
+                self.linter.enable_report(msgid)
+            else:
+                self.linter.disable_report(msgid)
+            return message_definitions
+
+        try:
+            # msgid is a symbolic or numeric msgid.
+            message_definitions = self.linter.msgs_store.get_message_definitions(msgid)
+        except exceptions.UnknownMessageError:
+            if not ignore_unknown:
+                raise
+        return message_definitions
+
+    def _set_msg_status(
+        self,
+        msgid: str,
+        enable: bool,
+        scope: str = "package",
+        line: int | None = None,
+        ignore_unknown: bool = False,
+    ) -> None:
         """Do some tests and then iterate over message definitions to set state."""
-        pass
+        assert scope in {"package", "module", "line"}
+
+        message_definitions = self._get_messages_to_set(msgid, enable, ignore_unknown)

-    def _register_by_id_managed_msg(self, msgid_or_symbol: str, line: (int |
-        None), is_disabled: bool=True) ->None:
+        for message_definition in message_definitions:
+            self._set_one_msg_status(scope, message_definition, line, enable)
+
+        # sync configuration object
+        self.linter.config.enable = []
+        self.linter.config.disable = []
+        for msgid_or_symbol, is_enabled in self._msgs_state.items():
+            symbols = [
+                m.symbol
+                for m in self.linter.msgs_store.get_message_definitions(msgid_or_symbol)
+            ]
+            if is_enabled:
+                self.linter.config.enable += symbols
+            else:
+                self.linter.config.disable += symbols
+
+    def _register_by_id_managed_msg(
+        self, msgid_or_symbol: str, line: int | None, is_disabled: bool = True
+    ) -> None:
         """If the msgid is a numeric one, then register it to inform the user
         it could furnish instead a symbolic msgid.
         """
-        pass
+        if msgid_or_symbol[1:].isdigit():
+            try:
+                symbol = self.linter.msgs_store.message_id_store.get_symbol(
+                    msgid=msgid_or_symbol
+                )
+            except exceptions.UnknownMessageError:
+                return
+            managed = ManagedMessage(
+                self.linter.current_name, msgid_or_symbol, symbol, line, is_disabled
+            )
+            self.linter._by_id_managed_msgs.append(managed)

-    def disable(self, msgid: str, scope: str='package', line: (int | None)=
-        None, ignore_unknown: bool=False) ->None:
+    def disable(
+        self,
+        msgid: str,
+        scope: str = "package",
+        line: int | None = None,
+        ignore_unknown: bool = False,
+    ) -> None:
         """Disable a message for a scope."""
-        pass
+        self._set_msg_status(
+            msgid, enable=False, scope=scope, line=line, ignore_unknown=ignore_unknown
+        )
+        self._register_by_id_managed_msg(msgid, line)

-    def disable_next(self, msgid: str, _: str='package', line: (int | None)
-        =None, ignore_unknown: bool=False) ->None:
+    def disable_next(
+        self,
+        msgid: str,
+        _: str = "package",
+        line: int | None = None,
+        ignore_unknown: bool = False,
+    ) -> None:
         """Disable a message for the next line."""
-        pass
+        if not line:
+            raise exceptions.NoLineSuppliedError
+        self._set_msg_status(
+            msgid,
+            enable=False,
+            scope="line",
+            line=line + 1,
+            ignore_unknown=ignore_unknown,
+        )
+        self._register_by_id_managed_msg(msgid, line + 1)

-    def enable(self, msgid: str, scope: str='package', line: (int | None)=
-        None, ignore_unknown: bool=False) ->None:
+    def enable(
+        self,
+        msgid: str,
+        scope: str = "package",
+        line: int | None = None,
+        ignore_unknown: bool = False,
+    ) -> None:
         """Enable a message for a scope."""
-        pass
+        self._set_msg_status(
+            msgid, enable=True, scope=scope, line=line, ignore_unknown=ignore_unknown
+        )
+        self._register_by_id_managed_msg(msgid, line, is_disabled=False)

-    def disable_noerror_messages(self) ->None:
+    def disable_noerror_messages(self) -> None:
         """Disable message categories other than `error` and `fatal`."""
-        pass
+        for msgcat in self.linter.msgs_store._msgs_by_category:
+            if msgcat in {"E", "F"}:
+                continue
+            self.disable(msgcat)
+
+    def list_messages_enabled(self) -> None:
+        emittable, non_emittable = self.linter.msgs_store.find_emittable_messages()
+        enabled: list[str] = []
+        disabled: list[str] = []
+        for message in emittable:
+            if self.is_message_enabled(message.msgid):
+                enabled.append(f"  {message.symbol} ({message.msgid})")
+            else:
+                disabled.append(f"  {message.symbol} ({message.msgid})")
+        print("Enabled messages:")
+        for msg in enabled:
+            print(msg)
+        print("\nDisabled messages:")
+        for msg in disabled:
+            print(msg)
+        print("\nNon-emittable messages with current interpreter:")
+        for msg_def in non_emittable:
+            print(f"  {msg_def.symbol} ({msg_def.msgid})")
+        print("")

-    def _get_message_state_scope(self, msgid: str, line: (int | None)=None,
-        confidence: (interfaces.Confidence | None)=None) ->(Literal[0, 1, 2
-        ] | None):
+    def _get_message_state_scope(
+        self,
+        msgid: str,
+        line: int | None = None,
+        confidence: interfaces.Confidence | None = None,
+    ) -> Literal[0, 1, 2] | None:
         """Returns the scope at which a message was enabled/disabled."""
-        pass
+        if confidence is None:
+            confidence = interfaces.UNDEFINED
+        if confidence.name not in self.linter.config.confidence:
+            return MSG_STATE_CONFIDENCE  # type: ignore[return-value] # mypy does not infer Literal correctly
+        try:
+            if line in self.linter.file_state._module_msgs_state[msgid]:
+                return MSG_STATE_SCOPE_MODULE  # type: ignore[return-value]
+        except (KeyError, TypeError):
+            return MSG_STATE_SCOPE_CONFIG  # type: ignore[return-value]
+        return None

-    def _is_one_message_enabled(self, msgid: str, line: (int | None)) ->bool:
+    def _is_one_message_enabled(self, msgid: str, line: int | None) -> bool:
         """Checks state of a single message for the current file.

         This function can't be cached as it depends on self.file_state which can
         change.
         """
-        pass
+        if line is None:
+            return self._msgs_state.get(msgid, True)
+        try:
+            return self.linter.file_state._module_msgs_state[msgid][line]
+        except KeyError:
+            # Check if the message's line is after the maximum line existing in ast tree.
+            # This line won't appear in the ast tree and won't be referred in
+            # self.file_state._module_msgs_state
+            # This happens for example with a commented line at the end of a module.
+            max_line_number = self.linter.file_state.get_effective_max_line_number()
+            if max_line_number and line > max_line_number:
+                fallback = True
+                lines = self.linter.file_state._raw_module_msgs_state.get(msgid, {})
+
+                # Doesn't consider scopes, as a 'disable' can be in a
+                # different scope than that of the current line.
+                closest_lines = reversed(
+                    [
+                        (message_line, enable)
+                        for message_line, enable in lines.items()
+                        if message_line <= line
+                    ]
+                )
+                _, fallback_iter = next(closest_lines, (None, None))
+                if fallback_iter is not None:
+                    fallback = fallback_iter
+
+                return self._msgs_state.get(msgid, fallback)
+            return self._msgs_state.get(msgid, True)

-    def is_message_enabled(self, msg_descr: str, line: (int | None)=None,
-        confidence: (interfaces.Confidence | None)=None) ->bool:
+    def is_message_enabled(
+        self,
+        msg_descr: str,
+        line: int | None = None,
+        confidence: interfaces.Confidence | None = None,
+    ) -> bool:
         """Is this message enabled for the current file ?

         Optionally, is it enabled for this line and confidence level ?
@@ -104,12 +318,114 @@ class _MessageStateHandler:
         :param line: The line of the currently analysed file
         :param confidence: The confidence of the message
         """
-        pass
+        if confidence and confidence.name not in self.linter.config.confidence:
+            return False
+        try:
+            msgids = self.linter.msgs_store.message_id_store.get_active_msgids(
+                msg_descr
+            )
+        except exceptions.UnknownMessageError:
+            # The linter checks for messages that are not registered
+            # due to version mismatch, just treat them as message IDs
+            # for now.
+            msgids = [msg_descr]
+        return any(self._is_one_message_enabled(msgid, line) for msgid in msgids)

-    def process_tokens(self, tokens: list[tokenize.TokenInfo]) ->None:
+    def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
         """Process tokens from the current module to search for module/block level
         options.

         See func_block_disable_msg.py test case for expected behaviour.
         """
-        pass
+        control_pragmas = {"disable", "disable-next", "enable"}
+        prev_line = None
+        saw_newline = True
+        seen_newline = True
+        for tok_type, content, start, _, _ in tokens:
+            if prev_line and prev_line != start[0]:
+                saw_newline = seen_newline
+                seen_newline = False
+
+            prev_line = start[0]
+            if tok_type in (tokenize.NL, tokenize.NEWLINE):
+                seen_newline = True
+
+            if tok_type != tokenize.COMMENT:
+                continue
+            match = OPTION_PO.search(content)
+            if match is None:
+                continue
+            try:  # pylint: disable = too-many-try-statements
+                for pragma_repr in parse_pragma(match.group(2)):
+                    if pragma_repr.action in {"disable-all", "skip-file"}:
+                        if pragma_repr.action == "disable-all":
+                            self.linter.add_message(
+                                "deprecated-pragma",
+                                line=start[0],
+                                args=("disable-all", "skip-file"),
+                            )
+                        self.linter.add_message("file-ignored", line=start[0])
+                        self._ignore_file = True
+                        return
+                    try:
+                        meth = self._options_methods[pragma_repr.action]
+                    except KeyError:
+                        meth = self._bw_options_methods[pragma_repr.action]
+                        # found a "(dis|en)able-msg" pragma deprecated suppression
+                        self.linter.add_message(
+                            "deprecated-pragma",
+                            line=start[0],
+                            args=(
+                                pragma_repr.action,
+                                pragma_repr.action.replace("-msg", ""),
+                            ),
+                        )
+                    for msgid in pragma_repr.messages:
+                        # Add the line where a control pragma was encountered.
+                        if pragma_repr.action in control_pragmas:
+                            self._pragma_lineno[msgid] = start[0]
+
+                        if (pragma_repr.action, msgid) == ("disable", "all"):
+                            self.linter.add_message(
+                                "deprecated-pragma",
+                                line=start[0],
+                                args=("disable=all", "skip-file"),
+                            )
+                            self.linter.add_message("file-ignored", line=start[0])
+                            self._ignore_file = True
+                            return
+                        # If we did not see a newline between the previous line and now,
+                        # we saw a backslash so treat the two lines as one.
+                        l_start = start[0]
+                        if not saw_newline:
+                            l_start -= 1
+                        try:
+                            meth(msgid, "module", l_start)
+                        except (
+                            exceptions.DeletedMessageError,
+                            exceptions.MessageBecameExtensionError,
+                        ) as e:
+                            self.linter.add_message(
+                                "useless-option-value",
+                                args=(pragma_repr.action, e),
+                                line=start[0],
+                                confidence=HIGH,
+                            )
+                        except exceptions.UnknownMessageError:
+                            self.linter.add_message(
+                                "unknown-option-value",
+                                args=(pragma_repr.action, msgid),
+                                line=start[0],
+                                confidence=HIGH,
+                            )
+
+            except UnRecognizedOptionError as err:
+                self.linter.add_message(
+                    "unrecognized-inline-option", args=err.token, line=start[0]
+                )
+                continue
+            except InvalidPragmaError as err:
+                self.linter.add_message(
+                    "bad-inline-option", args=err.token, line=start[0]
+                )
+                continue
diff --git a/pylint/lint/parallel.py b/pylint/lint/parallel.py
index a5b179dbe..af381494c 100644
--- a/pylint/lint/parallel.py
+++ b/pylint/lint/parallel.py
@@ -1,48 +1,173 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import functools
 from collections import defaultdict
 from collections.abc import Iterable, Sequence
 from typing import TYPE_CHECKING, Any
+
 import dill
+
 from pylint import reporters
 from pylint.lint.utils import _augment_sys_path
 from pylint.message import Message
 from pylint.typing import FileItem
 from pylint.utils import LinterStats, merge_stats
+
 try:
     import multiprocessing
 except ImportError:
-    multiprocessing = None
+    multiprocessing = None  # type: ignore[assignment]
+
 try:
     from concurrent.futures import ProcessPoolExecutor
 except ImportError:
-    ProcessPoolExecutor = None
+    ProcessPoolExecutor = None  # type: ignore[assignment,misc]
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
+
+# PyLinter object used by worker processes when checking files using parallel mode
+# should only be used by the worker processes
 _worker_linter: PyLinter | None = None


-def _worker_initialize(linter: bytes, extra_packages_paths: (Sequence[str] |
-    None)=None) ->None:
+def _worker_initialize(
+    linter: bytes, extra_packages_paths: Sequence[str] | None = None
+) -> None:
     """Function called to initialize a worker for a Process within a concurrent Pool.

     :param linter: A linter-class (PyLinter) instance pickled with dill
     :param extra_packages_paths: Extra entries to be added to `sys.path`
     """
-    pass
+    global _worker_linter  # pylint: disable=global-statement
+    _worker_linter = dill.loads(linter)
+    assert _worker_linter

+    # On the worker process side the messages are just collected and passed back to
+    # parent process as _worker_check_file function's return value
+    _worker_linter.set_reporter(reporters.CollectingReporter())
+    _worker_linter.open()

-def _merge_mapreduce_data(linter: PyLinter, all_mapreduce_data: defaultdict
-    [int, list[defaultdict[str, list[Any]]]]) ->None:
+    # Re-register dynamic plugins, since the pool does not have access to the
+    # astroid module that existed when the linter was pickled.
+    _worker_linter.load_plugin_modules(_worker_linter._dynamic_plugins, force=True)
+    _worker_linter.load_plugin_configuration()
+
+    if extra_packages_paths:
+        _augment_sys_path(extra_packages_paths)
+
+
+def _worker_check_single_file(
+    file_item: FileItem,
+) -> tuple[
+    int,
+    str,
+    str,
+    str,
+    list[Message],
+    LinterStats,
+    int,
+    defaultdict[str, list[Any]],
+]:
+    if not _worker_linter:
+        raise RuntimeError("Worker linter not yet initialised")
+    _worker_linter.open()
+    _worker_linter.check_single_file_item(file_item)
+    mapreduce_data = defaultdict(list)
+    for checker in _worker_linter.get_checkers():
+        data = checker.get_map_data()
+        if data is not None:
+            mapreduce_data[checker.name].append(data)
+    msgs = _worker_linter.reporter.messages
+    assert isinstance(_worker_linter.reporter, reporters.CollectingReporter)
+    _worker_linter.reporter.reset()
+    return (
+        id(multiprocessing.current_process()),
+        _worker_linter.current_name,
+        file_item.filepath,
+        _worker_linter.file_state.base_name,
+        msgs,
+        _worker_linter.stats,
+        _worker_linter.msg_status,
+        mapreduce_data,
+    )
+
+
+def _merge_mapreduce_data(
+    linter: PyLinter,
+    all_mapreduce_data: defaultdict[int, list[defaultdict[str, list[Any]]]],
+) -> None:
     """Merges map/reduce data across workers, invoking relevant APIs on checkers."""
-    pass
+    # First collate the data and prepare it, so we can send it to the checkers for
+    # validation. The intent here is to collect all the mapreduce data for all checker-
+    # runs across processes - that will then be passed to a static method on the
+    # checkers to be reduced and further processed.
+    collated_map_reduce_data: defaultdict[str, list[Any]] = defaultdict(list)
+    for linter_data in all_mapreduce_data.values():
+        for run_data in linter_data:
+            for checker_name, data in run_data.items():
+                collated_map_reduce_data[checker_name].extend(data)
+
+    # Send the data to checkers that support/require consolidated data
+    original_checkers = linter.get_checkers()
+    for checker in original_checkers:
+        if checker.name in collated_map_reduce_data:
+            # Assume that if the check has returned map/reduce data that it has the
+            # reducer function
+            checker.reduce_map_data(linter, collated_map_reduce_data[checker.name])


-def check_parallel(linter: PyLinter, jobs: int, files: Iterable[FileItem],
-    extra_packages_paths: (Sequence[str] | None)=None) ->None:
+def check_parallel(
+    linter: PyLinter,
+    jobs: int,
+    files: Iterable[FileItem],
+    extra_packages_paths: Sequence[str] | None = None,
+) -> None:
     """Use the given linter to lint the files with given amount of workers (jobs).

     This splits the work filestream-by-filestream. If you need to do work across
     multiple files, as in the similarity-checker, then implement the map/reduce functionality.
     """
-    pass
+    # The linter is inherited by all the pool's workers, i.e. the linter
+    # is identical to the linter object here. This is required so that
+    # a custom PyLinter object can be used.
+    initializer = functools.partial(
+        _worker_initialize, extra_packages_paths=extra_packages_paths
+    )
+    with ProcessPoolExecutor(
+        max_workers=jobs, initializer=initializer, initargs=(dill.dumps(linter),)
+    ) as executor:
+        linter.open()
+        all_stats = []
+        all_mapreduce_data: defaultdict[int, list[defaultdict[str, list[Any]]]] = (
+            defaultdict(list)
+        )
+
+        # Maps each file to be worked on by a single _worker_check_single_file() call,
+        # collecting any map/reduce data by checker module so that we can 'reduce' it
+        # later.
+        for (
+            worker_idx,  # used to merge map/reduce data across workers
+            module,
+            file_path,
+            base_name,
+            messages,
+            stats,
+            msg_status,
+            mapreduce_data,
+        ) in executor.map(_worker_check_single_file, files):
+            linter.file_state.base_name = base_name
+            linter.file_state._is_base_filestate = False
+            linter.set_current_module(module, file_path)
+            for msg in messages:
+                linter.reporter.handle_message(msg)
+            all_stats.append(stats)
+            all_mapreduce_data[worker_idx].append(mapreduce_data)
+            linter.msg_status |= msg_status
+
+    _merge_mapreduce_data(linter, all_mapreduce_data)
+    linter.stats = merge_stats([linter.stats, *all_stats])
diff --git a/pylint/lint/pylinter.py b/pylint/lint/pylinter.py
index 3f632fa03..eff15cc44 100644
--- a/pylint/lint/pylinter.py
+++ b/pylint/lint/pylinter.py
@@ -1,4 +1,9 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import argparse
 import collections
 import contextlib
@@ -14,97 +19,245 @@ from pathlib import Path
 from re import Pattern
 from types import ModuleType
 from typing import Any, Protocol
+
 import astroid
 from astroid import nodes
+
 from pylint import checkers, exceptions, interfaces, reporters
 from pylint.checkers.base_checker import BaseChecker
 from pylint.config.arguments_manager import _ArgumentsManager
-from pylint.constants import MAIN_CHECKER_NAME, MSG_TYPES, MSG_TYPES_STATUS, WarningScope
+from pylint.constants import (
+    MAIN_CHECKER_NAME,
+    MSG_TYPES,
+    MSG_TYPES_STATUS,
+    WarningScope,
+)
 from pylint.interfaces import HIGH
 from pylint.lint.base_options import _make_linter_options
 from pylint.lint.caching import load_results, save_results
-from pylint.lint.expand_modules import _is_ignored_file, discover_package_path, expand_modules
+from pylint.lint.expand_modules import (
+    _is_ignored_file,
+    discover_package_path,
+    expand_modules,
+)
 from pylint.lint.message_state_handler import _MessageStateHandler
 from pylint.lint.parallel import check_parallel
-from pylint.lint.report_functions import report_messages_by_module_stats, report_messages_stats, report_total_messages_stats
-from pylint.lint.utils import _is_relative_to, augmented_sys_path, get_fatal_error_message, prepare_crash_report
+from pylint.lint.report_functions import (
+    report_messages_by_module_stats,
+    report_messages_stats,
+    report_total_messages_stats,
+)
+from pylint.lint.utils import (
+    _is_relative_to,
+    augmented_sys_path,
+    get_fatal_error_message,
+    prepare_crash_report,
+)
 from pylint.message import Message, MessageDefinition, MessageDefinitionStore
 from pylint.reporters.base_reporter import BaseReporter
 from pylint.reporters.text import TextReporter
 from pylint.reporters.ureports import nodes as report_nodes
-from pylint.typing import DirectoryNamespaceDict, FileItem, ManagedMessage, MessageDefinitionTuple, MessageLocationTuple, ModuleDescriptionDict, Options
+from pylint.typing import (
+    DirectoryNamespaceDict,
+    FileItem,
+    ManagedMessage,
+    MessageDefinitionTuple,
+    MessageLocationTuple,
+    ModuleDescriptionDict,
+    Options,
+)
 from pylint.utils import ASTWalker, FileState, LinterStats, utils
+
 MANAGER = astroid.MANAGER


 class GetAstProtocol(Protocol):
-
-    def __call__(self, filepath: str, modname: str, data: (str | None)=None
-        ) ->nodes.Module:
-        ...
-
-
-MSGS: dict[str, MessageDefinitionTuple] = {'F0001': ('%s', 'fatal',
-    'Used when an error occurred preventing the analysis of a               module (unable to find it for instance).'
-    , {'scope': WarningScope.LINE}), 'F0002': ('%s: %s', 'astroid-error',
-    'Used when an unexpected error occurred while building the Astroid  representation. This is usually accompanied by a traceback. Please report such errors !'
-    , {'scope': WarningScope.LINE}), 'F0010': (
-    'error while code parsing: %s', 'parse-error',
-    'Used when an exception occurred while building the Astroid representation which could be handled by astroid.'
-    , {'scope': WarningScope.LINE}), 'F0011': (
-    'error while parsing the configuration: %s', 'config-parse-error',
-    'Used when an exception occurred while parsing a pylint configuration file.'
-    , {'scope': WarningScope.LINE}), 'I0001': (
-    'Unable to run raw checkers on built-in module %s',
-    'raw-checker-failed',
-    'Used to inform that a built-in module has not been checked using the raw checkers.'
-    , {'scope': WarningScope.LINE, 'default_enabled': False}), 'I0010': (
-    'Unable to consider inline option %r', 'bad-inline-option',
-    "Used when an inline option is either badly formatted or can't be used inside modules."
-    , {'scope': WarningScope.LINE, 'default_enabled': False}), 'I0011': (
-    'Locally disabling %s (%s)', 'locally-disabled',
-    'Used when an inline option disables a message or a messages category.',
-    {'scope': WarningScope.LINE, 'default_enabled': False}), 'I0013': (
-    'Ignoring entire file', 'file-ignored',
-    'Used to inform that the file will not be checked', {'scope':
-    WarningScope.LINE, 'default_enabled': False}), 'I0020': (
-    'Suppressed %s (from line %d)', 'suppressed-message',
-    'A message was triggered on a line, but suppressed explicitly by a disable= comment in the file. This message is not generated for messages that are ignored due to configuration settings.'
-    , {'scope': WarningScope.LINE, 'default_enabled': False}), 'I0021': (
-    'Useless suppression of %s', 'useless-suppression',
-    'Reported when a message is explicitly disabled for a line or a block of code, but never triggered.'
-    , {'scope': WarningScope.LINE, 'default_enabled': False}), 'I0022': (
-    'Pragma "%s" is deprecated, use "%s" instead', 'deprecated-pragma',
-    'Some inline pylint options have been renamed or reworked, only the most recent form should be used. NOTE:skip-all is only available with pylint >= 0.26'
-    , {'old_names': [('I0014', 'deprecated-disable-all')], 'scope':
-    WarningScope.LINE, 'default_enabled': False}), 'E0001': ('%s',
-    'syntax-error', 'Used when a syntax error is raised for a module.', {
-    'scope': WarningScope.LINE}), 'E0011': ('Unrecognized file option %r',
-    'unrecognized-inline-option',
-    'Used when an unknown inline option is encountered.', {'scope':
-    WarningScope.LINE}), 'W0012': (
-    "Unknown option value for '%s', expected a valid pylint message and got '%s'"
-    , 'unknown-option-value',
-    'Used when an unknown value is encountered for an option.', {'scope':
-    WarningScope.LINE, 'old_names': [('E0012', 'bad-option-value')]}),
-    'R0022': ("Useless option value for '%s', %s", 'useless-option-value',
-    'Used when a value for an option that is now deleted from pylint is encountered.'
-    , {'scope': WarningScope.LINE, 'old_names': [('E0012',
-    'bad-option-value')]}), 'E0013': (
-    "Plugin '%s' is impossible to load, is it installed ? ('%s')",
-    'bad-plugin-value', "Used when a bad value is used in 'load-plugins'.",
-    {'scope': WarningScope.LINE}), 'E0014': (
-    "Out-of-place setting encountered in top level configuration-section '%s' : '%s'"
-    , 'bad-configuration-section',
-    "Used when we detect a setting in the top level of a toml configuration that shouldn't be there."
-    , {'scope': WarningScope.LINE}), 'E0015': (
-    'Unrecognized option found: %s', 'unrecognized-option',
-    'Used when we detect an option that we do not recognize.', {'scope':
-    WarningScope.LINE})}
-
-
-class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
-    ReportsHandlerMixIn, checkers.BaseChecker):
+    def __call__(
+        self, filepath: str, modname: str, data: str | None = None
+    ) -> nodes.Module: ...
+
+
+def _read_stdin() -> str:
+    # See https://github.com/python/typeshed/pull/5623 for rationale behind assertion
+    assert isinstance(sys.stdin, TextIOWrapper)
+    sys.stdin = TextIOWrapper(sys.stdin.detach(), encoding="utf-8")
+    return sys.stdin.read()
+
+
+def _load_reporter_by_class(reporter_class: str) -> type[BaseReporter]:
+    qname = reporter_class
+    module_part = astroid.modutils.get_module_part(qname)
+    module = astroid.modutils.load_module_from_name(module_part)
+    class_name = qname.split(".")[-1]
+    klass = getattr(module, class_name)
+    assert issubclass(klass, BaseReporter), f"{klass} is not a BaseReporter"
+    return klass  # type: ignore[no-any-return]
+
+
+# Python Linter class #########################################################
+
+# pylint: disable-next=consider-using-namedtuple-or-dataclass
+MSGS: dict[str, MessageDefinitionTuple] = {
+    "F0001": (
+        "%s",
+        "fatal",
+        "Used when an error occurred preventing the analysis of a \
+              module (unable to find it for instance).",
+        {"scope": WarningScope.LINE},
+    ),
+    "F0002": (
+        "%s: %s",
+        "astroid-error",
+        "Used when an unexpected error occurred while building the "
+        "Astroid  representation. This is usually accompanied by a "
+        "traceback. Please report such errors !",
+        {"scope": WarningScope.LINE},
+    ),
+    "F0010": (
+        "error while code parsing: %s",
+        "parse-error",
+        "Used when an exception occurred while building the Astroid "
+        "representation which could be handled by astroid.",
+        {"scope": WarningScope.LINE},
+    ),
+    "F0011": (
+        "error while parsing the configuration: %s",
+        "config-parse-error",
+        "Used when an exception occurred while parsing a pylint configuration file.",
+        {"scope": WarningScope.LINE},
+    ),
+    "I0001": (
+        "Unable to run raw checkers on built-in module %s",
+        "raw-checker-failed",
+        "Used to inform that a built-in module has not been checked "
+        "using the raw checkers.",
+        {
+            "scope": WarningScope.LINE,
+            "default_enabled": False,
+        },
+    ),
+    "I0010": (
+        "Unable to consider inline option %r",
+        "bad-inline-option",
+        "Used when an inline option is either badly formatted or can't "
+        "be used inside modules.",
+        {
+            "scope": WarningScope.LINE,
+            "default_enabled": False,
+        },
+    ),
+    "I0011": (
+        "Locally disabling %s (%s)",
+        "locally-disabled",
+        "Used when an inline option disables a message or a messages category.",
+        {
+            "scope": WarningScope.LINE,
+            "default_enabled": False,
+        },
+    ),
+    "I0013": (
+        "Ignoring entire file",
+        "file-ignored",
+        "Used to inform that the file will not be checked",
+        {
+            "scope": WarningScope.LINE,
+            "default_enabled": False,
+        },
+    ),
+    "I0020": (
+        "Suppressed %s (from line %d)",
+        "suppressed-message",
+        "A message was triggered on a line, but suppressed explicitly "
+        "by a disable= comment in the file. This message is not "
+        "generated for messages that are ignored due to configuration "
+        "settings.",
+        {
+            "scope": WarningScope.LINE,
+            "default_enabled": False,
+        },
+    ),
+    "I0021": (
+        "Useless suppression of %s",
+        "useless-suppression",
+        "Reported when a message is explicitly disabled for a line or "
+        "a block of code, but never triggered.",
+        {
+            "scope": WarningScope.LINE,
+            "default_enabled": False,
+        },
+    ),
+    "I0022": (
+        'Pragma "%s" is deprecated, use "%s" instead',
+        "deprecated-pragma",
+        "Some inline pylint options have been renamed or reworked, "
+        "only the most recent form should be used. "
+        "NOTE:skip-all is only available with pylint >= 0.26",
+        {
+            "old_names": [("I0014", "deprecated-disable-all")],
+            "scope": WarningScope.LINE,
+            "default_enabled": False,
+        },
+    ),
+    "E0001": (
+        "%s",
+        "syntax-error",
+        "Used when a syntax error is raised for a module.",
+        {"scope": WarningScope.LINE},
+    ),
+    "E0011": (
+        "Unrecognized file option %r",
+        "unrecognized-inline-option",
+        "Used when an unknown inline option is encountered.",
+        {"scope": WarningScope.LINE},
+    ),
+    "W0012": (
+        "Unknown option value for '%s', expected a valid pylint message and got '%s'",
+        "unknown-option-value",
+        "Used when an unknown value is encountered for an option.",
+        {
+            "scope": WarningScope.LINE,
+            "old_names": [("E0012", "bad-option-value")],
+        },
+    ),
+    "R0022": (
+        "Useless option value for '%s', %s",
+        "useless-option-value",
+        "Used when a value for an option that is now deleted from pylint"
+        " is encountered.",
+        {
+            "scope": WarningScope.LINE,
+            "old_names": [("E0012", "bad-option-value")],
+        },
+    ),
+    "E0013": (
+        "Plugin '%s' is impossible to load, is it installed ? ('%s')",
+        "bad-plugin-value",
+        "Used when a bad value is used in 'load-plugins'.",
+        {"scope": WarningScope.LINE},
+    ),
+    "E0014": (
+        "Out-of-place setting encountered in top level configuration-section '%s' : '%s'",
+        "bad-configuration-section",
+        "Used when we detect a setting in the top level of a toml configuration that"
+        " shouldn't be there.",
+        {"scope": WarningScope.LINE},
+    ),
+    "E0015": (
+        "Unrecognized option found: %s",
+        "unrecognized-option",
+        "Used when we detect an option that we do not recognize.",
+        {"scope": WarningScope.LINE},
+    ),
+}
+
+
+# pylint: disable=too-many-instance-attributes,too-many-public-methods
+class PyLinter(
+    _ArgumentsManager,
+    _MessageStateHandler,
+    reporters.ReportsHandlerMixIn,
+    checkers.BaseChecker,
+):
     """Lint Python modules using external checkers.

     This is the main checker controlling the other ones and the reports
@@ -120,18 +273,32 @@ class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
     This class needs to support pickling for parallel linting to work. The exception
     is reporter member; see check_parallel function for more details.
     """
+
     name = MAIN_CHECKER_NAME
     msgs = MSGS
-    crash_file_path: str = 'pylint-crash-%Y-%m-%d-%H-%M-%S.txt'
-    option_groups_descs = {'Messages control':
-        'Options controlling analysis messages', 'Reports':
-        'Options related to output formatting and reporting'}
-
-    def __init__(self, options: Options=(), reporter: (reporters.
-        BaseReporter | reporters.MultiReporter | None)=None, option_groups:
-        tuple[tuple[str, str], ...]=(), pylintrc: (str | None)=None) ->None:
-        _ArgumentsManager.__init__(self, prog='pylint')
+    # Will be used like this : datetime.now().strftime(crash_file_path)
+    crash_file_path: str = "pylint-crash-%Y-%m-%d-%H-%M-%S.txt"
+
+    option_groups_descs = {
+        "Messages control": "Options controlling analysis messages",
+        "Reports": "Options related to output formatting and reporting",
+    }
+
+    def __init__(
+        self,
+        options: Options = (),
+        reporter: reporters.BaseReporter | reporters.MultiReporter | None = None,
+        option_groups: tuple[tuple[str, str], ...] = (),
+        # TODO: Deprecate passing the pylintrc parameter
+        pylintrc: str | None = None,  # pylint: disable=unused-argument
+    ) -> None:
+        _ArgumentsManager.__init__(self, prog="pylint")
         _MessageStateHandler.__init__(self, self)
+
+        # Some stuff has to be done before initialization of other ancestors...
+        # messages store / checkers / reporter / astroid manager
+
+        # Attributes for reporters
         self.reporter: reporters.BaseReporter | reporters.MultiReporter
         if reporter:
             self.set_reporter(reporter)
@@ -139,41 +306,63 @@ class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
             self.set_reporter(TextReporter())
         self._reporters: dict[str, type[reporters.BaseReporter]] = {}
         """Dictionary of possible but non-initialized reporters."""
-        self._checkers: defaultdict[str, list[checkers.BaseChecker]
-            ] = collections.defaultdict(list)
+
+        # Attributes for checkers and plugins
+        self._checkers: defaultdict[str, list[checkers.BaseChecker]] = (
+            collections.defaultdict(list)
+        )
         """Dictionary of registered and initialized checkers."""
-        self._dynamic_plugins: dict[str, ModuleType | ModuleNotFoundError |
-            bool] = {}
+        self._dynamic_plugins: dict[str, ModuleType | ModuleNotFoundError | bool] = {}
         """Set of loaded plugin names."""
+
+        # Attributes related to stats
         self.stats = LinterStats()
+
+        # Attributes related to (command-line) options and their parsing
         self.options: Options = options + _make_linter_options(self)
         for opt_group in option_groups:
             self.option_groups_descs[opt_group[0]] = opt_group[1]
-        self._option_groups: tuple[tuple[str, str], ...] = (*option_groups,
-            ('Messages control', 'Options controlling analysis messages'),
-            ('Reports', 'Options related to output formatting and reporting'))
+        self._option_groups: tuple[tuple[str, str], ...] = (
+            *option_groups,
+            ("Messages control", "Options controlling analysis messages"),
+            ("Reports", "Options related to output formatting and reporting"),
+        )
         self.fail_on_symbols: list[str] = []
         """List of message symbols on which pylint should fail, set by --fail-on."""
         self._error_mode = False
+
         reporters.ReportsHandlerMixIn.__init__(self)
         checkers.BaseChecker.__init__(self, self)
-        self.reports = ('RP0001', 'Messages by category',
-            report_total_messages_stats), ('RP0002',
-            '% errors / warnings by module', report_messages_by_module_stats
-            ), ('RP0003', 'Messages', report_messages_stats)
+        # provided reports
+        self.reports = (
+            ("RP0001", "Messages by category", report_total_messages_stats),
+            (
+                "RP0002",
+                "% errors / warnings by module",
+                report_messages_by_module_stats,
+            ),
+            ("RP0003", "Messages", report_messages_stats),
+        )
+
+        # Attributes related to registering messages and their handling
         self.msgs_store = MessageDefinitionStore(self.config.py_version)
         self.msg_status = 0
         self._by_id_managed_msgs: list[ManagedMessage] = []
-        self.file_state = FileState('', self.msgs_store, is_base_filestate=True
-            )
-        self.current_name: str = ''
+
+        # Attributes related to visiting files
+        self.file_state = FileState("", self.msgs_store, is_base_filestate=True)
+        self.current_name: str = ""
         self.current_file: str | None = None
         self._ignore_file = False
         self._ignore_paths: list[Pattern[str]] = []
+
         self.register_checker(self)

-    def load_plugin_modules(self, modnames: Iterable[str], force: bool=False
-        ) ->None:
+    def load_default_plugins(self) -> None:
+        checkers.initialize(self)
+        reporters.initialize(self)
+
+    def load_plugin_modules(self, modnames: Iterable[str], force: bool = False) -> None:
         """Check a list of pylint plugins modules, load and register them.

         If a module cannot be loaded, never try to load it again and instead
@@ -183,9 +372,17 @@ class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
         If `force` is True (useful when multiprocessing), then the plugin is
         reloaded regardless if an entry exists in self._dynamic_plugins.
         """
-        pass
-
-    def load_plugin_configuration(self) ->None:
+        for modname in modnames:
+            if modname in self._dynamic_plugins and not force:
+                continue
+            try:
+                module = astroid.modutils.load_module_from_name(modname)
+                module.register(self)
+                self._dynamic_plugins[modname] = module
+            except ModuleNotFoundError as mnf_e:
+                self._dynamic_plugins[modname] = mnf_e
+
+    def load_plugin_configuration(self) -> None:
         """Call the configuration hook for plugins.

         This walks through the list of plugins, grabs the "load_configuration"
@@ -201,60 +398,190 @@ class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
             in GitHub issue #7264. Making it use the stored result is more efficient, and
             means that we avoid the ``init-hook`` problems from before.
         """
-        pass
-
-    def _load_reporters(self, reporter_names: str) ->None:
+        for modname, module_or_error in self._dynamic_plugins.items():
+            if isinstance(module_or_error, ModuleNotFoundError):
+                self.add_message(
+                    "bad-plugin-value", args=(modname, module_or_error), line=0
+                )
+            elif hasattr(module_or_error, "load_configuration"):
+                module_or_error.load_configuration(self)
+
+        # We re-set all the dictionary values to True here to make sure the dict
+        # is pickle-able. This is only a problem in multiprocessing/parallel mode.
+        # (e.g. invoking pylint -j 2)
+        self._dynamic_plugins = {
+            modname: not isinstance(val, ModuleNotFoundError)
+            for modname, val in self._dynamic_plugins.items()
+        }
+
+    def _load_reporters(self, reporter_names: str) -> None:
         """Load the reporters if they are available on _reporters."""
-        pass
+        if not self._reporters:
+            return
+        sub_reporters = []
+        output_files = []
+        with contextlib.ExitStack() as stack:
+            for reporter_name in reporter_names.split(","):
+                reporter_name, *reporter_output = reporter_name.split(":", 1)
+
+                reporter = self._load_reporter_by_name(reporter_name)
+                sub_reporters.append(reporter)
+                if reporter_output:
+                    output_file = stack.enter_context(
+                        open(reporter_output[0], "w", encoding="utf-8")
+                    )
+                    reporter.out = output_file
+                    output_files.append(output_file)
+
+            # Extend the lifetime of all opened output files
+            close_output_files = stack.pop_all().close
+
+        if len(sub_reporters) > 1 or output_files:
+            self.set_reporter(
+                reporters.MultiReporter(
+                    sub_reporters,
+                    close_output_files,
+                )
+            )
+        else:
+            self.set_reporter(sub_reporters[0])
+
+    def _load_reporter_by_name(self, reporter_name: str) -> reporters.BaseReporter:
+        name = reporter_name.lower()
+        if name in self._reporters:
+            return self._reporters[name]()

-    def set_reporter(self, reporter: (reporters.BaseReporter | reporters.
-        MultiReporter)) ->None:
+        try:
+            reporter_class = _load_reporter_by_class(reporter_name)
+        except (ImportError, AttributeError, AssertionError) as e:
+            raise exceptions.InvalidReporterError(name) from e
+
+        return reporter_class()
+
+    def set_reporter(
+        self, reporter: reporters.BaseReporter | reporters.MultiReporter
+    ) -> None:
         """Set the reporter used to display messages and reports."""
-        pass
+        self.reporter = reporter
+        reporter.linter = self

-    def register_reporter(self, reporter_class: type[reporters.BaseReporter]
-        ) ->None:
+    def register_reporter(self, reporter_class: type[reporters.BaseReporter]) -> None:
         """Registers a reporter class on the _reporters attribute."""
-        pass
+        self._reporters[reporter_class.name] = reporter_class
+
+    def report_order(self) -> list[BaseChecker]:
+        reports = sorted(self._reports, key=lambda x: getattr(x, "name", ""))
+        try:
+            # Remove the current reporter and add it
+            # at the end of the list.
+            reports.pop(reports.index(self))
+        except ValueError:
+            pass
+        else:
+            reports.append(self)
+        return reports

-    def register_checker(self, checker: checkers.BaseChecker) ->None:
-        """This method auto registers the checker."""
-        pass
+    # checkers manipulation methods ############################################

-    def enable_fail_on_messages(self) ->None:
+    def register_checker(self, checker: checkers.BaseChecker) -> None:
+        """This method auto registers the checker."""
+        self._checkers[checker.name].append(checker)
+        for r_id, r_title, r_cb in checker.reports:
+            self.register_report(r_id, r_title, r_cb, checker)
+        if hasattr(checker, "msgs"):
+            self.msgs_store.register_messages_from_checker(checker)
+            for message in checker.messages:
+                if not message.default_enabled:
+                    self.disable(message.msgid)
+        # Register the checker, but disable all of its messages.
+        if not getattr(checker, "enabled", True):
+            self.disable(checker.name)
+
+    def enable_fail_on_messages(self) -> None:
         """Enable 'fail on' msgs.

         Convert values in config.fail_on (which might be msg category, msg id,
         or symbol) to specific msgs, then enable and flag them for later.
         """
-        pass
-
-    def disable_reporters(self) ->None:
+        fail_on_vals = self.config.fail_on
+        if not fail_on_vals:
+            return
+
+        fail_on_cats = set()
+        fail_on_msgs = set()
+        for val in fail_on_vals:
+            # If value is a category, add category, else add message
+            if val in MSG_TYPES:
+                fail_on_cats.add(val)
+            else:
+                fail_on_msgs.add(val)
+
+        # For every message in every checker, if cat or msg flagged, enable check
+        for all_checkers in self._checkers.values():
+            for checker in all_checkers:
+                for msg in checker.messages:
+                    if msg.msgid in fail_on_msgs or msg.symbol in fail_on_msgs:
+                        # message id/symbol matched, enable and flag it
+                        self.enable(msg.msgid)
+                        self.fail_on_symbols.append(msg.symbol)
+                    elif msg.msgid[0] in fail_on_cats:
+                        # message starts with a category value, flag (but do not enable) it
+                        self.fail_on_symbols.append(msg.symbol)
+
+    def any_fail_on_issues(self) -> bool:
+        return any(x in self.fail_on_symbols for x in self.stats.by_msg.keys())
+
+    def disable_reporters(self) -> None:
         """Disable all reporters."""
-        pass
+        for _reporters in self._reports.values():
+            for report_id, _, _ in _reporters:
+                self.disable_report(report_id)

-    def _parse_error_mode(self) ->None:
+    def _parse_error_mode(self) -> None:
         """Parse the current state of the error mode.

         Error mode: enable only errors; no reports, no persistent.
         """
-        pass
+        if not self._error_mode:
+            return
+
+        self.disable_noerror_messages()
+        self.disable("miscellaneous")
+        self.set_option("reports", False)
+        self.set_option("persistent", False)
+        self.set_option("score", False)
+
+    # code checking methods ###################################################

-    def get_checkers(self) ->list[BaseChecker]:
+    def get_checkers(self) -> list[BaseChecker]:
         """Return all available checkers as an ordered list."""
-        pass
+        return sorted(c for _checkers in self._checkers.values() for c in _checkers)

-    def get_checker_names(self) ->list[str]:
+    def get_checker_names(self) -> list[str]:
         """Get all the checker names that this linter knows about."""
-        pass
-
-    def prepare_checkers(self) ->list[BaseChecker]:
+        return sorted(
+            {
+                checker.name
+                for checker in self.get_checkers()
+                if checker.name != MAIN_CHECKER_NAME
+            }
+        )
+
+    def prepare_checkers(self) -> list[BaseChecker]:
         """Return checkers needed for activated messages and reports."""
-        pass
-
+        if not self.config.reports:
+            self.disable_reporters()
+        # get needed checkers
+        needed_checkers: list[BaseChecker] = [self]
+        for checker in self.get_checkers()[1:]:
+            messages = {msg for msg in checker.msgs if self.is_message_enabled(msg)}
+            if messages or any(self.report_is_enabled(r[0]) for r in checker.reports):
+                needed_checkers.append(checker)
+        return needed_checkers
+
+    # pylint: disable=unused-argument
     @staticmethod
-    def should_analyze_file(modname: str, path: str, is_argument: bool=False
-        ) ->bool:
+    def should_analyze_file(modname: str, path: str, is_argument: bool = False) -> bool:
         """Returns whether a module should be checked.

         This implementation returns True for all python source files (.py and .pyi),
@@ -270,50 +597,176 @@ class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
                                  checked, since the user requested it explicitly.
         :returns: True if the module should be checked.
         """
-        pass
+        if is_argument:
+            return True
+        return path.endswith((".py", ".pyi"))

-    def initialize(self) ->None:
+    # pylint: enable=unused-argument
+
+    def initialize(self) -> None:
         """Initialize linter for linting.

         This method is called before any linting is done.
         """
-        pass
-
-    def _discover_files(self, files_or_modules: Sequence[str]) ->Iterator[str]:
+        self._ignore_paths = self.config.ignore_paths
+        # initialize msgs_state now that all messages have been registered into
+        # the store
+        for msg in self.msgs_store.messages:
+            if not msg.may_be_emitted(self.config.py_version):
+                self._msgs_state[msg.msgid] = False
+
+    def _discover_files(self, files_or_modules: Sequence[str]) -> Iterator[str]:
         """Discover python modules and packages in sub-directory.

         Returns iterator of paths to discovered modules and packages.
         """
-        pass
-
-    def check(self, files_or_modules: Sequence[str]) ->None:
+        for something in files_or_modules:
+            if os.path.isdir(something) and not os.path.isfile(
+                os.path.join(something, "__init__.py")
+            ):
+                skip_subtrees: list[str] = []
+                for root, _, files in os.walk(something):
+                    if any(root.startswith(s) for s in skip_subtrees):
+                        # Skip subtree of already discovered package.
+                        continue
+
+                    if _is_ignored_file(
+                        root,
+                        self.config.ignore,
+                        self.config.ignore_patterns,
+                        self.config.ignore_paths,
+                    ):
+                        skip_subtrees.append(root)
+                        continue
+
+                    if "__init__.py" in files:
+                        skip_subtrees.append(root)
+                        yield root
+                    else:
+                        yield from (
+                            os.path.join(root, file)
+                            for file in files
+                            if file.endswith((".py", ".pyi"))
+                        )
+            else:
+                yield something
+
+    def check(self, files_or_modules: Sequence[str]) -> None:
         """Main checking entry: check a list of files or modules from their name.

         files_or_modules is either a string or list of strings presenting modules to check.
         """
-        pass
-
-    def _get_asts(self, fileitems: Iterator[FileItem], data: (str | None)
-        ) ->dict[FileItem, nodes.Module | None]:
+        self.initialize()
+        if self.config.recursive:
+            files_or_modules = tuple(self._discover_files(files_or_modules))
+        if self.config.from_stdin:
+            if len(files_or_modules) != 1:
+                raise exceptions.InvalidArgsError(
+                    "Missing filename required for --from-stdin"
+                )
+
+        extra_packages_paths = list(
+            {
+                discover_package_path(file_or_module, self.config.source_roots)
+                for file_or_module in files_or_modules
+            }
+        )
+
+        # TODO: Move the parallel invocation into step 3 of the checking process
+        if not self.config.from_stdin and self.config.jobs > 1:
+            original_sys_path = sys.path[:]
+            check_parallel(
+                self,
+                self.config.jobs,
+                self._iterate_file_descrs(files_or_modules),
+                extra_packages_paths,
+            )
+            sys.path = original_sys_path
+            return
+
+        # 1) Get all FileItems
+        with augmented_sys_path(extra_packages_paths):
+            if self.config.from_stdin:
+                fileitems = self._get_file_descr_from_stdin(files_or_modules[0])
+                data: str | None = _read_stdin()
+            else:
+                fileitems = self._iterate_file_descrs(files_or_modules)
+                data = None
+
+        # The contextmanager also opens all checkers and sets up the PyLinter class
+        with augmented_sys_path(extra_packages_paths):
+            with self._astroid_module_checker() as check_astroid_module:
+                # 2) Get the AST for each FileItem
+                ast_per_fileitem = self._get_asts(fileitems, data)
+
+                # 3) Lint each ast
+                self._lint_files(ast_per_fileitem, check_astroid_module)
+
+    def _get_asts(
+        self, fileitems: Iterator[FileItem], data: str | None
+    ) -> dict[FileItem, nodes.Module | None]:
         """Get the AST for all given FileItems."""
-        pass
-
-    def check_single_file_item(self, file: FileItem) ->None:
+        ast_per_fileitem: dict[FileItem, nodes.Module | None] = {}
+
+        for fileitem in fileitems:
+            self.set_current_module(fileitem.name, fileitem.filepath)
+
+            try:
+                ast_per_fileitem[fileitem] = self.get_ast(
+                    fileitem.filepath, fileitem.name, data
+                )
+            except astroid.AstroidBuildingError as ex:
+                template_path = prepare_crash_report(
+                    ex, fileitem.filepath, self.crash_file_path
+                )
+                msg = get_fatal_error_message(fileitem.filepath, template_path)
+                self.add_message(
+                    "astroid-error",
+                    args=(fileitem.filepath, msg),
+                    confidence=HIGH,
+                )
+
+        return ast_per_fileitem
+
+    def check_single_file_item(self, file: FileItem) -> None:
         """Check single file item.

         The arguments are the same that are documented in _check_files

         initialize() should be called before calling this method
         """
-        pass
-
-    def _lint_files(self, ast_mapping: dict[FileItem, nodes.Module | None],
-        check_astroid_module: Callable[[nodes.Module], bool | None]) ->None:
+        with self._astroid_module_checker() as check_astroid_module:
+            self._check_file(self.get_ast, check_astroid_module, file)
+
+    def _lint_files(
+        self,
+        ast_mapping: dict[FileItem, nodes.Module | None],
+        check_astroid_module: Callable[[nodes.Module], bool | None],
+    ) -> None:
         """Lint all AST modules from a mapping.."""
-        pass
-
-    def _lint_file(self, file: FileItem, module: nodes.Module,
-        check_astroid_module: Callable[[nodes.Module], bool | None]) ->None:
+        for fileitem, module in ast_mapping.items():
+            if module is None:
+                continue
+            try:
+                self._lint_file(fileitem, module, check_astroid_module)
+            except Exception as ex:  # pylint: disable=broad-except
+                template_path = prepare_crash_report(
+                    ex, fileitem.filepath, self.crash_file_path
+                )
+                msg = get_fatal_error_message(fileitem.filepath, template_path)
+                if isinstance(ex, astroid.AstroidError):
+                    self.add_message(
+                        "astroid-error", args=(fileitem.filepath, msg), confidence=HIGH
+                    )
+                else:
+                    self.add_message("fatal", args=msg, confidence=HIGH)
+
+    def _lint_file(
+        self,
+        file: FileItem,
+        module: nodes.Module,
+        check_astroid_module: Callable[[nodes.Module], bool | None],
+    ) -> None:
         """Lint a file using the passed utility function check_astroid_module).

         :param FileItem file: data about the file
@@ -323,10 +776,31 @@ class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
         - ast: AST of the module
         :raises AstroidError: for any failures stemming from astroid
         """
-        pass
-
-    def _check_file(self, get_ast: GetAstProtocol, check_astroid_module:
-        Callable[[nodes.Module], bool | None], file: FileItem) ->None:
+        self.set_current_module(file.name, file.filepath)
+        self._ignore_file = False
+        self.file_state = FileState(file.modpath, self.msgs_store, module)
+        # fix the current file (if the source file was not available or
+        # if it's actually a c extension)
+        self.current_file = module.file
+
+        try:
+            check_astroid_module(module)
+        except Exception as e:
+            raise astroid.AstroidError from e
+
+        # warn about spurious inline messages handling
+        spurious_messages = self.file_state.iter_spurious_suppression_messages(
+            self.msgs_store
+        )
+        for msgid, line, args in spurious_messages:
+            self.add_message(msgid, line, None, args)
+
+    def _check_file(
+        self,
+        get_ast: GetAstProtocol,
+        check_astroid_module: Callable[[nodes.Module], bool | None],
+        file: FileItem,
+    ) -> None:
         """Check a file using the passed utility functions (get_ast and
         check_astroid_module).

@@ -340,49 +814,153 @@ class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
         :param FileItem file: data about the file
         :raises AstroidError: for any failures stemming from astroid
         """
-        pass
+        self.set_current_module(file.name, file.filepath)
+        # get the module representation
+        ast_node = get_ast(file.filepath, file.name)
+        if ast_node is None:
+            return

-    def _get_file_descr_from_stdin(self, filepath: str) ->Iterator[FileItem]:
+        self._ignore_file = False
+
+        self.file_state = FileState(file.modpath, self.msgs_store, ast_node)
+        # fix the current file (if the source file was not available or
+        # if it's actually a c extension)
+        self.current_file = ast_node.file
+        try:
+            check_astroid_module(ast_node)
+        except Exception as e:  # pragma: no cover
+            raise astroid.AstroidError from e
+        # warn about spurious inline messages handling
+        spurious_messages = self.file_state.iter_spurious_suppression_messages(
+            self.msgs_store
+        )
+        for msgid, line, args in spurious_messages:
+            self.add_message(msgid, line, None, args)
+
+    def _get_file_descr_from_stdin(self, filepath: str) -> Iterator[FileItem]:
         """Return file description (tuple of module name, file path, base name) from
         given file path.

         This method is used for creating suitable file description for _check_files when the
         source is standard input.
         """
-        pass
-
-    def _iterate_file_descrs(self, files_or_modules: Sequence[str]) ->Iterator[
-        FileItem]:
+        if _is_ignored_file(
+            filepath,
+            self.config.ignore,
+            self.config.ignore_patterns,
+            self.config.ignore_paths,
+        ):
+            return
+
+        try:
+            # Note that this function does not really perform an
+            # __import__ but may raise an ImportError exception, which
+            # we want to catch here.
+            modname = ".".join(astroid.modutils.modpath_from_file(filepath))
+        except ImportError:
+            modname = os.path.splitext(os.path.basename(filepath))[0]
+
+        yield FileItem(modname, filepath, filepath)
+
+    def _iterate_file_descrs(
+        self, files_or_modules: Sequence[str]
+    ) -> Iterator[FileItem]:
         """Return generator yielding file descriptions (tuples of module name, file
         path, base name).

         The returned generator yield one item for each Python module that should be linted.
         """
-        pass
-
-    def _expand_files(self, files_or_modules: Sequence[str]) ->dict[str,
-        ModuleDescriptionDict]:
+        for descr in self._expand_files(files_or_modules).values():
+            name, filepath, is_arg = descr["name"], descr["path"], descr["isarg"]
+            if self.should_analyze_file(name, filepath, is_argument=is_arg):
+                yield FileItem(name, filepath, descr["basename"])
+
+    def _expand_files(
+        self, files_or_modules: Sequence[str]
+    ) -> dict[str, ModuleDescriptionDict]:
         """Get modules and errors from a list of modules and handle errors."""
-        pass
-
-    def set_current_module(self, modname: str, filepath: (str | None)=None
-        ) ->None:
+        result, errors = expand_modules(
+            files_or_modules,
+            self.config.source_roots,
+            self.config.ignore,
+            self.config.ignore_patterns,
+            self._ignore_paths,
+        )
+        for error in errors:
+            message = modname = error["mod"]
+            key = error["key"]
+            self.set_current_module(modname)
+            if key == "fatal":
+                message = str(error["ex"]).replace(os.getcwd() + os.sep, "")
+            self.add_message(key, args=message)
+        return result
+
+    def set_current_module(self, modname: str, filepath: str | None = None) -> None:
         """Set the name of the currently analyzed module and
         init statistics for it.
         """
-        pass
+        if not modname and filepath is None:
+            return
+        self.reporter.on_set_current_module(modname or "", filepath)
+        self.current_name = modname
+        self.current_file = filepath or modname
+        self.stats.init_single_module(modname or "")
+
+        # If there is an actual filepath we might need to update the config attribute
+        if filepath:
+            namespace = self._get_namespace_for_file(
+                Path(filepath), self._directory_namespaces
+            )
+            if namespace:
+                self.config = namespace or self._base_config
+
+    def _get_namespace_for_file(
+        self, filepath: Path, namespaces: DirectoryNamespaceDict
+    ) -> argparse.Namespace | None:
+        for directory in namespaces:
+            if _is_relative_to(filepath, directory):
+                namespace = self._get_namespace_for_file(
+                    filepath, namespaces[directory][1]
+                )
+                if namespace is None:
+                    return namespaces[directory][0]
+        return None

     @contextlib.contextmanager
-    def _astroid_module_checker(self) ->Iterator[Callable[[nodes.Module], 
-        bool | None]]:
+    def _astroid_module_checker(
+        self,
+    ) -> Iterator[Callable[[nodes.Module], bool | None]]:
         """Context manager for checking ASTs.

         The value in the context is callable accepting AST as its only argument.
         """
-        pass
-
-    def get_ast(self, filepath: str, modname: str, data: (str | None)=None) ->(
-        nodes.Module | None):
+        walker = ASTWalker(self)
+        _checkers = self.prepare_checkers()
+        tokencheckers = [
+            c for c in _checkers if isinstance(c, checkers.BaseTokenChecker)
+        ]
+        rawcheckers = [
+            c for c in _checkers if isinstance(c, checkers.BaseRawFileChecker)
+        ]
+        for checker in _checkers:
+            checker.open()
+            walker.add_checker(checker)
+
+        yield functools.partial(
+            self.check_astroid_module,
+            walker=walker,
+            tokencheckers=tokencheckers,
+            rawcheckers=rawcheckers,
+        )
+
+        # notify global end
+        self.stats.statement = walker.nbstatements
+        for checker in reversed(_checkers):
+            checker.close()
+
+    def get_ast(
+        self, filepath: str, modname: str, data: str | None = None
+    ) -> nodes.Module | None:
         """Return an ast(roid) representation of a module or a string.

         :param filepath: path to checked file.
@@ -392,20 +970,65 @@ class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
         :rtype: astroid.nodes.Module
         :raises AstroidBuildingError: Whenever we encounter an unexpected exception
         """
-        pass
-
-    def check_astroid_module(self, ast_node: nodes.Module, walker:
-        ASTWalker, rawcheckers: list[checkers.BaseRawFileChecker],
-        tokencheckers: list[checkers.BaseTokenChecker]) ->(bool | None):
+        try:
+            if data is None:
+                return MANAGER.ast_from_file(filepath, modname, source=True)
+            return astroid.builder.AstroidBuilder(MANAGER).string_build(
+                data, modname, filepath
+            )
+        except astroid.AstroidSyntaxError as ex:
+            line = getattr(ex.error, "lineno", None)
+            if line is None:
+                line = 0
+            self.add_message(
+                "syntax-error",
+                line=line,
+                col_offset=getattr(ex.error, "offset", None),
+                args=f"Parsing failed: '{ex.error}'",
+                confidence=HIGH,
+            )
+        except astroid.AstroidBuildingError as ex:
+            self.add_message("parse-error", args=ex)
+        except Exception as ex:
+            traceback.print_exc()
+            # We raise BuildingError here as this is essentially an astroid issue
+            # Creating an issue template and adding the 'astroid-error' message is handled
+            # by caller: _check_files
+            raise astroid.AstroidBuildingError(
+                "Building error when trying to create ast representation of module '{modname}'",
+                modname=modname,
+            ) from ex
+        return None
+
+    def check_astroid_module(
+        self,
+        ast_node: nodes.Module,
+        walker: ASTWalker,
+        rawcheckers: list[checkers.BaseRawFileChecker],
+        tokencheckers: list[checkers.BaseTokenChecker],
+    ) -> bool | None:
         """Check a module from its astroid representation.

         For return value see _check_astroid_module
         """
-        pass
-
-    def _check_astroid_module(self, node: nodes.Module, walker: ASTWalker,
-        rawcheckers: list[checkers.BaseRawFileChecker], tokencheckers: list
-        [checkers.BaseTokenChecker]) ->(bool | None):
+        before_check_statements = walker.nbstatements
+
+        retval = self._check_astroid_module(
+            ast_node, walker, rawcheckers, tokencheckers
+        )
+        self.stats.by_module[self.current_name]["statement"] = (
+            walker.nbstatements - before_check_statements
+        )
+
+        return retval
+
+    def _check_astroid_module(
+        self,
+        node: nodes.Module,
+        walker: ASTWalker,
+        rawcheckers: list[checkers.BaseRawFileChecker],
+        tokencheckers: list[checkers.BaseTokenChecker],
+    ) -> bool | None:
         """Check given AST node with given walker and checkers.

         :param astroid.nodes.Module node: AST node of the module to check
@@ -416,37 +1039,220 @@ class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
         :returns: True if the module was checked, False if ignored,
             None if the module contents could not be parsed
         """
-        pass
+        try:
+            tokens = utils.tokenize_module(node)
+        except tokenize.TokenError as ex:
+            self.add_message(
+                "syntax-error",
+                line=ex.args[1][0],
+                col_offset=ex.args[1][1],
+                args=ex.args[0],
+                confidence=HIGH,
+            )
+            return None

-    def open(self) ->None:
+        if not node.pure_python:
+            self.add_message("raw-checker-failed", args=node.name)
+        else:
+            # assert astroid.file.endswith('.py')
+            # Parse module/block level option pragma's
+            self.process_tokens(tokens)
+            if self._ignore_file:
+                return False
+            # run raw and tokens checkers
+            for raw_checker in rawcheckers:
+                raw_checker.process_module(node)
+            for token_checker in tokencheckers:
+                token_checker.process_tokens(tokens)
+        # generate events to astroid checkers
+        walker.walk(node)
+        return True
+
+    def open(self) -> None:
         """Initialize counters."""
-        pass
+        MANAGER.always_load_extensions = self.config.unsafe_load_any_extension
+        MANAGER.max_inferable_values = self.config.limit_inference_results
+        MANAGER.extension_package_whitelist.update(self.config.extension_pkg_allow_list)
+        MANAGER.module_denylist.update(self.config.ignored_modules)
+        MANAGER.prefer_stubs = self.config.prefer_stubs
+        if self.config.extension_pkg_whitelist:
+            MANAGER.extension_package_whitelist.update(
+                self.config.extension_pkg_whitelist
+            )
+        self.stats.reset_message_count()

-    def generate_reports(self, verbose: bool=False) ->(int | None):
+    def generate_reports(self, verbose: bool = False) -> int | None:
         """Close the whole package /module, it's time to make reports !

         if persistent run, pickle results for later comparison
         """
-        pass
+        # Display whatever messages are left on the reporter.
+        self.reporter.display_messages(report_nodes.Section())
+        if not self.file_state._is_base_filestate:
+            # load previous results if any
+            previous_stats = load_results(self.file_state.base_name)
+            self.reporter.on_close(self.stats, previous_stats)
+            if self.config.reports:
+                sect = self.make_reports(self.stats, previous_stats)
+            else:
+                sect = report_nodes.Section()
+
+            if self.config.reports:
+                self.reporter.display_reports(sect)
+            score_value = self._report_evaluation(verbose)
+            # save results if persistent run
+            if self.config.persistent:
+                save_results(self.stats, self.file_state.base_name)
+        else:
+            self.reporter.on_close(self.stats, LinterStats())
+            score_value = None
+        return score_value

-    def _report_evaluation(self, verbose: bool=False) ->(int | None):
+    def _report_evaluation(self, verbose: bool = False) -> int | None:
         """Make the global evaluation report."""
-        pass
-
-    def _add_one_message(self, message_definition: MessageDefinition, line:
-        (int | None), node: (nodes.NodeNG | None), args: (Any | None),
-        confidence: (interfaces.Confidence | None), col_offset: (int | None
-        ), end_lineno: (int | None), end_col_offset: (int | None)) ->None:
+        # check with at least a statement (usually 0 when there is a
+        # syntax error preventing pylint from further processing)
+        note = None
+        previous_stats = load_results(self.file_state.base_name)
+        if self.stats.statement == 0:
+            return note
+
+        # get a global note for the code
+        evaluation = self.config.evaluation
+        try:
+            stats_dict = {
+                "fatal": self.stats.fatal,
+                "error": self.stats.error,
+                "warning": self.stats.warning,
+                "refactor": self.stats.refactor,
+                "convention": self.stats.convention,
+                "statement": self.stats.statement,
+                "info": self.stats.info,
+            }
+            note = eval(evaluation, {}, stats_dict)  # pylint: disable=eval-used
+        except Exception as ex:  # pylint: disable=broad-except
+            msg = f"An exception occurred while rating: {ex}"
+        else:
+            self.stats.global_note = note
+            msg = f"Your code has been rated at {note:.2f}/10"
+            if previous_stats:
+                pnote = previous_stats.global_note
+                if pnote is not None:
+                    msg += f" (previous run: {pnote:.2f}/10, {note - pnote:+.2f})"
+
+            if verbose:
+                checked_files_count = self.stats.node_count["module"]
+                unchecked_files_count = self.stats.undocumented["module"]
+                msg += f"\nChecked {checked_files_count} files, skipped {unchecked_files_count} files"
+
+        if self.config.score:
+            sect = report_nodes.EvaluationSection(msg)
+            self.reporter.display_reports(sect)
+        return note
+
+    def _add_one_message(
+        self,
+        message_definition: MessageDefinition,
+        line: int | None,
+        node: nodes.NodeNG | None,
+        args: Any | None,
+        confidence: interfaces.Confidence | None,
+        col_offset: int | None,
+        end_lineno: int | None,
+        end_col_offset: int | None,
+    ) -> None:
         """After various checks have passed a single Message is
         passed to the reporter and added to stats.
         """
-        pass
-
-    def add_message(self, msgid: str, line: (int | None)=None, node: (nodes
-        .NodeNG | None)=None, args: (Any | None)=None, confidence: (
-        interfaces.Confidence | None)=None, col_offset: (int | None)=None,
-        end_lineno: (int | None)=None, end_col_offset: (int | None)=None
-        ) ->None:
+        message_definition.check_message_definition(line, node)
+
+        # Look up "location" data of node if not yet supplied
+        if node:
+            if node.position:
+                if not line:
+                    line = node.position.lineno
+                if not col_offset:
+                    col_offset = node.position.col_offset
+                if not end_lineno:
+                    end_lineno = node.position.end_lineno
+                if not end_col_offset:
+                    end_col_offset = node.position.end_col_offset
+            else:
+                if not line:
+                    line = node.fromlineno
+                if not col_offset:
+                    col_offset = node.col_offset
+                if not end_lineno:
+                    end_lineno = node.end_lineno
+                if not end_col_offset:
+                    end_col_offset = node.end_col_offset
+
+        # should this message be displayed
+        if not self.is_message_enabled(message_definition.msgid, line, confidence):
+            self.file_state.handle_ignored_message(
+                self._get_message_state_scope(
+                    message_definition.msgid, line, confidence
+                ),
+                message_definition.msgid,
+                line,
+            )
+            return
+
+        # update stats
+        msg_cat = MSG_TYPES[message_definition.msgid[0]]
+        self.msg_status |= MSG_TYPES_STATUS[message_definition.msgid[0]]
+        self.stats.increase_single_message_count(msg_cat, 1)
+        self.stats.increase_single_module_message_count(self.current_name, msg_cat, 1)
+        try:
+            self.stats.by_msg[message_definition.symbol] += 1
+        except KeyError:
+            self.stats.by_msg[message_definition.symbol] = 1
+        # Interpolate arguments into message string
+        msg = message_definition.msg
+        if args is not None:
+            msg %= args
+        # get module and object
+        if node is None:
+            module, obj = self.current_name, ""
+            abspath = self.current_file
+        else:
+            module, obj = utils.get_module_and_frameid(node)
+            abspath = node.root().file
+        if abspath is not None:
+            path = abspath.replace(self.reporter.path_strip_prefix, "", 1)
+        else:
+            path = "configuration"
+        # add the message
+        self.reporter.handle_message(
+            Message(
+                message_definition.msgid,
+                message_definition.symbol,
+                MessageLocationTuple(
+                    abspath or "",
+                    path,
+                    module or "",
+                    obj,
+                    line or 1,
+                    col_offset or 0,
+                    end_lineno,
+                    end_col_offset,
+                ),
+                msg,
+                confidence,
+            )
+        )
+
+    def add_message(
+        self,
+        msgid: str,
+        line: int | None = None,
+        node: nodes.NodeNG | None = None,
+        args: Any | None = None,
+        confidence: interfaces.Confidence | None = None,
+        col_offset: int | None = None,
+        end_lineno: int | None = None,
+        end_col_offset: int | None = None,
+    ) -> None:
         """Adds a message given by ID or name.

         If provided, the message string is expanded using args.
@@ -455,11 +1261,28 @@ class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
         provide line if the line number is different), raw and token checkers
         must provide the line argument.
         """
-        pass
+        if confidence is None:
+            confidence = interfaces.UNDEFINED
+        message_definitions = self.msgs_store.get_message_definitions(msgid)
+        for message_definition in message_definitions:
+            self._add_one_message(
+                message_definition,
+                line,
+                node,
+                args,
+                confidence,
+                col_offset,
+                end_lineno,
+                end_col_offset,
+            )

-    def add_ignored_message(self, msgid: str, line: int, node: (nodes.
-        NodeNG | None)=None, confidence: (interfaces.Confidence | None)=
-        interfaces.UNDEFINED) ->None:
+    def add_ignored_message(
+        self,
+        msgid: str,
+        line: int,
+        node: nodes.NodeNG | None = None,
+        confidence: interfaces.Confidence | None = interfaces.UNDEFINED,
+    ) -> None:
         """Prepares a message to be added to the ignored message storage.

         Some checks return early in special cases and never reach add_message(),
@@ -467,4 +1290,26 @@ class PyLinter(_ArgumentsManager, _MessageStateHandler, reporters.
         This creates false positives for useless-suppression.
         This function avoids this by adding those message to the ignored msgs attribute
         """
-        pass
+        message_definitions = self.msgs_store.get_message_definitions(msgid)
+        for message_definition in message_definitions:
+            message_definition.check_message_definition(line, node)
+            self.file_state.handle_ignored_message(
+                self._get_message_state_scope(
+                    message_definition.msgid, line, confidence
+                ),
+                message_definition.msgid,
+                line,
+            )
+
+    def _emit_stashed_messages(self) -> None:
+        for keys, values in self._stashed_messages.items():
+            modname, symbol = keys
+            self.linter.set_current_module(modname)
+            for args in values:
+                self.add_message(
+                    symbol,
+                    args=args,
+                    line=0,
+                    confidence=HIGH,
+                )
+        self._stashed_messages = collections.defaultdict(list)
diff --git a/pylint/lint/report_functions.py b/pylint/lint/report_functions.py
index eec3528bf..72734e468 100644
--- a/pylint/lint/report_functions.py
+++ b/pylint/lint/report_functions.py
@@ -1,26 +1,88 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import collections
 from collections import defaultdict
 from typing import cast
+
 from pylint import checkers, exceptions
 from pylint.reporters.ureports.nodes import Section, Table
 from pylint.typing import MessageTypesFullName
 from pylint.utils import LinterStats


-def report_total_messages_stats(sect: Section, stats: LinterStats,
-    previous_stats: (LinterStats | None)) ->None:
+def report_total_messages_stats(
+    sect: Section,
+    stats: LinterStats,
+    previous_stats: LinterStats | None,
+) -> None:
     """Make total errors / warnings report."""
-    pass
+    lines = ["type", "number", "previous", "difference"]
+    lines += checkers.table_lines_from_stats(stats, previous_stats, "message_types")
+    sect.append(Table(children=lines, cols=4, rheaders=1))


-def report_messages_stats(sect: Section, stats: LinterStats, _: (
-    LinterStats | None)) ->None:
+def report_messages_stats(
+    sect: Section,
+    stats: LinterStats,
+    _: LinterStats | None,
+) -> None:
     """Make messages type report."""
-    pass
+    by_msg_stats = stats.by_msg
+    in_order = sorted(
+        (value, msg_id)
+        for msg_id, value in by_msg_stats.items()
+        if not msg_id.startswith("I")
+    )
+    in_order.reverse()
+    lines = ["message id", "occurrences"]
+    for value, msg_id in in_order:
+        lines += [msg_id, str(value)]
+    sect.append(Table(children=lines, cols=2, rheaders=1))


-def report_messages_by_module_stats(sect: Section, stats: LinterStats, _: (
-    LinterStats | None)) ->None:
+def report_messages_by_module_stats(
+    sect: Section,
+    stats: LinterStats,
+    _: LinterStats | None,
+) -> None:
     """Make errors / warnings by modules report."""
-    pass
+    module_stats = stats.by_module
+    if len(module_stats) == 1:
+        # don't print this report when we are analysing a single module
+        raise exceptions.EmptyReportError()
+    by_mod: defaultdict[str, dict[str, int | float]] = collections.defaultdict(dict)
+    for m_type in ("fatal", "error", "warning", "refactor", "convention"):
+        m_type = cast(MessageTypesFullName, m_type)
+        total = stats.get_global_message_count(m_type)
+        for module in module_stats.keys():
+            mod_total = stats.get_module_message_count(module, m_type)
+            percent = 0 if total == 0 else float(mod_total * 100) / total
+            by_mod[module][m_type] = percent
+    sorted_result = []
+    for module, mod_info in by_mod.items():
+        sorted_result.append(
+            (
+                mod_info["error"],
+                mod_info["warning"],
+                mod_info["refactor"],
+                mod_info["convention"],
+                module,
+            )
+        )
+    sorted_result.sort()
+    sorted_result.reverse()
+    lines = ["module", "error", "warning", "refactor", "convention"]
+    for line in sorted_result:
+        # Don't report clean modules.
+        if all(entry == 0 for entry in line[:-1]):
+            continue
+        lines.append(line[-1])
+        for val in line[:-1]:
+            lines.append(f"{val:.2f}")
+    if len(lines) == 5:
+        raise exceptions.EmptyReportError()
+    sect.append(Table(children=lines, cols=5, rheaders=1))
diff --git a/pylint/lint/run.py b/pylint/lint/run.py
index 646f5ae00..1a8d594a0 100644
--- a/pylint/lint/run.py
+++ b/pylint/lint/run.py
@@ -1,13 +1,22 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import os
 import sys
 import warnings
 from collections.abc import Sequence
 from pathlib import Path
 from typing import ClassVar
+
 from pylint import config
 from pylint.checkers.utils import clear_lru_caches
-from pylint.config._pylint_config import _handle_pylint_config_commands, _register_generate_config_options
+from pylint.config._pylint_config import (
+    _handle_pylint_config_commands,
+    _register_generate_config_options,
+)
 from pylint.config.config_initialization import _config_initialization
 from pylint.config.exceptions import ArgumentPreprocessingError
 from pylint.config.utils import _preprocess_options
@@ -15,99 +24,183 @@ from pylint.constants import full_version
 from pylint.lint.base_options import _make_run_options
 from pylint.lint.pylinter import MANAGER, PyLinter
 from pylint.reporters.base_reporter import BaseReporter
+
 try:
     import multiprocessing
-    from multiprocessing import synchronize
+    from multiprocessing import synchronize  # noqa pylint: disable=unused-import
 except ImportError:
-    multiprocessing = None
+    multiprocessing = None  # type: ignore[assignment]
+
 try:
     from concurrent.futures import ProcessPoolExecutor
 except ImportError:
-    ProcessPoolExecutor = None
+    ProcessPoolExecutor = None  # type: ignore[assignment,misc]


-def _query_cpu() ->(int | None):
+def _query_cpu() -> int | None:
     """Try to determine number of CPUs allotted in a docker container.

     This is based on discussion and copied from suggestions in
     https://bugs.python.org/issue36054.
     """
-    pass
+    cpu_quota, avail_cpu = None, None
+
+    if Path("/sys/fs/cgroup/cpu/cpu.cfs_quota_us").is_file():
+        with open("/sys/fs/cgroup/cpu/cpu.cfs_quota_us", encoding="utf-8") as file:
+            # Not useful for AWS Batch based jobs as result is -1, but works on local linux systems
+            cpu_quota = int(file.read().rstrip())

+    if (
+        cpu_quota
+        and cpu_quota != -1
+        and Path("/sys/fs/cgroup/cpu/cpu.cfs_period_us").is_file()
+    ):
+        with open("/sys/fs/cgroup/cpu/cpu.cfs_period_us", encoding="utf-8") as file:
+            cpu_period = int(file.read().rstrip())
+        # Divide quota by period and you should get num of allotted CPU to the container,
+        # rounded down if fractional.
+        avail_cpu = int(cpu_quota / cpu_period)
+    elif Path("/sys/fs/cgroup/cpu/cpu.shares").is_file():
+        with open("/sys/fs/cgroup/cpu/cpu.shares", encoding="utf-8") as file:
+            cpu_shares = int(file.read().rstrip())
+        # For AWS, gives correct value * 1024.
+        avail_cpu = int(cpu_shares / 1024)

-def _cpu_count() ->int:
+    # In K8s Pods also a fraction of a single core could be available
+    # As multiprocessing is not able to run only a "fraction" of process
+    # assume we have 1 CPU available
+    if avail_cpu == 0:
+        avail_cpu = 1
+
+    return avail_cpu
+
+
+def _cpu_count() -> int:
     """Use sched_affinity if available for virtualized or containerized
     environments.
     """
-    pass
+    cpu_share = _query_cpu()
+    cpu_count = None
+    sched_getaffinity = getattr(os, "sched_getaffinity", None)
+    # pylint: disable=not-callable,using-constant-test,useless-suppression
+    if sched_getaffinity:
+        cpu_count = len(sched_getaffinity(0))
+    elif multiprocessing:
+        cpu_count = multiprocessing.cpu_count()
+    else:
+        cpu_count = 1
+    if sys.platform == "win32":
+        # See also https://github.com/python/cpython/issues/94242
+        cpu_count = min(cpu_count, 56)  # pragma: no cover
+    if cpu_share is not None:
+        return min(cpu_share, cpu_count)
+    return cpu_count


 class Run:
     """Helper class to use as main for pylint with 'run(*sys.argv[1:])'."""
+
     LinterClass = PyLinter
-    option_groups = ('Commands',
-        'Options which are actually commands. Options in this group are mutually exclusive.'
+    option_groups = (
+        (
+            "Commands",
+            "Options which are actually commands. Options in this \
+group are mutually exclusive.",
         ),
+    )
     _is_pylint_config: ClassVar[bool] = False
     """Boolean whether or not this is a 'pylint-config' run.

     Used by _PylintConfigRun to make the 'pylint-config' command work.
     """

-    def __init__(self, args: Sequence[str], reporter: (BaseReporter | None)
-        =None, exit: bool=True) ->None:
-        if '--version' in args:
+    # pylint: disable = too-many-statements, too-many-branches
+    def __init__(
+        self,
+        args: Sequence[str],
+        reporter: BaseReporter | None = None,
+        exit: bool = True,  # pylint: disable=redefined-builtin
+    ) -> None:
+        # Immediately exit if user asks for version
+        if "--version" in args:
             print(full_version)
             sys.exit(0)
+
         self._rcfile: str | None = None
         self._output: str | None = None
         self._plugins: list[str] = []
         self.verbose: bool = False
+
+        # Pre-process certain options and remove them from args list
         try:
             args = _preprocess_options(self, args)
         except ArgumentPreprocessingError as ex:
             print(ex, file=sys.stderr)
             sys.exit(32)
+
+        # Determine configuration file
         if self._rcfile is None:
             default_file = next(config.find_default_config_files(), None)
             if default_file:
                 self._rcfile = str(default_file)
-        self.linter = linter = self.LinterClass(_make_run_options(self),
-            option_groups=self.option_groups, pylintrc=self._rcfile)
+
+        self.linter = linter = self.LinterClass(
+            _make_run_options(self),
+            option_groups=self.option_groups,
+            pylintrc=self._rcfile,
+        )
+        # register standard checkers
         linter.load_default_plugins()
+        # load command line plugins
         linter.load_plugin_modules(self._plugins)
+
+        # Register the options needed for 'pylint-config'
+        # By not registering them by default they don't show up in the normal usage message
         if self._is_pylint_config:
             _register_generate_config_options(linter._arg_parser)
-        args = _config_initialization(linter, args, reporter, config_file=
-            self._rcfile, verbose_mode=self.verbose)
+
+        args = _config_initialization(
+            linter, args, reporter, config_file=self._rcfile, verbose_mode=self.verbose
+        )
+
+        # Handle the 'pylint-config' command
         if self._is_pylint_config:
             warnings.warn(
-                "NOTE: The 'pylint-config' command is experimental and usage can change"
-                , UserWarning, stacklevel=2)
+                "NOTE: The 'pylint-config' command is experimental and usage can change",
+                UserWarning,
+                stacklevel=2,
+            )
             code = _handle_pylint_config_commands(linter)
             if exit:
                 sys.exit(code)
             return
-        if not args or len(linter.config.disable) == len(linter.msgs_store.
-            _messages_definitions):
-            print('No files to lint: exiting.')
+
+        # Display help if there are no files to lint or no checks enabled
+        if not args or len(linter.config.disable) == len(
+            linter.msgs_store._messages_definitions
+        ):
+            print("No files to lint: exiting.")
             sys.exit(32)
+
         if linter.config.jobs < 0:
             print(
-                f'Jobs number ({linter.config.jobs}) should be greater than or equal to 0'
-                , file=sys.stderr)
+                f"Jobs number ({linter.config.jobs}) should be greater than or equal to 0",
+                file=sys.stderr,
+            )
             sys.exit(32)
         if linter.config.jobs > 1 or linter.config.jobs == 0:
             if ProcessPoolExecutor is None:
                 print(
-                    'concurrent.futures module is missing, fallback to single process'
-                    , file=sys.stderr)
-                linter.set_option('jobs', 1)
+                    "concurrent.futures module is missing, fallback to single process",
+                    file=sys.stderr,
+                )
+                linter.set_option("jobs", 1)
             elif linter.config.jobs == 0:
                 linter.config.jobs = _cpu_count()
+
         if self._output:
             try:
-                with open(self._output, 'w', encoding='utf-8') as output:
+                with open(self._output, "w", encoding="utf-8") as output:
                     linter.reporter.out = output
                     linter.check(args)
                     score_value = linter.generate_reports(verbose=self.verbose)
@@ -120,15 +213,20 @@ class Run:
         if linter.config.clear_cache_post_run:
             clear_lru_caches()
             MANAGER.clear_cache()
+
         if exit:
             if linter.config.exit_zero:
                 sys.exit(0)
             elif linter.any_fail_on_issues():
+                # We need to make sure we return a failing exit code in this case.
+                # So we use self.linter.msg_status if that is non-zero, otherwise we just return 1.
                 sys.exit(self.linter.msg_status or 1)
             elif score_value is not None:
                 if score_value >= linter.config.fail_under:
                     sys.exit(0)
                 else:
+                    # We need to make sure we return a failing exit code in this case.
+                    # So we use self.linter.msg_status if that is non-zero, otherwise we just return 1.
                     sys.exit(self.linter.msg_status or 1)
             else:
                 sys.exit(self.linter.msg_status)
@@ -136,6 +234,7 @@ class Run:

 class _PylintConfigRun(Run):
     """A private wrapper for the 'pylint-config' command."""
+
     _is_pylint_config: ClassVar[bool] = True
     """Boolean whether or not this is a 'pylint-config' run.

diff --git a/pylint/lint/utils.py b/pylint/lint/utils.py
index ec27fb7a9..a7fbfd0bc 100644
--- a/pylint/lint/utils.py
+++ b/pylint/lint/utils.py
@@ -1,4 +1,9 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import contextlib
 import platform
 import sys
@@ -6,19 +11,138 @@ import traceback
 from collections.abc import Iterator, Sequence
 from datetime import datetime
 from pathlib import Path
+
 from pylint.constants import PYLINT_HOME, full_version


+def prepare_crash_report(ex: Exception, filepath: str, crash_file_path: str) -> Path:
+    issue_template_path = (
+        Path(PYLINT_HOME) / datetime.now().strftime(str(crash_file_path))
+    ).resolve()
+    with open(filepath, encoding="utf8") as f:
+        file_content = f.read()
+    template = ""
+    if not issue_template_path.exists():
+        template = """\
+First, please verify that the bug is not already filled:
+https://github.com/pylint-dev/pylint/issues/
+
+Then create a new issue:
+https://github.com/pylint-dev/pylint/issues/new?labels=Crash 💥%2CNeeds triage 📥
+
+
+"""
+    template += f"""
+Issue title:
+Crash ``{ex}`` (if possible, be more specific about what made pylint crash)
+
+### Bug description
+
+When parsing the following ``a.py``:
+
+<!--
+ If sharing the code is not an option, please state so,
+ but providing only the stacktrace would still be helpful.
+ -->
+
+```python
+{file_content}
+```
+
+### Command used
+
+```shell
+pylint a.py
+```
+
+### Pylint output
+
+<details open>
+    <summary>
+        pylint crashed with a ``{ex.__class__.__name__}`` and with the following stacktrace:
+    </summary>
+
+```python
+"""
+    template += traceback.format_exc()
+    template += f"""
+```
+
+
+</details>
+
+### Expected behavior
+
+No crash.
+
+### Pylint version
+
+```shell
+{full_version}
+```
+
+### OS / Environment
+
+{sys.platform} ({platform.system()})
+
+### Additional dependencies
+
+<!--
+Please remove this part if you're not using any of
+your dependencies in the example.
+ -->
+"""
+    try:
+        with open(issue_template_path, "a", encoding="utf8") as f:
+            f.write(template)
+    except Exception as exc:  # pylint: disable=broad-except
+        print(
+            f"Can't write the issue template for the crash in {issue_template_path} "
+            f"because of: '{exc}'\nHere's the content anyway:\n{template}.",
+            file=sys.stderr,
+        )
+    return issue_template_path
+
+
+def get_fatal_error_message(filepath: str, issue_template_path: Path) -> str:
+    return (
+        f"Fatal error while checking '{filepath}'. "
+        f"Please open an issue in our bug tracker so we address this. "
+        f"There is a pre-filled template that you can use in '{issue_template_path}'."
+    )
+
+
+def _augment_sys_path(additional_paths: Sequence[str]) -> list[str]:
+    original = list(sys.path)
+    changes = []
+    seen = set()
+    for additional_path in additional_paths:
+        if additional_path not in seen:
+            changes.append(additional_path)
+            seen.add(additional_path)
+
+    sys.path[:] = changes + sys.path
+    return original
+
+
 @contextlib.contextmanager
-def augmented_sys_path(additional_paths: Sequence[str]) ->Iterator[None]:
+def augmented_sys_path(additional_paths: Sequence[str]) -> Iterator[None]:
     """Augment 'sys.path' by adding non-existent entries from additional_paths."""
-    pass
+    original = _augment_sys_path(additional_paths)
+    try:
+        yield
+    finally:
+        sys.path[:] = original


-def _is_relative_to(self: Path, *other: Path) ->bool:
+def _is_relative_to(self: Path, *other: Path) -> bool:
     """Checks if self is relative to other.

     Backport of pathlib.Path.is_relative_to for Python <3.9
     TODO: py39: Remove this backport and use stdlib function.
     """
-    pass
+    try:
+        self.relative_to(*other)
+        return True
+    except ValueError:
+        return False
diff --git a/pylint/message/_deleted_message_ids.py b/pylint/message/_deleted_message_ids.py
index dbcd3d0ab..60289e805 100644
--- a/pylint/message/_deleted_message_ids.py
+++ b/pylint/message/_deleted_message_ids.py
@@ -1,4 +1,9 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from functools import lru_cache
 from typing import NamedTuple

@@ -10,98 +15,165 @@ class DeletedMessage(NamedTuple):


 DELETED_MSGID_PREFIXES: list[int] = []
-DELETED_MESSAGES_IDS = {'https://github.com/pylint-dev/pylint/pull/4942': [
-    DeletedMessage('W1601', 'apply-builtin'), DeletedMessage('E1601',
-    'print-statement'), DeletedMessage('E1602', 'parameter-unpacking'),
-    DeletedMessage('E1603', 'unpacking-in-except', [('W0712',
-    'old-unpacking-in-except')]), DeletedMessage('E1604',
-    'old-raise-syntax', [('W0121', 'old-old-raise-syntax')]),
-    DeletedMessage('E1605', 'backtick', [('W0333', 'old-backtick')]),
-    DeletedMessage('E1609', 'import-star-module-level'), DeletedMessage(
-    'W1601', 'apply-builtin'), DeletedMessage('W1602', 'basestring-builtin'
-    ), DeletedMessage('W1603', 'buffer-builtin'), DeletedMessage('W1604',
-    'cmp-builtin'), DeletedMessage('W1605', 'coerce-builtin'),
-    DeletedMessage('W1606', 'execfile-builtin'), DeletedMessage('W1607',
-    'file-builtin'), DeletedMessage('W1608', 'long-builtin'),
-    DeletedMessage('W1609', 'raw_input-builtin'), DeletedMessage('W1610',
-    'reduce-builtin'), DeletedMessage('W1611', 'standarderror-builtin'),
-    DeletedMessage('W1612', 'unicode-builtin'), DeletedMessage('W1613',
-    'xrange-builtin'), DeletedMessage('W1614', 'coerce-method'),
-    DeletedMessage('W1615', 'delslice-method'), DeletedMessage('W1616',
-    'getslice-method'), DeletedMessage('W1617', 'setslice-method'),
-    DeletedMessage('W1618', 'no-absolute-import'), DeletedMessage('W1619',
-    'old-division'), DeletedMessage('W1620', 'dict-iter-method'),
-    DeletedMessage('W1621', 'dict-view-method'), DeletedMessage('W1622',
-    'next-method-called'), DeletedMessage('W1623', 'metaclass-assignment'),
-    DeletedMessage('W1624', 'indexing-exception', [('W0713',
-    'old-indexing-exception')]), DeletedMessage('W1625', 'raising-string',
-    [('W0701', 'old-raising-string')]), DeletedMessage('W1626',
-    'reload-builtin'), DeletedMessage('W1627', 'oct-method'),
-    DeletedMessage('W1628', 'hex-method'), DeletedMessage('W1629',
-    'nonzero-method'), DeletedMessage('W1630', 'cmp-method'),
-    DeletedMessage('W1632', 'input-builtin'), DeletedMessage('W1633',
-    'round-builtin'), DeletedMessage('W1634', 'intern-builtin'),
-    DeletedMessage('W1635', 'unichr-builtin'), DeletedMessage('W1636',
-    'map-builtin-not-iterating', [('W1631', 'implicit-map-evaluation')]),
-    DeletedMessage('W1637', 'zip-builtin-not-iterating'), DeletedMessage(
-    'W1638', 'range-builtin-not-iterating'), DeletedMessage('W1639',
-    'filter-builtin-not-iterating'), DeletedMessage('W1640',
-    'using-cmp-argument'), DeletedMessage('W1642', 'div-method'),
-    DeletedMessage('W1643', 'idiv-method'), DeletedMessage('W1644',
-    'rdiv-method'), DeletedMessage('W1645', 'exception-message-attribute'),
-    DeletedMessage('W1646', 'invalid-str-codec'), DeletedMessage('W1647',
-    'sys-max-int'), DeletedMessage('W1648', 'bad-python3-import'),
-    DeletedMessage('W1649', 'deprecated-string-function'), DeletedMessage(
-    'W1650', 'deprecated-str-translate-call'), DeletedMessage('W1651',
-    'deprecated-itertools-function'), DeletedMessage('W1652',
-    'deprecated-types-field'), DeletedMessage('W1653',
-    'next-method-defined'), DeletedMessage('W1654',
-    'dict-items-not-iterating'), DeletedMessage('W1655',
-    'dict-keys-not-iterating'), DeletedMessage('W1656',
-    'dict-values-not-iterating'), DeletedMessage('W1657',
-    'deprecated-operator-function'), DeletedMessage('W1658',
-    'deprecated-urllib-function'), DeletedMessage('W1659',
-    'xreadlines-attribute'), DeletedMessage('W1660',
-    'deprecated-sys-function'), DeletedMessage('W1661', 'exception-escape'),
-    DeletedMessage('W1662', 'comprehension-escape')],
-    'https://github.com/pylint-dev/pylint/pull/3578': [DeletedMessage(
-    'W0312', 'mixed-indentation')],
-    'https://github.com/pylint-dev/pylint/pull/3577': [DeletedMessage(
-    'C0326', 'bad-whitespace', [('C0323', 'no-space-after-operator'), (
-    'C0324', 'no-space-after-comma'), ('C0322', 'no-space-before-operator')
-    ])], 'https://github.com/pylint-dev/pylint/pull/3571': [DeletedMessage(
-    'C0330', 'bad-continuation')],
-    'https://pylint.readthedocs.io/en/latest/whatsnew/1/1.4.html#what-s-new-in-pylint-1-4-3'
-    : [DeletedMessage('R0921', 'abstract-class-not-used'), DeletedMessage(
-    'R0922', 'abstract-class-little-used'), DeletedMessage('W0142',
-    'star-args')], 'https://github.com/pylint-dev/pylint/issues/2409': [
-    DeletedMessage('W0232', 'no-init')],
-    'https://github.com/pylint-dev/pylint/pull/6421': [DeletedMessage(
-    'W0111', 'assign-to-new-keyword')]}
+
+DELETED_MESSAGES_IDS = {
+    # Everything until the next comment is from the PY3K+ checker
+    "https://github.com/pylint-dev/pylint/pull/4942": [
+        DeletedMessage("W1601", "apply-builtin"),
+        DeletedMessage("E1601", "print-statement"),
+        DeletedMessage("E1602", "parameter-unpacking"),
+        DeletedMessage(
+            "E1603", "unpacking-in-except", [("W0712", "old-unpacking-in-except")]
+        ),
+        DeletedMessage(
+            "E1604", "old-raise-syntax", [("W0121", "old-old-raise-syntax")]
+        ),
+        DeletedMessage("E1605", "backtick", [("W0333", "old-backtick")]),
+        DeletedMessage("E1609", "import-star-module-level"),
+        DeletedMessage("W1601", "apply-builtin"),
+        DeletedMessage("W1602", "basestring-builtin"),
+        DeletedMessage("W1603", "buffer-builtin"),
+        DeletedMessage("W1604", "cmp-builtin"),
+        DeletedMessage("W1605", "coerce-builtin"),
+        DeletedMessage("W1606", "execfile-builtin"),
+        DeletedMessage("W1607", "file-builtin"),
+        DeletedMessage("W1608", "long-builtin"),
+        DeletedMessage("W1609", "raw_input-builtin"),
+        DeletedMessage("W1610", "reduce-builtin"),
+        DeletedMessage("W1611", "standarderror-builtin"),
+        DeletedMessage("W1612", "unicode-builtin"),
+        DeletedMessage("W1613", "xrange-builtin"),
+        DeletedMessage("W1614", "coerce-method"),
+        DeletedMessage("W1615", "delslice-method"),
+        DeletedMessage("W1616", "getslice-method"),
+        DeletedMessage("W1617", "setslice-method"),
+        DeletedMessage("W1618", "no-absolute-import"),
+        DeletedMessage("W1619", "old-division"),
+        DeletedMessage("W1620", "dict-iter-method"),
+        DeletedMessage("W1621", "dict-view-method"),
+        DeletedMessage("W1622", "next-method-called"),
+        DeletedMessage("W1623", "metaclass-assignment"),
+        DeletedMessage(
+            "W1624", "indexing-exception", [("W0713", "old-indexing-exception")]
+        ),
+        DeletedMessage("W1625", "raising-string", [("W0701", "old-raising-string")]),
+        DeletedMessage("W1626", "reload-builtin"),
+        DeletedMessage("W1627", "oct-method"),
+        DeletedMessage("W1628", "hex-method"),
+        DeletedMessage("W1629", "nonzero-method"),
+        DeletedMessage("W1630", "cmp-method"),
+        DeletedMessage("W1632", "input-builtin"),
+        DeletedMessage("W1633", "round-builtin"),
+        DeletedMessage("W1634", "intern-builtin"),
+        DeletedMessage("W1635", "unichr-builtin"),
+        DeletedMessage(
+            "W1636", "map-builtin-not-iterating", [("W1631", "implicit-map-evaluation")]
+        ),
+        DeletedMessage("W1637", "zip-builtin-not-iterating"),
+        DeletedMessage("W1638", "range-builtin-not-iterating"),
+        DeletedMessage("W1639", "filter-builtin-not-iterating"),
+        DeletedMessage("W1640", "using-cmp-argument"),
+        DeletedMessage("W1642", "div-method"),
+        DeletedMessage("W1643", "idiv-method"),
+        DeletedMessage("W1644", "rdiv-method"),
+        DeletedMessage("W1645", "exception-message-attribute"),
+        DeletedMessage("W1646", "invalid-str-codec"),
+        DeletedMessage("W1647", "sys-max-int"),
+        DeletedMessage("W1648", "bad-python3-import"),
+        DeletedMessage("W1649", "deprecated-string-function"),
+        DeletedMessage("W1650", "deprecated-str-translate-call"),
+        DeletedMessage("W1651", "deprecated-itertools-function"),
+        DeletedMessage("W1652", "deprecated-types-field"),
+        DeletedMessage("W1653", "next-method-defined"),
+        DeletedMessage("W1654", "dict-items-not-iterating"),
+        DeletedMessage("W1655", "dict-keys-not-iterating"),
+        DeletedMessage("W1656", "dict-values-not-iterating"),
+        DeletedMessage("W1657", "deprecated-operator-function"),
+        DeletedMessage("W1658", "deprecated-urllib-function"),
+        DeletedMessage("W1659", "xreadlines-attribute"),
+        DeletedMessage("W1660", "deprecated-sys-function"),
+        DeletedMessage("W1661", "exception-escape"),
+        DeletedMessage("W1662", "comprehension-escape"),
+    ],
+    "https://github.com/pylint-dev/pylint/pull/3578": [
+        DeletedMessage("W0312", "mixed-indentation"),
+    ],
+    "https://github.com/pylint-dev/pylint/pull/3577": [
+        DeletedMessage(
+            "C0326",
+            "bad-whitespace",
+            [
+                ("C0323", "no-space-after-operator"),
+                ("C0324", "no-space-after-comma"),
+                ("C0322", "no-space-before-operator"),
+            ],
+        ),
+    ],
+    "https://github.com/pylint-dev/pylint/pull/3571": [
+        DeletedMessage("C0330", "bad-continuation")
+    ],
+    "https://pylint.readthedocs.io/en/latest/whatsnew/1/1.4.html#what-s-new-in-pylint-1-4-3": [
+        DeletedMessage("R0921", "abstract-class-not-used"),
+        DeletedMessage("R0922", "abstract-class-little-used"),
+        DeletedMessage("W0142", "star-args"),
+    ],
+    "https://github.com/pylint-dev/pylint/issues/2409": [
+        DeletedMessage("W0232", "no-init"),
+    ],
+    "https://github.com/pylint-dev/pylint/pull/6421": [
+        DeletedMessage("W0111", "assign-to-new-keyword"),
+    ],
+}
 MOVED_TO_EXTENSIONS = {
-    'https://pylint.readthedocs.io/en/latest/whatsnew/2/2.14/summary.html#removed-checkers'
-    : [DeletedMessage('R0201', 'no-self-use')]}
+    "https://pylint.readthedocs.io/en/latest/whatsnew/2/2.14/summary.html#removed-checkers": [
+        DeletedMessage("R0201", "no-self-use")
+    ],
+}


 @lru_cache(maxsize=None)
-def is_deleted_symbol(symbol: str) ->(str | None):
+def is_deleted_symbol(symbol: str) -> str | None:
     """Return the explanation for removal if the message was removed."""
-    pass
+    for explanation, deleted_messages in DELETED_MESSAGES_IDS.items():
+        for deleted_message in deleted_messages:
+            if symbol == deleted_message.symbol or any(
+                symbol == m[1] for m in deleted_message.old_names
+            ):
+                return explanation
+    return None


 @lru_cache(maxsize=None)
-def is_deleted_msgid(msgid: str) ->(str | None):
+def is_deleted_msgid(msgid: str) -> str | None:
     """Return the explanation for removal if the message was removed."""
-    pass
+    for explanation, deleted_messages in DELETED_MESSAGES_IDS.items():
+        for deleted_message in deleted_messages:
+            if msgid == deleted_message.msgid or any(
+                msgid == m[0] for m in deleted_message.old_names
+            ):
+                return explanation
+    return None


 @lru_cache(maxsize=None)
-def is_moved_symbol(symbol: str) ->(str | None):
+def is_moved_symbol(symbol: str) -> str | None:
     """Return the explanation for moving if the message was moved to extensions."""
-    pass
+    for explanation, moved_messages in MOVED_TO_EXTENSIONS.items():
+        for moved_message in moved_messages:
+            if symbol == moved_message.symbol or any(
+                symbol == m[1] for m in moved_message.old_names
+            ):
+                return explanation
+    return None


 @lru_cache(maxsize=None)
-def is_moved_msgid(msgid: str) ->(str | None):
+def is_moved_msgid(msgid: str) -> str | None:
     """Return the explanation for moving if the message was moved to extensions."""
-    pass
+    for explanation, moved_messages in MOVED_TO_EXTENSIONS.items():
+        for moved_message in moved_messages:
+            if msgid == moved_message.msgid or any(
+                msgid == m[0] for m in moved_message.old_names
+            ):
+                return explanation
+    return None
diff --git a/pylint/message/message.py b/pylint/message/message.py
index f8eac91be..6ee8c5f78 100644
--- a/pylint/message/message.py
+++ b/pylint/message/message.py
@@ -1,13 +1,20 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from dataclasses import asdict, dataclass
+
 from pylint.constants import MSG_TYPES
 from pylint.interfaces import UNDEFINED, Confidence
 from pylint.typing import MessageLocationTuple


 @dataclass(unsafe_hash=True)
-class Message:
+class Message:  # pylint: disable=too-many-instance-attributes
     """This class represent a message to be issued by the reporters."""
+
     msg_id: str
     symbol: str
     msg: str
@@ -23,9 +30,14 @@ class Message:
     end_line: int | None
     end_column: int | None

-    def __init__(self, msg_id: str, symbol: str, location:
-        MessageLocationTuple, msg: str, confidence: (Confidence | None)
-        ) ->None:
+    def __init__(
+        self,
+        msg_id: str,
+        symbol: str,
+        location: MessageLocationTuple,
+        msg: str,
+        confidence: Confidence | None,
+    ) -> None:
         self.msg_id = msg_id
         self.symbol = symbol
         self.msg = msg
@@ -41,10 +53,23 @@ class Message:
         self.end_line = location.end_line
         self.end_column = location.end_column

-    def format(self, template: str) ->str:
+    def format(self, template: str) -> str:
         """Format the message according to the given template.

         The template format is the one of the format method :
         cf. https://docs.python.org/2/library/string.html#formatstrings
         """
-        pass
+        return template.format(**asdict(self))
+
+    @property
+    def location(self) -> MessageLocationTuple:
+        return MessageLocationTuple(
+            self.abspath,
+            self.path,
+            self.module,
+            self.obj,
+            self.line,
+            self.column,
+            self.end_line,
+            self.end_column,
+        )
diff --git a/pylint/message/message_definition.py b/pylint/message/message_definition.py
index 4ae7655d2..a318cc83f 100644
--- a/pylint/message/message_definition.py
+++ b/pylint/message/message_definition.py
@@ -1,21 +1,38 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import sys
 from typing import TYPE_CHECKING
+
 from astroid import nodes
+
 from pylint.constants import _SCOPE_EXEMPT, MSG_TYPES, WarningScope
 from pylint.exceptions import InvalidMessageError
 from pylint.utils import normalize_text
+
 if TYPE_CHECKING:
     from pylint.checkers import BaseChecker


 class MessageDefinition:
-
-    def __init__(self, checker: BaseChecker, msgid: str, msg: str,
-        description: str, symbol: str, scope: str, minversion: (tuple[int,
-        int] | None)=None, maxversion: (tuple[int, int] | None)=None,
-        old_names: (list[tuple[str, str]] | None)=None, shared: bool=False,
-        default_enabled: bool=True) ->None:
+    # pylint: disable-next=too-many-arguments
+    def __init__(
+        self,
+        checker: BaseChecker,
+        msgid: str,
+        msg: str,
+        description: str,
+        symbol: str,
+        scope: str,
+        minversion: tuple[int, int] | None = None,
+        maxversion: tuple[int, int] | None = None,
+        old_names: list[tuple[str, str]] | None = None,
+        shared: bool = False,
+        default_enabled: bool = True,
+    ) -> None:
         self.checker_name = checker.name
         self.check_msgid(msgid)
         self.msgid = msgid
@@ -31,28 +48,84 @@ class MessageDefinition:
         if old_names:
             for old_msgid, old_symbol in old_names:
                 self.check_msgid(old_msgid)
-                self.old_names.append((old_msgid, old_symbol))
+                self.old_names.append(
+                    (old_msgid, old_symbol),
+                )
+
+    @staticmethod
+    def check_msgid(msgid: str) -> None:
+        if len(msgid) != 5:
+            raise InvalidMessageError(f"Invalid message id {msgid!r}")
+        if msgid[0] not in MSG_TYPES:
+            raise InvalidMessageError(f"Bad message type {msgid[0]} in {msgid!r}")

-    def __eq__(self, other: object) ->bool:
-        return isinstance(other, MessageDefinition
-            ) and self.msgid == other.msgid and self.symbol == other.symbol
+    def __eq__(self, other: object) -> bool:
+        return (
+            isinstance(other, MessageDefinition)
+            and self.msgid == other.msgid
+            and self.symbol == other.symbol
+        )

-    def __repr__(self) ->str:
-        return f'MessageDefinition:{self.symbol} ({self.msgid})'
+    def __repr__(self) -> str:
+        return f"MessageDefinition:{self.symbol} ({self.msgid})"

-    def __str__(self) ->str:
-        return f'{self!r}:\n{self.msg} {self.description}'
+    def __str__(self) -> str:
+        return f"{self!r}:\n{self.msg} {self.description}"

-    def may_be_emitted(self, py_version: (tuple[int, ...] | sys._version_info)
-        ) ->bool:
+    def may_be_emitted(self, py_version: tuple[int, ...] | sys._version_info) -> bool:
         """May the message be emitted using the configured py_version?"""
-        pass
+        if self.minversion is not None and self.minversion > py_version:
+            return False
+        if self.maxversion is not None and self.maxversion <= py_version:
+            return False
+        return True

-    def format_help(self, checkerref: bool=False) ->str:
+    def format_help(self, checkerref: bool = False) -> str:
         """Return the help string for the given message id."""
-        pass
+        desc = self.description
+        if checkerref:
+            desc += f" This message belongs to the {self.checker_name} checker."
+        title = self.msg
+        if self.minversion or self.maxversion:
+            restr = []
+            if self.minversion:
+                restr.append(f"< {'.'.join(str(n) for n in self.minversion)}")
+            if self.maxversion:
+                restr.append(f">= {'.'.join(str(n) for n in self.maxversion)}")
+            restriction = " or ".join(restr)
+            if checkerref:
+                desc += f" It can't be emitted when using Python {restriction}."
+            else:
+                desc += (
+                    f" This message can't be emitted when using Python {restriction}."
+                )
+        msg_help = normalize_text(" ".join(desc.split()), indent="  ")
+        message_id = f"{self.symbol} ({self.msgid})"
+        if title != "%s":
+            title = title.splitlines()[0]
+            return f":{message_id}: *{title.rstrip(' ')}*\n{msg_help}"
+        return f":{message_id}:\n{msg_help}"

-    def check_message_definition(self, line: (int | None), node: (nodes.
-        NodeNG | None)) ->None:
+    def check_message_definition(
+        self, line: int | None, node: nodes.NodeNG | None
+    ) -> None:
         """Check MessageDefinition for possible errors."""
-        pass
+        if self.msgid[0] not in _SCOPE_EXEMPT:
+            # Fatal messages and reports are special, the node/scope distinction
+            # does not apply to them.
+            if self.scope == WarningScope.LINE:
+                if line is None:
+                    raise InvalidMessageError(
+                        f"Message {self.msgid} must provide line, got None"
+                    )
+                if node is not None:
+                    raise InvalidMessageError(
+                        f"Message {self.msgid} must only provide line, "
+                        f"got line={line}, node={node}"
+                    )
+            elif self.scope == WarningScope.NODE:
+                # Node-based warnings may provide an override line.
+                if node is None:
+                    raise InvalidMessageError(
+                        f"Message {self.msgid} must provide Node, got None"
+                    )
diff --git a/pylint/message/message_definition_store.py b/pylint/message/message_definition_store.py
index 98137664d..cf271d7ff 100644
--- a/pylint/message/message_definition_store.py
+++ b/pylint/message/message_definition_store.py
@@ -1,12 +1,19 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import collections
 import functools
 import sys
 from collections.abc import Sequence, ValuesView
 from typing import TYPE_CHECKING
+
 from pylint.exceptions import UnknownMessageError
 from pylint.message.message_definition import MessageDefinition
 from pylint.message.message_id_store import MessageIdStore
+
 if TYPE_CHECKING:
     from pylint.checkers import BaseChecker

@@ -16,51 +23,98 @@ class MessageDefinitionStore:
     has no particular state during analysis.
     """

-    def __init__(self, py_version: (tuple[int, ...] | sys._version_info)=
-        sys.version_info) ->None:
+    def __init__(
+        self, py_version: tuple[int, ...] | sys._version_info = sys.version_info
+    ) -> None:
         self.message_id_store: MessageIdStore = MessageIdStore()
+        # Primary registry for all active messages definitions.
+        # It contains the 1:1 mapping from msgid to MessageDefinition.
+        # Keys are msgid, values are MessageDefinition
         self._messages_definitions: dict[str, MessageDefinition] = {}
-        self._msgs_by_category: dict[str, list[str]] = collections.defaultdict(
-            list)
+        # MessageDefinition kept by category
+        self._msgs_by_category: dict[str, list[str]] = collections.defaultdict(list)
         self.py_version = py_version

     @property
-    def messages(self) ->ValuesView[MessageDefinition]:
+    def messages(self) -> ValuesView[MessageDefinition]:
         """The list of all active messages."""
-        pass
+        return self._messages_definitions.values()

-    def register_messages_from_checker(self, checker: BaseChecker) ->None:
+    def register_messages_from_checker(self, checker: BaseChecker) -> None:
         """Register all messages definitions from a checker."""
-        pass
+        checker.check_consistency()
+        for message in checker.messages:
+            self.register_message(message)

-    def register_message(self, message: MessageDefinition) ->None:
+    def register_message(self, message: MessageDefinition) -> None:
         """Register a MessageDefinition with consistency in mind."""
-        pass
+        self.message_id_store.register_message_definition(
+            message.msgid, message.symbol, message.old_names
+        )
+        self._messages_definitions[message.msgid] = message
+        self._msgs_by_category[message.msgid[0]].append(message.msgid)

-    @functools.lru_cache(maxsize=None)
-    def get_message_definitions(self, msgid_or_symbol: str) ->list[
-        MessageDefinition]:
+    # Since MessageDefinitionStore is only initialized once
+    # and the arguments are relatively small we do not run the
+    # risk of creating a large memory leak.
+    # See discussion in: https://github.com/pylint-dev/pylint/pull/5673
+    @functools.lru_cache(  # pylint: disable=method-cache-max-size-none # noqa: B019
+        maxsize=None
+    )
+    def get_message_definitions(self, msgid_or_symbol: str) -> list[MessageDefinition]:
         """Returns the Message definition for either a numeric or symbolic id.

         The cache has no limit as its size will likely stay minimal. For each message we store
         about 1000 characters, so even if we would have 1000 messages the cache would only
         take up ~= 1 Mb.
         """
-        pass
+        return [
+            self._messages_definitions[m]
+            for m in self.message_id_store.get_active_msgids(msgid_or_symbol)
+        ]

-    def get_msg_display_string(self, msgid_or_symbol: str) ->str:
+    def get_msg_display_string(self, msgid_or_symbol: str) -> str:
         """Generates a user-consumable representation of a message."""
-        pass
+        message_definitions = self.get_message_definitions(msgid_or_symbol)
+        if len(message_definitions) == 1:
+            return repr(message_definitions[0].symbol)
+        return repr([md.symbol for md in message_definitions])

-    def help_message(self, msgids_or_symbols: Sequence[str]) ->None:
+    def help_message(self, msgids_or_symbols: Sequence[str]) -> None:
         """Display help messages for the given message identifiers."""
-        pass
+        for msgids_or_symbol in msgids_or_symbols:
+            try:
+                for message_definition in self.get_message_definitions(
+                    msgids_or_symbol
+                ):
+                    print(message_definition.format_help(checkerref=True))
+                    print("")
+            except UnknownMessageError as ex:
+                print(ex)
+                print("")
+                continue

-    def list_messages(self) ->None:
+    def list_messages(self) -> None:
         """Output full messages list documentation in ReST format."""
-        pass
+        emittable, non_emittable = self.find_emittable_messages()
+        print("Emittable messages with current interpreter:")
+        for msg in emittable:
+            print(msg.format_help(checkerref=False))
+        print("\nNon-emittable messages with current interpreter:")
+        for msg in non_emittable:
+            print(msg.format_help(checkerref=False))
+        print("")

-    def find_emittable_messages(self) ->tuple[list[MessageDefinition], list
-        [MessageDefinition]]:
+    def find_emittable_messages(
+        self,
+    ) -> tuple[list[MessageDefinition], list[MessageDefinition]]:
         """Finds all emittable and non-emittable messages."""
-        pass
+        messages = sorted(self._messages_definitions.values(), key=lambda m: m.msgid)
+        emittable = []
+        non_emittable = []
+        for message in messages:
+            if message.may_be_emitted(self.py_version):
+                emittable.append(message)
+            else:
+                non_emittable.append(message)
+        return emittable, non_emittable
diff --git a/pylint/message/message_id_store.py b/pylint/message/message_id_store.py
index 64562013d..b07a9c3f7 100644
--- a/pylint/message/message_id_store.py
+++ b/pylint/message/message_id_store.py
@@ -1,7 +1,23 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import NoReturn
-from pylint.exceptions import DeletedMessageError, InvalidMessageError, MessageBecameExtensionError, UnknownMessageError
-from pylint.message._deleted_message_ids import is_deleted_msgid, is_deleted_symbol, is_moved_msgid, is_moved_symbol
+
+from pylint.exceptions import (
+    DeletedMessageError,
+    InvalidMessageError,
+    MessageBecameExtensionError,
+    UnknownMessageError,
+)
+from pylint.message._deleted_message_ids import (
+    is_deleted_msgid,
+    is_deleted_symbol,
+    is_moved_msgid,
+    is_moved_symbol,
+)


 class MessageIdStore:
@@ -9,54 +25,138 @@ class MessageIdStore:
     between msgid and symbol.
     """

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         self.__msgid_to_symbol: dict[str, str] = {}
         self.__symbol_to_msgid: dict[str, str] = {}
         self.__old_names: dict[str, list[str]] = {}
         self.__active_msgids: dict[str, list[str]] = {}

-    def __len__(self) ->int:
+    def __len__(self) -> int:
         return len(self.__msgid_to_symbol)

-    def __repr__(self) ->str:
-        result = 'MessageIdStore: [\n'
+    def __repr__(self) -> str:
+        result = "MessageIdStore: [\n"
         for msgid, symbol in self.__msgid_to_symbol.items():
-            result += f'  - {msgid} ({symbol})\n'
-        result += ']'
+            result += f"  - {msgid} ({symbol})\n"
+        result += "]"
         return result

-    def add_msgid_and_symbol(self, msgid: str, symbol: str) ->None:
+    def get_symbol(self, msgid: str) -> str:
+        try:
+            return self.__msgid_to_symbol[msgid.upper()]
+        except KeyError as e:
+            msg = f"'{msgid}' is not stored in the message store."
+            raise UnknownMessageError(msg) from e
+
+    def get_msgid(self, symbol: str) -> str:
+        try:
+            return self.__symbol_to_msgid[symbol]
+        except KeyError as e:
+            msg = f"'{symbol}' is not stored in the message store."
+            raise UnknownMessageError(msg) from e
+
+    def register_message_definition(
+        self, msgid: str, symbol: str, old_names: list[tuple[str, str]]
+    ) -> None:
+        self.check_msgid_and_symbol(msgid, symbol)
+        self.add_msgid_and_symbol(msgid, symbol)
+        for old_msgid, old_symbol in old_names:
+            self.check_msgid_and_symbol(old_msgid, old_symbol)
+            self.add_legacy_msgid_and_symbol(old_msgid, old_symbol, msgid)
+
+    def add_msgid_and_symbol(self, msgid: str, symbol: str) -> None:
         """Add valid message id.

         There is a little duplication with add_legacy_msgid_and_symbol to avoid a function call,
         this is called a lot at initialization.
         """
-        pass
+        self.__msgid_to_symbol[msgid] = symbol
+        self.__symbol_to_msgid[symbol] = msgid

-    def add_legacy_msgid_and_symbol(self, msgid: str, symbol: str,
-        new_msgid: str) ->None:
+    def add_legacy_msgid_and_symbol(
+        self, msgid: str, symbol: str, new_msgid: str
+    ) -> None:
         """Add valid legacy message id.

         There is a little duplication with add_msgid_and_symbol to avoid a function call,
         this is called a lot at initialization.
         """
-        pass
+        self.__msgid_to_symbol[msgid] = symbol
+        self.__symbol_to_msgid[symbol] = msgid
+        existing_old_names = self.__old_names.get(msgid, [])
+        existing_old_names.append(new_msgid)
+        self.__old_names[msgid] = existing_old_names
+
+    def check_msgid_and_symbol(self, msgid: str, symbol: str) -> None:
+        existing_msgid: str | None = self.__symbol_to_msgid.get(symbol)
+        existing_symbol: str | None = self.__msgid_to_symbol.get(msgid)
+        if existing_symbol is None and existing_msgid is None:
+            return  # both symbol and msgid are usable
+        if existing_msgid is not None:
+            if existing_msgid != msgid:
+                self._raise_duplicate_msgid(symbol, msgid, existing_msgid)
+        if existing_symbol and existing_symbol != symbol:
+            # See https://github.com/python/mypy/issues/10559
+            self._raise_duplicate_symbol(msgid, symbol, existing_symbol)

     @staticmethod
-    def _raise_duplicate_symbol(msgid: str, symbol: str, other_symbol: str
-        ) ->NoReturn:
+    def _raise_duplicate_symbol(msgid: str, symbol: str, other_symbol: str) -> NoReturn:
         """Raise an error when a symbol is duplicated."""
-        pass
+        symbols = [symbol, other_symbol]
+        symbols.sort()
+        error_message = f"Message id '{msgid}' cannot have both "
+        error_message += f"'{symbols[0]}' and '{symbols[1]}' as symbolic name."
+        raise InvalidMessageError(error_message)

     @staticmethod
-    def _raise_duplicate_msgid(symbol: str, msgid: str, other_msgid: str
-        ) ->NoReturn:
+    def _raise_duplicate_msgid(symbol: str, msgid: str, other_msgid: str) -> NoReturn:
         """Raise an error when a msgid is duplicated."""
-        pass
+        msgids = [msgid, other_msgid]
+        msgids.sort()
+        error_message = (
+            f"Message symbol '{symbol}' cannot be used for "
+            f"'{msgids[0]}' and '{msgids[1]}' at the same time."
+            f" If you're creating an 'old_names' use 'old-{symbol}' as the old symbol."
+        )
+        raise InvalidMessageError(error_message)

-    def get_active_msgids(self, msgid_or_symbol: str) ->list[str]:
+    def get_active_msgids(self, msgid_or_symbol: str) -> list[str]:
         """Return msgids but the input can be a symbol.

         self.__active_msgids is used to implement a primitive cache for this function.
         """
-        pass
+        try:
+            return self.__active_msgids[msgid_or_symbol]
+        except KeyError:
+            pass
+
+        # If we don't have a cached value yet we compute it
+        msgid: str | None
+        deletion_reason = None
+        moved_reason = None
+        if msgid_or_symbol[1:].isdigit():
+            # Only msgid can have a digit as second letter
+            msgid = msgid_or_symbol.upper()
+            symbol = self.__msgid_to_symbol.get(msgid)
+            if not symbol:
+                deletion_reason = is_deleted_msgid(msgid)
+                if deletion_reason is None:
+                    moved_reason = is_moved_msgid(msgid)
+        else:
+            symbol = msgid_or_symbol
+            msgid = self.__symbol_to_msgid.get(msgid_or_symbol)
+            if not msgid:
+                deletion_reason = is_deleted_symbol(symbol)
+                if deletion_reason is None:
+                    moved_reason = is_moved_symbol(symbol)
+        if not msgid or not symbol:
+            if deletion_reason is not None:
+                raise DeletedMessageError(msgid_or_symbol, deletion_reason)
+            if moved_reason is not None:
+                raise MessageBecameExtensionError(msgid_or_symbol, moved_reason)
+            error_msg = f"No such message id or symbol '{msgid_or_symbol}'."
+            raise UnknownMessageError(error_msg)
+        ids = self.__old_names.get(msgid, [msgid])
+        # Add to cache
+        self.__active_msgids[msgid_or_symbol] = ids
+        return ids
diff --git a/pylint/pyreverse/diadefslib.py b/pylint/pyreverse/diadefslib.py
index 4865d5ddc..88aea482e 100644
--- a/pylint/pyreverse/diadefslib.py
+++ b/pylint/pyreverse/diadefslib.py
@@ -1,65 +1,127 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Handle diagram generation options for class diagram or default diagrams."""
+
 from __future__ import annotations
+
 import argparse
 from collections.abc import Generator
 from typing import Any
+
 import astroid
 from astroid import nodes
 from astroid.modutils import is_stdlib_module
+
 from pylint.pyreverse.diagrams import ClassDiagram, PackageDiagram
 from pylint.pyreverse.inspector import Linker, Project
 from pylint.pyreverse.utils import LocalsVisitor

+# diagram generators ##########################################################
+

 class DiaDefGenerator:
     """Handle diagram generation options."""

-    def __init__(self, linker: Linker, handler: DiadefsHandler) ->None:
+    def __init__(self, linker: Linker, handler: DiadefsHandler) -> None:
         """Common Diagram Handler initialization."""
         self.config = handler.config
         self.module_names: bool = False
         self._set_default_options()
         self.linker = linker
-        self.classdiagram: ClassDiagram
+        self.classdiagram: ClassDiagram  # defined by subclasses

-    def get_title(self, node: nodes.ClassDef) ->str:
+    def get_title(self, node: nodes.ClassDef) -> str:
         """Get title for objects."""
-        pass
+        title = node.name
+        if self.module_names:
+            title = f"{node.root().name}.{title}"
+        return title  # type: ignore[no-any-return]

-    def _set_option(self, option: (bool | None)) ->bool:
+    def _set_option(self, option: bool | None) -> bool:
         """Activate some options if not explicitly deactivated."""
-        pass
+        # if we have a class diagram, we want more information by default;
+        # so if the option is None, we return True
+        if option is None:
+            return bool(self.config.classes)
+        return option

-    def _set_default_options(self) ->None:
+    def _set_default_options(self) -> None:
         """Set different default options with _default dictionary."""
-        pass
-
-    def _get_levels(self) ->tuple[int, int]:
+        self.module_names = self._set_option(self.config.module_names)
+        all_ancestors = self._set_option(self.config.all_ancestors)
+        all_associated = self._set_option(self.config.all_associated)
+        anc_level, association_level = (0, 0)
+        if all_ancestors:
+            anc_level = -1
+        if all_associated:
+            association_level = -1
+        if self.config.show_ancestors is not None:
+            anc_level = self.config.show_ancestors
+        if self.config.show_associated is not None:
+            association_level = self.config.show_associated
+        self.anc_level, self.association_level = anc_level, association_level
+
+    def _get_levels(self) -> tuple[int, int]:
         """Help function for search levels."""
-        pass
+        return self.anc_level, self.association_level

-    def show_node(self, node: nodes.ClassDef) ->bool:
+    def show_node(self, node: nodes.ClassDef) -> bool:
         """Determine if node should be shown based on config."""
-        pass
+        if node.root().name == "builtins":
+            return self.config.show_builtin  # type: ignore[no-any-return]
+
+        if is_stdlib_module(node.root().name):
+            return self.config.show_stdlib  # type: ignore[no-any-return]
+
+        return True

-    def add_class(self, node: nodes.ClassDef) ->None:
+    def add_class(self, node: nodes.ClassDef) -> None:
         """Visit one class and add it to diagram."""
-        pass
+        self.linker.visit(node)
+        self.classdiagram.add_object(self.get_title(node), node)

-    def get_ancestors(self, node: nodes.ClassDef, level: int) ->Generator[
-        nodes.ClassDef, None, None]:
+    def get_ancestors(
+        self, node: nodes.ClassDef, level: int
+    ) -> Generator[nodes.ClassDef, None, None]:
         """Return ancestor nodes of a class node."""
-        pass
-
-    def get_associated(self, klass_node: nodes.ClassDef, level: int
-        ) ->Generator[nodes.ClassDef, None, None]:
+        if level == 0:
+            return
+        for ancestor in node.ancestors(recurs=False):
+            if not self.show_node(ancestor):
+                continue
+            yield ancestor
+
+    def get_associated(
+        self, klass_node: nodes.ClassDef, level: int
+    ) -> Generator[nodes.ClassDef, None, None]:
         """Return associated nodes of a class node."""
-        pass
-
-    def extract_classes(self, klass_node: nodes.ClassDef, anc_level: int,
-        association_level: int) ->None:
+        if level == 0:
+            return
+        for association_nodes in list(klass_node.instance_attrs_type.values()) + list(
+            klass_node.locals_type.values()
+        ):
+            for node in association_nodes:
+                if isinstance(node, astroid.Instance):
+                    node = node._proxied
+                if not (isinstance(node, nodes.ClassDef) and self.show_node(node)):
+                    continue
+                yield node
+
+    def extract_classes(
+        self, klass_node: nodes.ClassDef, anc_level: int, association_level: int
+    ) -> None:
         """Extract recursively classes related to klass_node."""
-        pass
+        if self.classdiagram.has_node(klass_node) or not self.show_node(klass_node):
+            return
+        self.add_class(klass_node)
+
+        for ancestor in self.get_ancestors(klass_node, anc_level):
+            self.extract_classes(ancestor, anc_level - 1, association_level)
+
+        for node in self.get_associated(klass_node, association_level):
+            self.extract_classes(node, anc_level, association_level - 1)


 class DefaultDiadefGenerator(LocalsVisitor, DiaDefGenerator):
@@ -69,41 +131,54 @@ class DefaultDiadefGenerator(LocalsVisitor, DiaDefGenerator):
     * a class diagram including project's classes
     """

-    def __init__(self, linker: Linker, handler: DiadefsHandler) ->None:
+    def __init__(self, linker: Linker, handler: DiadefsHandler) -> None:
         DiaDefGenerator.__init__(self, linker, handler)
         LocalsVisitor.__init__(self)

-    def visit_project(self, node: Project) ->None:
+    def visit_project(self, node: Project) -> None:
         """Visit a pyreverse.utils.Project node.

         create a diagram definition for packages
         """
-        pass
-
-    def leave_project(self, _: Project) ->Any:
+        mode = self.config.mode
+        if len(node.modules) > 1:
+            self.pkgdiagram: PackageDiagram | None = PackageDiagram(
+                f"packages {node.name}", mode
+            )
+        else:
+            self.pkgdiagram = None
+        self.classdiagram = ClassDiagram(f"classes {node.name}", mode)
+
+    def leave_project(self, _: Project) -> Any:
         """Leave the pyreverse.utils.Project node.

         return the generated diagram definition
         """
-        pass
+        if self.pkgdiagram:
+            return self.pkgdiagram, self.classdiagram
+        return (self.classdiagram,)

-    def visit_module(self, node: nodes.Module) ->None:
+    def visit_module(self, node: nodes.Module) -> None:
         """Visit an astroid.Module node.

         add this class to the package diagram definition
         """
-        pass
+        if self.pkgdiagram:
+            self.linker.visit(node)
+            self.pkgdiagram.add_object(node.name, node)

-    def visit_classdef(self, node: nodes.ClassDef) ->None:
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
         """Visit an astroid.Class node.

         add this class to the class diagram definition
         """
-        pass
+        anc_level, association_level = self._get_levels()
+        self.extract_classes(node, anc_level, association_level)

-    def visit_importfrom(self, node: nodes.ImportFrom) ->None:
+    def visit_importfrom(self, node: nodes.ImportFrom) -> None:
         """Visit astroid.ImportFrom  and catch modules for package diagram."""
-        pass
+        if self.pkgdiagram:
+            self.pkgdiagram.add_from_depend(node, node.modname)


 class ClassDiadefGenerator(DiaDefGenerator):
@@ -111,20 +186,32 @@ class ClassDiadefGenerator(DiaDefGenerator):
     given class.
     """

-    def class_diagram(self, project: Project, klass: nodes.ClassDef
-        ) ->ClassDiagram:
+    def class_diagram(self, project: Project, klass: nodes.ClassDef) -> ClassDiagram:
         """Return a class diagram definition for the class and related classes."""
-        pass
+        self.classdiagram = ClassDiagram(klass, self.config.mode)
+        if len(project.modules) > 1:
+            module, klass = klass.rsplit(".", 1)
+            module = project.get_module(module)
+        else:
+            module = project.modules[0]
+            klass = klass.split(".")[-1]
+        klass = next(module.ilookup(klass))
+
+        anc_level, association_level = self._get_levels()
+        self.extract_classes(klass, anc_level, association_level)
+        return self.classdiagram
+
+
+# diagram handler #############################################################


 class DiadefsHandler:
     """Get diagram definitions from user (i.e. xml files) or generate them."""

-    def __init__(self, config: argparse.Namespace) ->None:
+    def __init__(self, config: argparse.Namespace) -> None:
         self.config = config

-    def get_diadefs(self, project: Project, linker: Linker) ->list[ClassDiagram
-        ]:
+    def get_diadefs(self, project: Project, linker: Linker) -> list[ClassDiagram]:
         """Get the diagram's configuration data.

         :param project:The pyreverse project
@@ -135,4 +222,13 @@ class DiadefsHandler:
         :returns: The list of diagram definitions
         :rtype: list(:class:`pylint.pyreverse.diagrams.ClassDiagram`)
         """
-        pass
+        # read and interpret diagram definitions (Diadefs)
+        diagrams = []
+        generator = ClassDiadefGenerator(linker, self)
+        for klass in self.config.classes:
+            diagrams.append(generator.class_diagram(project, klass))
+        if not diagrams:
+            diagrams = DefaultDiadefGenerator(linker, self).visit(project)
+        for diagram in diagrams:
+            diagram.extract_relationships()
+        return diagrams
diff --git a/pylint/pyreverse/diagrams.py b/pylint/pyreverse/diagrams.py
index e1f5e7682..278102cab 100644
--- a/pylint/pyreverse/diagrams.py
+++ b/pylint/pyreverse/diagrams.py
@@ -1,9 +1,17 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Diagram objects."""
+
 from __future__ import annotations
+
 from collections.abc import Iterable
 from typing import Any
+
 import astroid
 from astroid import nodes, util
+
 from pylint.checkers.utils import decorated_with_property, in_type_checking_block
 from pylint.pyreverse.utils import FilterMixIn

@@ -11,15 +19,20 @@ from pylint.pyreverse.utils import FilterMixIn
 class Figure:
     """Base class for counter handling."""

-    def __init__(self) ->None:
-        self.fig_id: str = ''
+    def __init__(self) -> None:
+        self.fig_id: str = ""


 class Relationship(Figure):
     """A relationship from an object in the diagram to another."""

-    def __init__(self, from_object: DiagramEntity, to_object: DiagramEntity,
-        relation_type: str, name: (str | None)=None):
+    def __init__(
+        self,
+        from_object: DiagramEntity,
+        to_object: DiagramEntity,
+        relation_type: str,
+        name: str | None = None,
+    ):
         super().__init__()
         self.from_object = from_object
         self.to_object = to_object
@@ -29,27 +42,36 @@ class Relationship(Figure):

 class DiagramEntity(Figure):
     """A diagram object, i.e. a label associated to an astroid node."""
-    default_shape = ''

-    def __init__(self, title: str='No name', node: (nodes.NodeNG | None)=None
-        ) ->None:
+    default_shape = ""
+
+    def __init__(
+        self, title: str = "No name", node: nodes.NodeNG | None = None
+    ) -> None:
         super().__init__()
         self.title = title
-        self.node: nodes.NodeNG = node or nodes.NodeNG(lineno=None,
-            col_offset=None, end_lineno=None, end_col_offset=None, parent=None)
+        self.node: nodes.NodeNG = node or nodes.NodeNG(
+            lineno=None,
+            col_offset=None,
+            end_lineno=None,
+            end_col_offset=None,
+            parent=None,
+        )
         self.shape = self.default_shape


 class PackageEntity(DiagramEntity):
     """A diagram object representing a package."""
-    default_shape = 'package'
+
+    default_shape = "package"


 class ClassEntity(DiagramEntity):
     """A diagram object representing a class."""
-    default_shape = 'class'

-    def __init__(self, title: str, node: nodes.ClassDef) ->None:
+    default_shape = "class"
+
+    def __init__(self, title: str, node: nodes.ClassDef) -> None:
         super().__init__(title=title, node=node)
         self.attrs: list[str] = []
         self.methods: list[nodes.FunctionDef] = []
@@ -57,89 +79,253 @@ class ClassEntity(DiagramEntity):

 class ClassDiagram(Figure, FilterMixIn):
     """Main class diagram handling."""
-    TYPE = 'class'

-    def __init__(self, title: str, mode: str) ->None:
+    TYPE = "class"
+
+    def __init__(self, title: str, mode: str) -> None:
         FilterMixIn.__init__(self, mode)
         Figure.__init__(self)
         self.title = title
+        # TODO: Specify 'Any' after refactor of `DiagramEntity`
         self.objects: list[Any] = []
         self.relationships: dict[str, list[Relationship]] = {}
         self._nodes: dict[nodes.NodeNG, DiagramEntity] = {}

-    def add_relationship(self, from_object: DiagramEntity, to_object:
-        DiagramEntity, relation_type: str, name: (str | None)=None) ->None:
+    def get_relationships(self, role: str) -> Iterable[Relationship]:
+        # sorted to get predictable (hence testable) results
+        return sorted(
+            self.relationships.get(role, ()),
+            key=lambda x: (x.from_object.fig_id, x.to_object.fig_id),
+        )
+
+    def add_relationship(
+        self,
+        from_object: DiagramEntity,
+        to_object: DiagramEntity,
+        relation_type: str,
+        name: str | None = None,
+    ) -> None:
         """Create a relationship."""
-        pass
+        rel = Relationship(from_object, to_object, relation_type, name)
+        self.relationships.setdefault(relation_type, []).append(rel)

-    def get_relationship(self, from_object: DiagramEntity, relation_type: str
-        ) ->Relationship:
+    def get_relationship(
+        self, from_object: DiagramEntity, relation_type: str
+    ) -> Relationship:
         """Return a relationship or None."""
-        pass
+        for rel in self.relationships.get(relation_type, ()):
+            if rel.from_object is from_object:
+                return rel
+        raise KeyError(relation_type)

-    def get_attrs(self, node: nodes.ClassDef) ->list[str]:
+    def get_attrs(self, node: nodes.ClassDef) -> list[str]:
         """Return visible attributes, possibly with class name."""
-        pass
-
-    def get_methods(self, node: nodes.ClassDef) ->list[nodes.FunctionDef]:
+        attrs = []
+        properties = {
+            local_name: local_node
+            for local_name, local_node in node.items()
+            if isinstance(local_node, nodes.FunctionDef)
+            and decorated_with_property(local_node)
+        }
+        for attr_name, attr_type in list(node.locals_type.items()) + list(
+            node.instance_attrs_type.items()
+        ):
+            if attr_name not in properties:
+                properties[attr_name] = attr_type
+
+        for node_name, associated_nodes in properties.items():
+            if not self.show_attr(node_name):
+                continue
+            names = self.class_names(associated_nodes)
+            if names:
+                node_name = f"{node_name} : {', '.join(names)}"
+            attrs.append(node_name)
+        return sorted(attrs)
+
+    def get_methods(self, node: nodes.ClassDef) -> list[nodes.FunctionDef]:
         """Return visible methods."""
-        pass
-
-    def add_object(self, title: str, node: nodes.ClassDef) ->None:
+        methods = [
+            m
+            for m in node.values()
+            if isinstance(m, nodes.FunctionDef)
+            and not isinstance(m, astroid.objects.Property)
+            and not decorated_with_property(m)
+            and self.show_attr(m.name)
+        ]
+        return sorted(methods, key=lambda n: n.name)
+
+    def add_object(self, title: str, node: nodes.ClassDef) -> None:
         """Create a diagram object."""
-        pass
+        assert node not in self._nodes
+        ent = ClassEntity(title, node)
+        self._nodes[node] = ent
+        self.objects.append(ent)

-    def class_names(self, nodes_lst: Iterable[nodes.NodeNG]) ->list[str]:
+    def class_names(self, nodes_lst: Iterable[nodes.NodeNG]) -> list[str]:
         """Return class names if needed in diagram."""
-        pass
-
-    def has_node(self, node: nodes.NodeNG) ->bool:
+        names = []
+        for node in nodes_lst:
+            if isinstance(node, astroid.Instance):
+                node = node._proxied
+            if (
+                isinstance(
+                    node, (nodes.ClassDef, nodes.Name, nodes.Subscript, nodes.BinOp)
+                )
+                and hasattr(node, "name")
+                and not self.has_node(node)
+            ):
+                if node.name not in names:
+                    node_name = node.name
+                    names.append(node_name)
+        # sorted to get predictable (hence testable) results
+        return sorted(
+            name
+            for name in names
+            if all(name not in other or name == other for other in names)
+        )
+
+    def has_node(self, node: nodes.NodeNG) -> bool:
         """Return true if the given node is included in the diagram."""
-        pass
+        return node in self._nodes

-    def object_from_node(self, node: nodes.NodeNG) ->DiagramEntity:
+    def object_from_node(self, node: nodes.NodeNG) -> DiagramEntity:
         """Return the diagram object mapped to node."""
-        pass
+        return self._nodes[node]

-    def classes(self) ->list[ClassEntity]:
+    def classes(self) -> list[ClassEntity]:
         """Return all class nodes in the diagram."""
-        pass
+        return [o for o in self.objects if isinstance(o, ClassEntity)]

-    def classe(self, name: str) ->ClassEntity:
+    def classe(self, name: str) -> ClassEntity:
         """Return a class by its name, raise KeyError if not found."""
-        pass
+        for klass in self.classes():
+            if klass.node.name == name:
+                return klass
+        raise KeyError(name)

-    def extract_relationships(self) ->None:
+    def extract_relationships(self) -> None:
         """Extract relationships between nodes in the diagram."""
-        pass
+        for obj in self.classes():
+            node = obj.node
+            obj.attrs = self.get_attrs(node)
+            obj.methods = self.get_methods(node)
+            obj.shape = "class"
+            # inheritance link
+            for par_node in node.ancestors(recurs=False):
+                try:
+                    par_obj = self.object_from_node(par_node)
+                    self.add_relationship(obj, par_obj, "specialization")
+                except KeyError:
+                    continue
+
+            # associations & aggregations links
+            for name, values in list(node.aggregations_type.items()):
+                for value in values:
+                    self.assign_association_relationship(
+                        value, obj, name, "aggregation"
+                    )
+
+            associations = node.associations_type.copy()
+
+            for name, values in node.locals_type.items():
+                if name not in associations:
+                    associations[name] = values
+
+            for name, values in associations.items():
+                for value in values:
+                    self.assign_association_relationship(
+                        value, obj, name, "association"
+                    )
+
+    def assign_association_relationship(
+        self, value: astroid.NodeNG, obj: ClassEntity, name: str, type_relationship: str
+    ) -> None:
+        if isinstance(value, util.UninferableBase):
+            return
+        if isinstance(value, astroid.Instance):
+            value = value._proxied
+        try:
+            associated_obj = self.object_from_node(value)
+            self.add_relationship(associated_obj, obj, type_relationship, name)
+        except KeyError:
+            return


 class PackageDiagram(ClassDiagram):
     """Package diagram handling."""
-    TYPE = 'package'

-    def modules(self) ->list[PackageEntity]:
+    TYPE = "package"
+
+    def modules(self) -> list[PackageEntity]:
         """Return all module nodes in the diagram."""
-        pass
+        return [o for o in self.objects if isinstance(o, PackageEntity)]

-    def module(self, name: str) ->PackageEntity:
+    def module(self, name: str) -> PackageEntity:
         """Return a module by its name, raise KeyError if not found."""
-        pass
+        for mod in self.modules():
+            if mod.node.name == name:
+                return mod
+        raise KeyError(name)

-    def add_object(self, title: str, node: nodes.Module) ->None:
+    def add_object(self, title: str, node: nodes.Module) -> None:
         """Create a diagram object."""
-        pass
+        assert node not in self._nodes
+        ent = PackageEntity(title, node)
+        self._nodes[node] = ent
+        self.objects.append(ent)

-    def get_module(self, name: str, node: nodes.Module) ->PackageEntity:
+    def get_module(self, name: str, node: nodes.Module) -> PackageEntity:
         """Return a module by its name, looking also for relative imports;
         raise KeyError if not found.
         """
-        pass
-
-    def add_from_depend(self, node: nodes.ImportFrom, from_module: str) ->None:
+        for mod in self.modules():
+            mod_name = mod.node.name
+            if mod_name == name:
+                return mod
+            # search for fullname of relative import modules
+            package = node.root().name
+            if mod_name == f"{package}.{name}":
+                return mod
+            if mod_name == f"{package.rsplit('.', 1)[0]}.{name}":
+                return mod
+        raise KeyError(name)
+
+    def add_from_depend(self, node: nodes.ImportFrom, from_module: str) -> None:
         """Add dependencies created by from-imports."""
-        pass
+        mod_name = node.root().name
+        package = self.module(mod_name).node
+
+        if from_module in package.depends:
+            return
+
+        if not in_type_checking_block(node):
+            package.depends.append(from_module)
+        elif from_module not in package.type_depends:
+            package.type_depends.append(from_module)

-    def extract_relationships(self) ->None:
+    def extract_relationships(self) -> None:
         """Extract relationships between nodes in the diagram."""
-        pass
+        super().extract_relationships()
+        for class_obj in self.classes():
+            # ownership
+            try:
+                mod = self.object_from_node(class_obj.node.root())
+                self.add_relationship(class_obj, mod, "ownership")
+            except KeyError:
+                continue
+        for package_obj in self.modules():
+            package_obj.shape = "package"
+            # dependencies
+            for dep_name in package_obj.node.depends:
+                try:
+                    dep = self.get_module(dep_name, package_obj.node)
+                except KeyError:
+                    continue
+                self.add_relationship(package_obj, dep, "depends")
+
+            for dep_name in package_obj.node.type_depends:
+                try:
+                    dep = self.get_module(dep_name, package_obj.node)
+                except KeyError:  # pragma: no cover
+                    continue
+                self.add_relationship(package_obj, dep, "type_depends")
diff --git a/pylint/pyreverse/dot_printer.py b/pylint/pyreverse/dot_printer.py
index 30da959a1..4baed6c3c 100644
--- a/pylint/pyreverse/dot_printer.py
+++ b/pylint/pyreverse/dot_printer.py
@@ -1,11 +1,19 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Class to generate files in dot format and image formats supported by Graphviz."""
+
 from __future__ import annotations
+
 import os
 import subprocess
 import tempfile
 from enum import Enum
 from pathlib import Path
+
 from astroid import nodes
+
 from pylint.pyreverse.printer import EdgeType, Layout, NodeProperties, NodeType, Printer
 from pylint.pyreverse.utils import get_annotation_label

@@ -14,44 +22,163 @@ class HTMLLabels(Enum):
     LINEBREAK_LEFT = '<br ALIGN="LEFT"/>'


-ALLOWED_CHARSETS: frozenset[str] = frozenset(('utf-8', 'iso-8859-1', 'latin1'))
-SHAPES: dict[NodeType, str] = {NodeType.PACKAGE: 'box', NodeType.CLASS:
-    'record'}
-ARROWS: dict[EdgeType, dict[str, str]] = {EdgeType.INHERITS: {'arrowtail':
-    'none', 'arrowhead': 'empty'}, EdgeType.ASSOCIATION: {'fontcolor':
-    'green', 'arrowtail': 'none', 'arrowhead': 'diamond', 'style': 'solid'},
-    EdgeType.AGGREGATION: {'fontcolor': 'green', 'arrowtail': 'none',
-    'arrowhead': 'odiamond', 'style': 'solid'}, EdgeType.USES: {'arrowtail':
-    'none', 'arrowhead': 'open'}, EdgeType.TYPE_DEPENDENCY: {'arrowtail':
-    'none', 'arrowhead': 'open', 'style': 'dashed'}}
+ALLOWED_CHARSETS: frozenset[str] = frozenset(("utf-8", "iso-8859-1", "latin1"))
+SHAPES: dict[NodeType, str] = {
+    NodeType.PACKAGE: "box",
+    NodeType.CLASS: "record",
+}
+# pylint: disable-next=consider-using-namedtuple-or-dataclass
+ARROWS: dict[EdgeType, dict[str, str]] = {
+    EdgeType.INHERITS: {"arrowtail": "none", "arrowhead": "empty"},
+    EdgeType.ASSOCIATION: {
+        "fontcolor": "green",
+        "arrowtail": "none",
+        "arrowhead": "diamond",
+        "style": "solid",
+    },
+    EdgeType.AGGREGATION: {
+        "fontcolor": "green",
+        "arrowtail": "none",
+        "arrowhead": "odiamond",
+        "style": "solid",
+    },
+    EdgeType.USES: {"arrowtail": "none", "arrowhead": "open"},
+    EdgeType.TYPE_DEPENDENCY: {
+        "arrowtail": "none",
+        "arrowhead": "open",
+        "style": "dashed",
+    },
+}


 class DotPrinter(Printer):
-    DEFAULT_COLOR = 'black'
+    DEFAULT_COLOR = "black"

-    def __init__(self, title: str, layout: (Layout | None)=None,
-        use_automatic_namespace: (bool | None)=None):
+    def __init__(
+        self,
+        title: str,
+        layout: Layout | None = None,
+        use_automatic_namespace: bool | None = None,
+    ):
         layout = layout or Layout.BOTTOM_TO_TOP
-        self.charset = 'utf-8'
+        self.charset = "utf-8"
         super().__init__(title, layout, use_automatic_namespace)

-    def _open_graph(self) ->None:
+    def _open_graph(self) -> None:
         """Emit the header lines."""
-        pass
+        self.emit(f'digraph "{self.title}" {{')
+        if self.layout:
+            self.emit(f"rankdir={self.layout.value}")
+        if self.charset:
+            assert (
+                self.charset.lower() in ALLOWED_CHARSETS
+            ), f"unsupported charset {self.charset}"
+            self.emit(f'charset="{self.charset}"')

-    def emit_node(self, name: str, type_: NodeType, properties: (
-        NodeProperties | None)=None) ->None:
+    def emit_node(
+        self,
+        name: str,
+        type_: NodeType,
+        properties: NodeProperties | None = None,
+    ) -> None:
         """Create a new node.

         Nodes can be classes, packages, participants etc.
         """
-        pass
+        if properties is None:
+            properties = NodeProperties(label=name)
+        shape = SHAPES[type_]
+        color = properties.color if properties.color is not None else self.DEFAULT_COLOR
+        style = "filled" if color != self.DEFAULT_COLOR else "solid"
+        label = self._build_label_for_node(properties)
+        label_part = f", label=<{label}>" if label else ""
+        fontcolor_part = (
+            f', fontcolor="{properties.fontcolor}"' if properties.fontcolor else ""
+        )
+        self.emit(
+            f'"{name}" [color="{color}"{fontcolor_part}{label_part}, shape="{shape}", style="{style}"];'
+        )
+
+    def _build_label_for_node(self, properties: NodeProperties) -> str:
+        if not properties.label:
+            return ""

-    def emit_edge(self, from_node: str, to_node: str, type_: EdgeType,
-        label: (str | None)=None) ->None:
+        label: str = properties.label
+        if properties.attrs is None and properties.methods is None:
+            # return a "compact" form which only displays the class name in a box
+            return label
+
+        # Add class attributes
+        attrs: list[str] = properties.attrs or []
+        attrs_string = rf"{HTMLLabels.LINEBREAK_LEFT.value}".join(
+            attr.replace("|", r"\|") for attr in attrs
+        )
+        label = rf"{{{label}|{attrs_string}{HTMLLabels.LINEBREAK_LEFT.value}|"
+
+        # Add class methods
+        methods: list[nodes.FunctionDef] = properties.methods or []
+        for func in methods:
+            args = ", ".join(self._get_method_arguments(func)).replace("|", r"\|")
+            method_name = (
+                f"<I>{func.name}</I>" if func.is_abstract() else f"{func.name}"
+            )
+            label += rf"{method_name}({args})"
+            if func.returns:
+                annotation_label = get_annotation_label(func.returns)
+                label += ": " + self._escape_annotation_label(annotation_label)
+            label += rf"{HTMLLabels.LINEBREAK_LEFT.value}"
+        label += "}"
+        return label
+
+    def _escape_annotation_label(self, annotation_label: str) -> str:
+        # Escape vertical bar characters to make them appear as literal characters,
+        # otherwise they get treated as a field separator of record-based nodes
+        annotation_label = annotation_label.replace("|", r"\|")
+
+        return annotation_label
+
+    def emit_edge(
+        self,
+        from_node: str,
+        to_node: str,
+        type_: EdgeType,
+        label: str | None = None,
+    ) -> None:
         """Create an edge from one node to another to display relationships."""
-        pass
+        arrowstyle = ARROWS[type_]
+        attrs = [f'{prop}="{value}"' for prop, value in arrowstyle.items()]
+        if label:
+            attrs.append(f'label="{label}"')
+        self.emit(f'"{from_node}" -> "{to_node}" [{", ".join(sorted(attrs))}];')
+
+    def generate(self, outputfile: str) -> None:
+        self._close_graph()
+        graphviz_extensions = ("dot", "gv")
+        name = self.title
+        if outputfile is None:
+            target = "png"
+            pdot, dot_sourcepath = tempfile.mkstemp(".gv", name)
+            ppng, outputfile = tempfile.mkstemp(".png", name)
+            os.close(pdot)
+            os.close(ppng)
+        else:
+            target = Path(outputfile).suffix.lstrip(".")
+            if not target:
+                target = "png"
+                outputfile = outputfile + "." + target
+            if target not in graphviz_extensions:
+                pdot, dot_sourcepath = tempfile.mkstemp(".gv", name)
+                os.close(pdot)
+            else:
+                dot_sourcepath = outputfile
+        with open(dot_sourcepath, "w", encoding="utf8") as outfile:
+            outfile.writelines(self.lines)
+        if target not in graphviz_extensions:
+            subprocess.run(
+                ["dot", "-T", target, dot_sourcepath, "-o", outputfile], check=True
+            )
+            os.unlink(dot_sourcepath)

-    def _close_graph(self) ->None:
+    def _close_graph(self) -> None:
         """Emit the lines needed to properly close the graph."""
-        pass
+        self.emit("}\n")
diff --git a/pylint/pyreverse/inspector.py b/pylint/pyreverse/inspector.py
index 6af79d52a..23ccfa6f3 100644
--- a/pylint/pyreverse/inspector.py
+++ b/pylint/pyreverse/inspector.py
@@ -1,43 +1,70 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Visitor doing some post-processing on the astroid tree.

 Try to resolve definitions (namespace) dictionary, relationship...
 """
+
 from __future__ import annotations
+
 import collections
 import os
 import traceback
 from abc import ABC, abstractmethod
 from typing import Callable, Optional
+
 import astroid
 from astroid import nodes
+
 from pylint import constants
 from pylint.pyreverse import utils
-_WrapperFuncT = Callable[[Callable[[str], nodes.Module], str, bool],
-    Optional[nodes.Module]]
+
+_WrapperFuncT = Callable[
+    [Callable[[str], nodes.Module], str, bool], Optional[nodes.Module]
+]
+
+
+def _astroid_wrapper(
+    func: Callable[[str], nodes.Module],
+    modname: str,
+    verbose: bool = False,
+) -> nodes.Module | None:
+    if verbose:
+        print(f"parsing {modname}...")
+    try:
+        return func(modname)
+    except astroid.exceptions.AstroidBuildingError as exc:
+        print(exc)
+    except Exception:  # pylint: disable=broad-except
+        traceback.print_exc()
+    return None


 class IdGeneratorMixIn:
     """Mixin adding the ability to generate integer uid."""

-    def __init__(self, start_value: int=0) ->None:
+    def __init__(self, start_value: int = 0) -> None:
         self.id_count = start_value

-    def init_counter(self, start_value: int=0) ->None:
+    def init_counter(self, start_value: int = 0) -> None:
         """Init the id counter."""
-        pass
+        self.id_count = start_value

-    def generate_id(self) ->int:
+    def generate_id(self) -> int:
         """Generate a new identifier."""
-        pass
+        self.id_count += 1
+        return self.id_count


 class Project:
     """A project handle a set of modules / packages."""

-    def __init__(self, name: str=''):
+    def __init__(self, name: str = ""):
         self.name = name
         self.uid: int | None = None
-        self.path: str = ''
+        self.path: str = ""
         self.modules: list[nodes.Module] = []
         self.locals: dict[str, nodes.Module] = {}
         self.__getitem__ = self.locals.__getitem__
@@ -46,10 +73,18 @@ class Project:
         self.keys = self.locals.keys
         self.items = self.locals.items

-    def __repr__(self) ->str:
-        return (
-            f'<Project {self.name!r} at {id(self)} ({len(self.modules)} modules)>'
-            )
+    def add_module(self, node: nodes.Module) -> None:
+        self.locals[node.name] = node
+        self.modules.append(node)
+
+    def get_module(self, name: str) -> nodes.Module:
+        return self.locals[name]
+
+    def get_children(self) -> list[nodes.Module]:
+        return self.modules
+
+    def __repr__(self) -> str:
+        return f"<Project {self.name!r} at {id(self)} ({len(self.modules)} modules)>"


 class Linker(IdGeneratorMixIn, utils.LocalsVisitor):
@@ -78,88 +113,190 @@ class Linker(IdGeneratorMixIn, utils.LocalsVisitor):
       as instance_attrs_type but for aggregations relationships
     """

-    def __init__(self, project: Project, tag: bool=False) ->None:
+    def __init__(self, project: Project, tag: bool = False) -> None:
         IdGeneratorMixIn.__init__(self)
         utils.LocalsVisitor.__init__(self)
+        # tag nodes or not
         self.tag = tag
+        # visited project
         self.project = project
         self.associations_handler = AggregationsHandler()
         self.associations_handler.set_next(OtherAssociationsHandler())

-    def visit_project(self, node: Project) ->None:
+    def visit_project(self, node: Project) -> None:
         """Visit a pyreverse.utils.Project node.

         * optionally tag the node with a unique id
         """
-        pass
+        if self.tag:
+            node.uid = self.generate_id()
+        for module in node.modules:
+            self.visit(module)

-    def visit_module(self, node: nodes.Module) ->None:
+    def visit_module(self, node: nodes.Module) -> None:
         """Visit an astroid.Module node.

         * set the locals_type mapping
         * set the depends mapping
         * optionally tag the node with a unique id
         """
-        pass
-
-    def visit_classdef(self, node: nodes.ClassDef) ->None:
+        if hasattr(node, "locals_type"):
+            return
+        node.locals_type = collections.defaultdict(list)
+        node.depends = []
+        node.type_depends = []
+        if self.tag:
+            node.uid = self.generate_id()
+
+    def visit_classdef(self, node: nodes.ClassDef) -> None:
         """Visit an astroid.Class node.

         * set the locals_type and instance_attrs_type mappings
         * optionally tag the node with a unique id
         """
-        pass
-
-    def visit_functiondef(self, node: nodes.FunctionDef) ->None:
+        if hasattr(node, "locals_type"):
+            return
+        node.locals_type = collections.defaultdict(list)
+        if self.tag:
+            node.uid = self.generate_id()
+        # resolve ancestors
+        for baseobj in node.ancestors(recurs=False):
+            specializations = getattr(baseobj, "specializations", [])
+            specializations.append(node)
+            baseobj.specializations = specializations
+        # resolve instance attributes
+        node.instance_attrs_type = collections.defaultdict(list)
+        node.aggregations_type = collections.defaultdict(list)
+        node.associations_type = collections.defaultdict(list)
+        for assignattrs in tuple(node.instance_attrs.values()):
+            for assignattr in assignattrs:
+                if not isinstance(assignattr, nodes.Unknown):
+                    self.associations_handler.handle(assignattr, node)
+                    self.handle_assignattr_type(assignattr, node)
+
+    def visit_functiondef(self, node: nodes.FunctionDef) -> None:
         """Visit an astroid.Function node.

         * set the locals_type mapping
         * optionally tag the node with a unique id
         """
-        pass
+        if hasattr(node, "locals_type"):
+            return
+        node.locals_type = collections.defaultdict(list)
+        if self.tag:
+            node.uid = self.generate_id()

-    def visit_assignname(self, node: nodes.AssignName) ->None:
+    def visit_assignname(self, node: nodes.AssignName) -> None:
         """Visit an astroid.AssignName node.

         handle locals_type
         """
-        pass
+        # avoid double parsing done by different Linkers.visit
+        # running over the same project:
+        if hasattr(node, "_handled"):
+            return
+        node._handled = True
+        if node.name in node.frame():
+            frame = node.frame()
+        else:
+            # the name has been defined as 'global' in the frame and belongs
+            # there.
+            frame = node.root()
+        if not hasattr(frame, "locals_type"):
+            # If the frame doesn't have a locals_type yet,
+            # it means it wasn't yet visited. Visit it now
+            # to add what's missing from it.
+            if isinstance(frame, nodes.ClassDef):
+                self.visit_classdef(frame)
+            elif isinstance(frame, nodes.FunctionDef):
+                self.visit_functiondef(frame)
+            else:
+                self.visit_module(frame)
+
+        current = frame.locals_type[node.name]
+        frame.locals_type[node.name] = list(set(current) | utils.infer_node(node))

     @staticmethod
-    def handle_assignattr_type(node: nodes.AssignAttr, parent: nodes.ClassDef
-        ) ->None:
+    def handle_assignattr_type(node: nodes.AssignAttr, parent: nodes.ClassDef) -> None:
         """Handle an astroid.assignattr node.

         handle instance_attrs_type
         """
-        pass
+        current = set(parent.instance_attrs_type[node.attrname])
+        parent.instance_attrs_type[node.attrname] = list(
+            current | utils.infer_node(node)
+        )

-    def visit_import(self, node: nodes.Import) ->None:
+    def visit_import(self, node: nodes.Import) -> None:
         """Visit an astroid.Import node.

         resolve module dependencies
         """
-        pass
+        context_file = node.root().file
+        for name in node.names:
+            relative = astroid.modutils.is_relative(name[0], context_file)
+            self._imported_module(node, name[0], relative)

-    def visit_importfrom(self, node: nodes.ImportFrom) ->None:
+    def visit_importfrom(self, node: nodes.ImportFrom) -> None:
         """Visit an astroid.ImportFrom node.

         resolve module dependencies
         """
-        pass
-
-    def compute_module(self, context_name: str, mod_path: str) ->bool:
+        basename = node.modname
+        context_file = node.root().file
+        if context_file is not None:
+            relative = astroid.modutils.is_relative(basename, context_file)
+        else:
+            relative = False
+        for name in node.names:
+            if name[0] == "*":
+                continue
+            # analyze dependencies
+            fullname = f"{basename}.{name[0]}"
+            if fullname.find(".") > -1:
+                try:
+                    fullname = astroid.modutils.get_module_part(fullname, context_file)
+                except ImportError:
+                    continue
+            if fullname != basename:
+                self._imported_module(node, fullname, relative)
+
+    def compute_module(self, context_name: str, mod_path: str) -> bool:
         """Should the module be added to dependencies ?"""
-        pass
+        package_dir = os.path.dirname(self.project.path)
+        if context_name == mod_path:
+            return False
+        # astroid does return a boolean but is not typed correctly yet

-    def _imported_module(self, node: (nodes.Import | nodes.ImportFrom),
-        mod_path: str, relative: bool) ->None:
+        return astroid.modutils.module_in_path(mod_path, (package_dir,))  # type: ignore[no-any-return]
+
+    def _imported_module(
+        self, node: nodes.Import | nodes.ImportFrom, mod_path: str, relative: bool
+    ) -> None:
         """Notify an imported module, used to analyze dependencies."""
-        pass
+        module = node.root()
+        context_name = module.name
+        if relative:
+            mod_path = f"{'.'.join(context_name.split('.')[:-1])}.{mod_path}"
+        if self.compute_module(context_name, mod_path):
+            # handle dependencies
+            if not hasattr(module, "depends"):
+                module.depends = []
+            mod_paths = module.depends
+            if mod_path not in mod_paths:
+                mod_paths.append(mod_path)


 class AssociationHandlerInterface(ABC):
-    pass
+    @abstractmethod
+    def set_next(
+        self, handler: AssociationHandlerInterface
+    ) -> AssociationHandlerInterface:
+        pass
+
+    @abstractmethod
+    def handle(self, node: nodes.AssignAttr, parent: nodes.ClassDef) -> None:
+        pass


 class AbstractAssociationHandler(AssociationHandlerInterface):
@@ -173,19 +310,72 @@ class AbstractAssociationHandler(AssociationHandlerInterface):
     The default chaining behavior is implemented inside the base handler
     class.
     """
+
     _next_handler: AssociationHandlerInterface

+    def set_next(
+        self, handler: AssociationHandlerInterface
+    ) -> AssociationHandlerInterface:
+        self._next_handler = handler
+        return handler

-class AggregationsHandler(AbstractAssociationHandler):
-    pass
+    @abstractmethod
+    def handle(self, node: nodes.AssignAttr, parent: nodes.ClassDef) -> None:
+        if self._next_handler:
+            self._next_handler.handle(node, parent)


-class OtherAssociationsHandler(AbstractAssociationHandler):
-    pass
+class AggregationsHandler(AbstractAssociationHandler):
+    def handle(self, node: nodes.AssignAttr, parent: nodes.ClassDef) -> None:
+        if isinstance(node.parent, (nodes.AnnAssign, nodes.Assign)) and isinstance(
+            node.parent.value, astroid.node_classes.Name
+        ):
+            current = set(parent.aggregations_type[node.attrname])
+            parent.aggregations_type[node.attrname] = list(
+                current | utils.infer_node(node)
+            )
+        else:
+            super().handle(node, parent)


-def project_from_files(files: list[str], func_wrapper: _WrapperFuncT=
-    _astroid_wrapper, project_name: str='no name', black_list: tuple[str,
-    ...]=constants.DEFAULT_IGNORE_LIST, verbose: bool=False) ->Project:
+class OtherAssociationsHandler(AbstractAssociationHandler):
+    def handle(self, node: nodes.AssignAttr, parent: nodes.ClassDef) -> None:
+        current = set(parent.associations_type[node.attrname])
+        parent.associations_type[node.attrname] = list(current | utils.infer_node(node))
+
+
+def project_from_files(
+    files: list[str],
+    func_wrapper: _WrapperFuncT = _astroid_wrapper,
+    project_name: str = "no name",
+    black_list: tuple[str, ...] = constants.DEFAULT_IGNORE_LIST,
+    verbose: bool = False,
+) -> Project:
     """Return a Project from a list of files or modules."""
-    pass
+    # build the project representation
+    astroid_manager = astroid.MANAGER
+    project = Project(project_name)
+    for something in files:
+        if not os.path.exists(something):
+            fpath = astroid.modutils.file_from_modpath(something.split("."))
+        elif os.path.isdir(something):
+            fpath = os.path.join(something, "__init__.py")
+        else:
+            fpath = something
+        ast = func_wrapper(astroid_manager.ast_from_file, fpath, verbose)
+        if ast is None:
+            continue
+        project.path = project.path or ast.file
+        project.add_module(ast)
+        base_name = ast.name
+        # recurse in package except if __init__ was explicitly given
+        if ast.package and something.find("__init__") == -1:
+            # recurse on others packages / modules if this is a package
+            for fpath in astroid.modutils.get_module_files(
+                os.path.dirname(ast.file), black_list
+            ):
+                ast = func_wrapper(astroid_manager.ast_from_file, fpath, verbose)
+                if ast is None or ast.name == base_name:
+                    continue
+                project.add_module(ast)
+    return project
diff --git a/pylint/pyreverse/main.py b/pylint/pyreverse/main.py
index e8743de5e..3ba0b6c77 100644
--- a/pylint/pyreverse/main.py
+++ b/pylint/pyreverse/main.py
@@ -1,8 +1,15 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Create UML diagrams for classes and modules in <packages>."""
+
 from __future__ import annotations
+
 import sys
 from collections.abc import Sequence
 from typing import NoReturn
+
 from pylint import constants
 from pylint.config.arguments_manager import _ArgumentsManager
 from pylint.config.arguments_provider import _ArgumentsProvider
@@ -11,15 +18,45 @@ from pylint.lint.utils import augmented_sys_path
 from pylint.pyreverse import writer
 from pylint.pyreverse.diadefslib import DiadefsHandler
 from pylint.pyreverse.inspector import Linker, project_from_files
-from pylint.pyreverse.utils import check_graphviz_availability, check_if_graphviz_supports_format, insert_default_options
+from pylint.pyreverse.utils import (
+    check_graphviz_availability,
+    check_if_graphviz_supports_format,
+    insert_default_options,
+)
 from pylint.typing import Options
-DIRECTLY_SUPPORTED_FORMATS = 'dot', 'puml', 'plantuml', 'mmd', 'html'
-DEFAULT_COLOR_PALETTE = ('#77AADD', '#99DDFF', '#44BB99', '#BBCC33',
-    '#AAAA00', '#EEDD88', '#EE8866', '#FFAABB', '#DDDDDD')
-OPTIONS: Options = (('filter-mode', {'short': 'f', 'default': 'PUB_ONLY',
-    'dest': 'mode', 'type': 'string', 'action': 'store', 'metavar':
-    '<mode>', 'help':
-    """filter attributes and functions according to
+
+DIRECTLY_SUPPORTED_FORMATS = (
+    "dot",
+    "puml",
+    "plantuml",
+    "mmd",
+    "html",
+)
+
+DEFAULT_COLOR_PALETTE = (
+    # colorblind scheme taken from https://personal.sron.nl/~pault/
+    "#77AADD",  # light blue
+    "#99DDFF",  # light cyan
+    "#44BB99",  # mint
+    "#BBCC33",  # pear
+    "#AAAA00",  # olive
+    "#EEDD88",  # light yellow
+    "#EE8866",  # orange
+    "#FFAABB",  # pink
+    "#DDDDDD",  # pale grey
+)
+
+OPTIONS: Options = (
+    (
+        "filter-mode",
+        {
+            "short": "f",
+            "default": "PUB_ONLY",
+            "dest": "mode",
+            "type": "string",
+            "action": "store",
+            "metavar": "<mode>",
+            "help": """filter attributes and functions according to
     <mode>. Correct modes are :
                             'PUB_ONLY' filter all non public attributes
                                 [DEFAULT], equivalent to PRIVATE+SPECIAL_A
@@ -27,87 +64,259 @@ OPTIONS: Options = (('filter-mode', {'short': 'f', 'default': 'PUB_ONLY',
                             'SPECIAL' filter Python special functions
                                 except constructor
                             'OTHER' filter protected and private
-                                attributes"""
-    }), ('class', {'short': 'c', 'action': 'extend', 'metavar': '<class>',
-    'type': 'csv', 'dest': 'classes', 'default': None, 'help':
-    'create a class diagram with all classes related to <class>; this uses by default the options -ASmy'
-    }), ('show-ancestors', {'short': 'a', 'action': 'store', 'metavar':
-    '<ancestor>', 'type': 'int', 'default': None, 'help':
-    'show <ancestor> generations of ancestor classes not in <projects>'}),
-    ('all-ancestors', {'short': 'A', 'default': None, 'action':
-    'store_true', 'help':
-    'show all ancestors off all classes in <projects>'}), (
-    'show-associated', {'short': 's', 'action': 'store', 'metavar':
-    '<association_level>', 'type': 'int', 'default': None, 'help':
-    'show <association_level> levels of associated classes not in <projects>'
-    }), ('all-associated', {'short': 'S', 'default': None, 'action':
-    'store_true', 'help':
-    'show recursively all associated off all associated classes'}), (
-    'show-builtin', {'short': 'b', 'action': 'store_true', 'default': False,
-    'help': 'include builtin objects in representation of classes'}), (
-    'show-stdlib', {'short': 'L', 'action': 'store_true', 'default': False,
-    'help': 'include standard library objects in representation of classes'
-    }), ('module-names', {'short': 'm', 'default': None, 'type': 'yn',
-    'metavar': '<y or n>', 'help':
-    'include module name in representation of classes'}), (
-    'only-classnames', {'short': 'k', 'action': 'store_true', 'default': 
-    False, 'help':
-    "don't show attributes and methods in the class boxes; this disables -f values"
-    }), ('no-standalone', {'action': 'store_true', 'default': False, 'help':
-    'only show nodes with connections'}), ('output', {'short': 'o', 'dest':
-    'output_format', 'action': 'store', 'default': 'dot', 'metavar':
-    '<format>', 'type': 'string', 'help':
-    f"create a *.<format> output file if format is available. Available formats are: {', '.join(DIRECTLY_SUPPORTED_FORMATS)}. Any other format will be tried to create by means of the 'dot' command line tool, which requires a graphviz installation."
-    }), ('colorized', {'dest': 'colorized', 'action': 'store_true',
-    'default': False, 'help':
-    'Use colored output. Classes/modules of the same package get the same color.'
-    }), ('max-color-depth', {'dest': 'max_color_depth', 'action': 'store',
-    'default': 2, 'metavar': '<depth>', 'type': 'int', 'help':
-    'Use separate colors up to package depth of <depth>'}), (
-    'color-palette', {'dest': 'color_palette', 'action': 'store', 'default':
-    DEFAULT_COLOR_PALETTE, 'metavar': '<color1,color2,...>', 'type': 'csv',
-    'help': 'Comma separated list of colors to use'}), ('ignore', {'type':
-    'csv', 'metavar': '<file[,file...]>', 'dest': 'ignore_list', 'default':
-    constants.DEFAULT_IGNORE_LIST, 'help':
-    'Files or directories to be skipped. They should be base names, not paths.'
-    }), ('project', {'default': '', 'type': 'string', 'short': 'p',
-    'metavar': '<project name>', 'help': 'set the project name.'}), (
-    'output-directory', {'default': '', 'type': 'path', 'short': 'd',
-    'action': 'store', 'metavar': '<output_directory>', 'help':
-    'set the output directory path.'}), ('source-roots', {'type':
-    'glob_paths_csv', 'metavar': '<path>[,<path>...]', 'default': (),
-    'help':
-    'Add paths to the list of the source roots. Supports globbing patterns. The source root is an absolute path or a path relative to the current working directory used to determine a package namespace for modules located under the source root.'
-    }), ('verbose', {'action': 'store_true', 'default': False, 'help':
-    'Makes pyreverse more verbose/talkative. Mostly useful for debugging.'}))
+                                attributes""",
+        },
+    ),
+    (
+        "class",
+        {
+            "short": "c",
+            "action": "extend",
+            "metavar": "<class>",
+            "type": "csv",
+            "dest": "classes",
+            "default": None,
+            "help": "create a class diagram with all classes related to <class>;\
+ this uses by default the options -ASmy",
+        },
+    ),
+    (
+        "show-ancestors",
+        {
+            "short": "a",
+            "action": "store",
+            "metavar": "<ancestor>",
+            "type": "int",
+            "default": None,
+            "help": "show <ancestor> generations of ancestor classes not in <projects>",
+        },
+    ),
+    (
+        "all-ancestors",
+        {
+            "short": "A",
+            "default": None,
+            "action": "store_true",
+            "help": "show all ancestors off all classes in <projects>",
+        },
+    ),
+    (
+        "show-associated",
+        {
+            "short": "s",
+            "action": "store",
+            "metavar": "<association_level>",
+            "type": "int",
+            "default": None,
+            "help": "show <association_level> levels of associated classes not in <projects>",
+        },
+    ),
+    (
+        "all-associated",
+        {
+            "short": "S",
+            "default": None,
+            "action": "store_true",
+            "help": "show recursively all associated off all associated classes",
+        },
+    ),
+    (
+        "show-builtin",
+        {
+            "short": "b",
+            "action": "store_true",
+            "default": False,
+            "help": "include builtin objects in representation of classes",
+        },
+    ),
+    (
+        "show-stdlib",
+        {
+            "short": "L",
+            "action": "store_true",
+            "default": False,
+            "help": "include standard library objects in representation of classes",
+        },
+    ),
+    (
+        "module-names",
+        {
+            "short": "m",
+            "default": None,
+            "type": "yn",
+            "metavar": "<y or n>",
+            "help": "include module name in representation of classes",
+        },
+    ),
+    (
+        "only-classnames",
+        {
+            "short": "k",
+            "action": "store_true",
+            "default": False,
+            "help": "don't show attributes and methods in the class boxes; this disables -f values",
+        },
+    ),
+    (
+        "no-standalone",
+        {
+            "action": "store_true",
+            "default": False,
+            "help": "only show nodes with connections",
+        },
+    ),
+    (
+        "output",
+        {
+            "short": "o",
+            "dest": "output_format",
+            "action": "store",
+            "default": "dot",
+            "metavar": "<format>",
+            "type": "string",
+            "help": (
+                "create a *.<format> output file if format is available. Available "
+                f"formats are: {', '.join(DIRECTLY_SUPPORTED_FORMATS)}. Any other "
+                f"format will be tried to create by means of the 'dot' command line "
+                f"tool, which requires a graphviz installation."
+            ),
+        },
+    ),
+    (
+        "colorized",
+        {
+            "dest": "colorized",
+            "action": "store_true",
+            "default": False,
+            "help": "Use colored output. Classes/modules of the same package get the same color.",
+        },
+    ),
+    (
+        "max-color-depth",
+        {
+            "dest": "max_color_depth",
+            "action": "store",
+            "default": 2,
+            "metavar": "<depth>",
+            "type": "int",
+            "help": "Use separate colors up to package depth of <depth>",
+        },
+    ),
+    (
+        "color-palette",
+        {
+            "dest": "color_palette",
+            "action": "store",
+            "default": DEFAULT_COLOR_PALETTE,
+            "metavar": "<color1,color2,...>",
+            "type": "csv",
+            "help": "Comma separated list of colors to use",
+        },
+    ),
+    (
+        "ignore",
+        {
+            "type": "csv",
+            "metavar": "<file[,file...]>",
+            "dest": "ignore_list",
+            "default": constants.DEFAULT_IGNORE_LIST,
+            "help": "Files or directories to be skipped. They should be base names, not paths.",
+        },
+    ),
+    (
+        "project",
+        {
+            "default": "",
+            "type": "string",
+            "short": "p",
+            "metavar": "<project name>",
+            "help": "set the project name.",
+        },
+    ),
+    (
+        "output-directory",
+        {
+            "default": "",
+            "type": "path",
+            "short": "d",
+            "action": "store",
+            "metavar": "<output_directory>",
+            "help": "set the output directory path.",
+        },
+    ),
+    (
+        "source-roots",
+        {
+            "type": "glob_paths_csv",
+            "metavar": "<path>[,<path>...]",
+            "default": (),
+            "help": "Add paths to the list of the source roots. Supports globbing patterns. The "
+            "source root is an absolute path or a path relative to the current working directory "
+            "used to determine a package namespace for modules located under the source root.",
+        },
+    ),
+    (
+        "verbose",
+        {
+            "action": "store_true",
+            "default": False,
+            "help": "Makes pyreverse more verbose/talkative. Mostly useful for debugging.",
+        },
+    ),
+)


 class Run(_ArgumentsManager, _ArgumentsProvider):
     """Base class providing common behaviour for pyreverse commands."""
+
     options = OPTIONS
-    name = 'pyreverse'
+    name = "pyreverse"

-    def __init__(self, args: Sequence[str]) ->NoReturn:
-        if '--version' in args:
-            print('pyreverse is included in pylint:')
+    def __init__(self, args: Sequence[str]) -> NoReturn:
+        # Immediately exit if user asks for version
+        if "--version" in args:
+            print("pyreverse is included in pylint:")
             print(constants.full_version)
             sys.exit(0)
-        _ArgumentsManager.__init__(self, prog='pyreverse', description=__doc__)
+
+        _ArgumentsManager.__init__(self, prog="pyreverse", description=__doc__)
         _ArgumentsProvider.__init__(self, self)
+
+        # Parse options
         insert_default_options()
         args = self._parse_command_line_configuration(args)
+
         if self.config.output_format not in DIRECTLY_SUPPORTED_FORMATS:
             check_graphviz_availability()
             print(
-                f'Format {self.config.output_format} is not supported natively. Pyreverse will try to generate it using Graphviz...'
-                )
+                f"Format {self.config.output_format} is not supported natively."
+                " Pyreverse will try to generate it using Graphviz..."
+            )
             check_if_graphviz_supports_format(self.config.output_format)
+
         sys.exit(self.run(args))

-    def run(self, args: list[str]) ->int:
+    def run(self, args: list[str]) -> int:
         """Checking arguments and run project."""
-        pass
+        if not args:
+            print(self.help())
+            return 1
+        extra_packages_paths = list(
+            {discover_package_path(arg, self.config.source_roots) for arg in args}
+        )
+        with augmented_sys_path(extra_packages_paths):
+            project = project_from_files(
+                args,
+                project_name=self.config.project,
+                black_list=self.config.ignore_list,
+                verbose=self.config.verbose,
+            )
+            linker = Linker(project, tag=True)
+            handler = DiadefsHandler(self.config)
+            diadefs = handler.get_diadefs(project, linker)
+        writer.DiagramWriter(self.config).write(diadefs)
+        return 0


-if __name__ == '__main__':
+if __name__ == "__main__":
     Run(sys.argv[1:])
diff --git a/pylint/pyreverse/mermaidjs_printer.py b/pylint/pyreverse/mermaidjs_printer.py
index 5c9dac6c4..24fa92776 100644
--- a/pylint/pyreverse/mermaidjs_printer.py
+++ b/pylint/pyreverse/mermaidjs_printer.py
@@ -1,46 +1,112 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Class to generate files in mermaidjs format."""
+
 from __future__ import annotations
+
 from pylint.pyreverse.printer import EdgeType, NodeProperties, NodeType, Printer
 from pylint.pyreverse.utils import get_annotation_label


 class MermaidJSPrinter(Printer):
     """Printer for MermaidJS diagrams."""
-    DEFAULT_COLOR = 'black'
-    NODES: dict[NodeType, str] = {NodeType.CLASS: 'class', NodeType.PACKAGE:
-        'class'}
-    ARROWS: dict[EdgeType, str] = {EdgeType.INHERITS: '--|>', EdgeType.
-        ASSOCIATION: '--*', EdgeType.AGGREGATION: '--o', EdgeType.USES:
-        '-->', EdgeType.TYPE_DEPENDENCY: '..>'}
-
-    def _open_graph(self) ->None:
+
+    DEFAULT_COLOR = "black"
+
+    NODES: dict[NodeType, str] = {
+        NodeType.CLASS: "class",
+        NodeType.PACKAGE: "class",
+    }
+    ARROWS: dict[EdgeType, str] = {
+        EdgeType.INHERITS: "--|>",
+        EdgeType.ASSOCIATION: "--*",
+        EdgeType.AGGREGATION: "--o",
+        EdgeType.USES: "-->",
+        EdgeType.TYPE_DEPENDENCY: "..>",
+    }
+
+    def _open_graph(self) -> None:
         """Emit the header lines."""
-        pass
+        self.emit("classDiagram")
+        self._inc_indent()

-    def emit_node(self, name: str, type_: NodeType, properties: (
-        NodeProperties | None)=None) ->None:
+    def emit_node(
+        self,
+        name: str,
+        type_: NodeType,
+        properties: NodeProperties | None = None,
+    ) -> None:
         """Create a new node.

         Nodes can be classes, packages, participants etc.
         """
-        pass
+        # pylint: disable=duplicate-code
+        if properties is None:
+            properties = NodeProperties(label=name)
+        nodetype = self.NODES[type_]
+        body = []
+        if properties.attrs:
+            body.extend(properties.attrs)
+        if properties.methods:
+            for func in properties.methods:
+                args = self._get_method_arguments(func)
+                line = f"{func.name}({', '.join(args)})"
+                line += "*" if func.is_abstract() else ""
+                if func.returns:
+                    line += f" {get_annotation_label(func.returns)}"
+                body.append(line)
+        name = name.split(".")[-1]
+        self.emit(f"{nodetype} {name} {{")
+        self._inc_indent()
+        for line in body:
+            self.emit(line)
+        self._dec_indent()
+        self.emit("}")

-    def emit_edge(self, from_node: str, to_node: str, type_: EdgeType,
-        label: (str | None)=None) ->None:
+    def emit_edge(
+        self,
+        from_node: str,
+        to_node: str,
+        type_: EdgeType,
+        label: str | None = None,
+    ) -> None:
         """Create an edge from one node to another to display relationships."""
-        pass
+        from_node = from_node.split(".")[-1]
+        to_node = to_node.split(".")[-1]
+        edge = f"{from_node} {self.ARROWS[type_]} {to_node}"
+        if label:
+            edge += f" : {label}"
+        self.emit(edge)

-    def _close_graph(self) ->None:
+    def _close_graph(self) -> None:
         """Emit the lines needed to properly close the graph."""
-        pass
+        self._dec_indent()


 class HTMLMermaidJSPrinter(MermaidJSPrinter):
     """Printer for MermaidJS diagrams wrapped in a html boilerplate."""
+
     HTML_OPEN_BOILERPLATE = """<html>
   <body>
     <script src="https://cdn.jsdelivr.net/npm/mermaid/dist/mermaid.min.js"></script>
       <div class="mermaid">
     """
-    HTML_CLOSE_BOILERPLATE = '\n       </div>\n  </body>\n</html>\n'
+    HTML_CLOSE_BOILERPLATE = """
+       </div>
+  </body>
+</html>
+"""
     GRAPH_INDENT_LEVEL = 4
+
+    def _open_graph(self) -> None:
+        self.emit(self.HTML_OPEN_BOILERPLATE)
+        for _ in range(self.GRAPH_INDENT_LEVEL):
+            self._inc_indent()
+        super()._open_graph()
+
+    def _close_graph(self) -> None:
+        for _ in range(self.GRAPH_INDENT_LEVEL):
+            self._dec_indent()
+        self.emit(self.HTML_CLOSE_BOILERPLATE)
diff --git a/pylint/pyreverse/plantuml_printer.py b/pylint/pyreverse/plantuml_printer.py
index ed05f0b13..379d57a4c 100644
--- a/pylint/pyreverse/plantuml_printer.py
+++ b/pylint/pyreverse/plantuml_printer.py
@@ -1,35 +1,99 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Class to generate files in dot format and image formats supported by Graphviz."""
+
 from __future__ import annotations
+
 from pylint.pyreverse.printer import EdgeType, Layout, NodeProperties, NodeType, Printer
 from pylint.pyreverse.utils import get_annotation_label


 class PlantUmlPrinter(Printer):
     """Printer for PlantUML diagrams."""
-    DEFAULT_COLOR = 'black'
-    NODES: dict[NodeType, str] = {NodeType.CLASS: 'class', NodeType.PACKAGE:
-        'package'}
-    ARROWS: dict[EdgeType, str] = {EdgeType.INHERITS: '--|>', EdgeType.
-        ASSOCIATION: '--*', EdgeType.AGGREGATION: '--o', EdgeType.USES:
-        '-->', EdgeType.TYPE_DEPENDENCY: '..>'}
-
-    def _open_graph(self) ->None:
+
+    DEFAULT_COLOR = "black"
+
+    NODES: dict[NodeType, str] = {
+        NodeType.CLASS: "class",
+        NodeType.PACKAGE: "package",
+    }
+    ARROWS: dict[EdgeType, str] = {
+        EdgeType.INHERITS: "--|>",
+        EdgeType.ASSOCIATION: "--*",
+        EdgeType.AGGREGATION: "--o",
+        EdgeType.USES: "-->",
+        EdgeType.TYPE_DEPENDENCY: "..>",
+    }
+
+    def _open_graph(self) -> None:
         """Emit the header lines."""
-        pass
+        self.emit("@startuml " + self.title)
+        if not self.use_automatic_namespace:
+            self.emit("set namespaceSeparator none")
+        if self.layout:
+            if self.layout is Layout.LEFT_TO_RIGHT:
+                self.emit("left to right direction")
+            elif self.layout is Layout.TOP_TO_BOTTOM:
+                self.emit("top to bottom direction")
+            else:
+                raise ValueError(
+                    f"Unsupported layout {self.layout}. PlantUmlPrinter only "
+                    "supports left to right and top to bottom layout."
+                )

-    def emit_node(self, name: str, type_: NodeType, properties: (
-        NodeProperties | None)=None) ->None:
+    def emit_node(
+        self,
+        name: str,
+        type_: NodeType,
+        properties: NodeProperties | None = None,
+    ) -> None:
         """Create a new node.

         Nodes can be classes, packages, participants etc.
         """
-        pass
+        if properties is None:
+            properties = NodeProperties(label=name)
+        nodetype = self.NODES[type_]
+        if properties.color and properties.color != self.DEFAULT_COLOR:
+            color = f" #{properties.color.lstrip('#')}"
+        else:
+            color = ""
+        body = []
+        if properties.attrs:
+            body.extend(properties.attrs)
+        if properties.methods:
+            for func in properties.methods:
+                args = self._get_method_arguments(func)
+                line = "{abstract}" if func.is_abstract() else ""
+                line += f"{func.name}({', '.join(args)})"
+                if func.returns:
+                    line += " -> " + get_annotation_label(func.returns)
+                body.append(line)
+        label = properties.label if properties.label is not None else name
+        if properties.fontcolor and properties.fontcolor != self.DEFAULT_COLOR:
+            label = f"<color:{properties.fontcolor}>{label}</color>"
+        self.emit(f'{nodetype} "{label}" as {name}{color} {{')
+        self._inc_indent()
+        for line in body:
+            self.emit(line)
+        self._dec_indent()
+        self.emit("}")

-    def emit_edge(self, from_node: str, to_node: str, type_: EdgeType,
-        label: (str | None)=None) ->None:
+    def emit_edge(
+        self,
+        from_node: str,
+        to_node: str,
+        type_: EdgeType,
+        label: str | None = None,
+    ) -> None:
         """Create an edge from one node to another to display relationships."""
-        pass
+        edge = f"{from_node} {self.ARROWS[type_]} {to_node}"
+        if label:
+            edge += f" : {label}"
+        self.emit(edge)

-    def _close_graph(self) ->None:
+    def _close_graph(self) -> None:
         """Emit the lines needed to properly close the graph."""
-        pass
+        self.emit("@enduml")
diff --git a/pylint/pyreverse/printer.py b/pylint/pyreverse/printer.py
index 289b213ad..caa7917ca 100644
--- a/pylint/pyreverse/printer.py
+++ b/pylint/pyreverse/printer.py
@@ -1,30 +1,38 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Base class defining the interface for a printer."""
+
 from __future__ import annotations
+
 from abc import ABC, abstractmethod
 from enum import Enum
 from typing import NamedTuple
+
 from astroid import nodes
+
 from pylint.pyreverse.utils import get_annotation_label


 class NodeType(Enum):
-    CLASS = 'class'
-    PACKAGE = 'package'
+    CLASS = "class"
+    PACKAGE = "package"


 class EdgeType(Enum):
-    INHERITS = 'inherits'
-    ASSOCIATION = 'association'
-    AGGREGATION = 'aggregation'
-    USES = 'uses'
-    TYPE_DEPENDENCY = 'type_dependency'
+    INHERITS = "inherits"
+    ASSOCIATION = "association"
+    AGGREGATION = "aggregation"
+    USES = "uses"
+    TYPE_DEPENDENCY = "type_dependency"


 class Layout(Enum):
-    LEFT_TO_RIGHT = 'LR'
-    RIGHT_TO_LEFT = 'RL'
-    TOP_TO_BOTTOM = 'TB'
-    BOTTOM_TO_TOP = 'BT'
+    LEFT_TO_RIGHT = "LR"
+    RIGHT_TO_LEFT = "RL"
+    TOP_TO_BOTTOM = "TB"
+    BOTTOM_TO_TOP = "BT"


 class NodeProperties(NamedTuple):
@@ -38,50 +46,87 @@ class NodeProperties(NamedTuple):
 class Printer(ABC):
     """Base class defining the interface for a printer."""

-    def __init__(self, title: str, layout: (Layout | None)=None,
-        use_automatic_namespace: (bool | None)=None) ->None:
+    def __init__(
+        self,
+        title: str,
+        layout: Layout | None = None,
+        use_automatic_namespace: bool | None = None,
+    ) -> None:
         self.title: str = title
         self.layout = layout
         self.use_automatic_namespace = use_automatic_namespace
         self.lines: list[str] = []
-        self._indent = ''
+        self._indent = ""
         self._open_graph()

-    def _inc_indent(self) ->None:
+    def _inc_indent(self) -> None:
         """Increment indentation."""
-        pass
+        self._indent += "  "

-    def _dec_indent(self) ->None:
+    def _dec_indent(self) -> None:
         """Decrement indentation."""
-        pass
+        self._indent = self._indent[:-2]

     @abstractmethod
-    def _open_graph(self) ->None:
+    def _open_graph(self) -> None:
         """Emit the header lines, i.e. all boilerplate code that defines things like
         layout etc.
         """
-        pass
+
+    def emit(self, line: str, force_newline: bool | None = True) -> None:
+        if force_newline and not line.endswith("\n"):
+            line += "\n"
+        self.lines.append(self._indent + line)

     @abstractmethod
-    def emit_node(self, name: str, type_: NodeType, properties: (
-        NodeProperties | None)=None) ->None:
+    def emit_node(
+        self,
+        name: str,
+        type_: NodeType,
+        properties: NodeProperties | None = None,
+    ) -> None:
         """Create a new node.

         Nodes can be classes, packages, participants etc.
         """
-        pass

     @abstractmethod
-    def emit_edge(self, from_node: str, to_node: str, type_: EdgeType,
-        label: (str | None)=None) ->None:
+    def emit_edge(
+        self,
+        from_node: str,
+        to_node: str,
+        type_: EdgeType,
+        label: str | None = None,
+    ) -> None:
         """Create an edge from one node to another to display relationships."""
-        pass

-    def generate(self, outputfile: str) ->None:
+    @staticmethod
+    def _get_method_arguments(method: nodes.FunctionDef) -> list[str]:
+        if method.args.args is None:
+            return []
+
+        first_arg = 0 if method.type in {"function", "staticmethod"} else 1
+        arguments: list[nodes.AssignName] = method.args.args[first_arg:]
+
+        annotations = dict(zip(arguments, method.args.annotations[first_arg:]))
+        for arg in arguments:
+            annotation_label = ""
+            ann = annotations.get(arg)
+            if ann:
+                annotation_label = get_annotation_label(ann)
+            annotations[arg] = annotation_label
+
+        return [
+            f"{arg.name}: {ann}" if ann else f"{arg.name}"
+            for arg, ann in annotations.items()
+        ]
+
+    def generate(self, outputfile: str) -> None:
         """Generate and save the final outputfile."""
-        pass
+        self._close_graph()
+        with open(outputfile, "w", encoding="utf-8") as outfile:
+            outfile.writelines(self.lines)

     @abstractmethod
-    def _close_graph(self) ->None:
+    def _close_graph(self) -> None:
         """Emit the lines needed to properly close the graph."""
-        pass
diff --git a/pylint/pyreverse/printer_factory.py b/pylint/pyreverse/printer_factory.py
index 80244671a..fdbe480ed 100644
--- a/pylint/pyreverse/printer_factory.py
+++ b/pylint/pyreverse/printer_factory.py
@@ -1,8 +1,22 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from pylint.pyreverse.dot_printer import DotPrinter
 from pylint.pyreverse.mermaidjs_printer import HTMLMermaidJSPrinter, MermaidJSPrinter
 from pylint.pyreverse.plantuml_printer import PlantUmlPrinter
 from pylint.pyreverse.printer import Printer
-filetype_to_printer: dict[str, type[Printer]] = {'plantuml':
-    PlantUmlPrinter, 'puml': PlantUmlPrinter, 'mmd': MermaidJSPrinter,
-    'html': HTMLMermaidJSPrinter, 'dot': DotPrinter}
+
+filetype_to_printer: dict[str, type[Printer]] = {
+    "plantuml": PlantUmlPrinter,
+    "puml": PlantUmlPrinter,
+    "mmd": MermaidJSPrinter,
+    "html": HTMLMermaidJSPrinter,
+    "dot": DotPrinter,
+}
+
+
+def get_printer_for_filetype(filetype: str) -> type[Printer]:
+    return filetype_to_printer.get(filetype, DotPrinter)
diff --git a/pylint/pyreverse/utils.py b/pylint/pyreverse/utils.py
index 4d75d3e1a..bdd28dc7c 100644
--- a/pylint/pyreverse/utils.py
+++ b/pylint/pyreverse/utils.py
@@ -1,67 +1,118 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Generic classes/functions for pyreverse core/extensions."""
+
 from __future__ import annotations
+
 import os
 import re
 import shutil
 import subprocess
 import sys
 from typing import TYPE_CHECKING, Any, Callable, Optional, Tuple, Union
+
 import astroid
 from astroid import nodes
 from astroid.typing import InferenceResult
+
 if TYPE_CHECKING:
     from pylint.pyreverse.diagrams import ClassDiagram, PackageDiagram
-    _CallbackT = Callable[[nodes.NodeNG], Union[Tuple[ClassDiagram], Tuple[
-        PackageDiagram, ClassDiagram], None]]
+
+    _CallbackT = Callable[
+        [nodes.NodeNG],
+        Union[Tuple[ClassDiagram], Tuple[PackageDiagram, ClassDiagram], None],
+    ]
     _CallbackTupleT = Tuple[Optional[_CallbackT], Optional[_CallbackT]]
-RCFILE = '.pyreverserc'


-def get_default_options() ->list[str]:
+RCFILE = ".pyreverserc"
+
+
+def get_default_options() -> list[str]:
     """Read config file and return list of options."""
-    pass
+    options = []
+    home = os.environ.get("HOME", "")
+    if home:
+        rcfile = os.path.join(home, RCFILE)
+        try:
+            with open(rcfile, encoding="utf-8") as file_handle:
+                options = file_handle.read().split()
+        except OSError:
+            pass  # ignore if no config file found
+    return options


-def insert_default_options() ->None:
+def insert_default_options() -> None:
     """Insert default options to sys.argv."""
-    pass
+    options = get_default_options()
+    options.reverse()
+    for arg in options:
+        sys.argv.insert(1, arg)


-SPECIAL = re.compile('^__([^\\W_]_*)+__$')
-PRIVATE = re.compile('^__(_*[^\\W_])+_?$')
-PROTECTED = re.compile('^_\\w*$')
+# astroid utilities ###########################################################
+SPECIAL = re.compile(r"^__([^\W_]_*)+__$")
+PRIVATE = re.compile(r"^__(_*[^\W_])+_?$")
+PROTECTED = re.compile(r"^_\w*$")


-def get_visibility(name: str) ->str:
+def get_visibility(name: str) -> str:
     """Return the visibility from a name: public, protected, private or special."""
-    pass
+    if SPECIAL.match(name):
+        visibility = "special"
+    elif PRIVATE.match(name):
+        visibility = "private"
+    elif PROTECTED.match(name):
+        visibility = "protected"
+
+    else:
+        visibility = "public"
+    return visibility
+

+def is_exception(node: nodes.ClassDef) -> bool:
+    # bw compatibility
+    return node.type == "exception"  # type: ignore[no-any-return]
+
+
+# Helpers #####################################################################

 _SPECIAL = 2
 _PROTECTED = 4
 _PRIVATE = 8
-MODES = {'ALL': 0, 'PUB_ONLY': _SPECIAL + _PROTECTED + _PRIVATE, 'SPECIAL':
-    _SPECIAL, 'OTHER': _PROTECTED + _PRIVATE}
-VIS_MOD = {'special': _SPECIAL, 'protected': _PROTECTED, 'private':
-    _PRIVATE, 'public': 0}
+MODES = {
+    "ALL": 0,
+    "PUB_ONLY": _SPECIAL + _PROTECTED + _PRIVATE,
+    "SPECIAL": _SPECIAL,
+    "OTHER": _PROTECTED + _PRIVATE,
+}
+VIS_MOD = {
+    "special": _SPECIAL,
+    "protected": _PROTECTED,
+    "private": _PRIVATE,
+    "public": 0,
+}


 class FilterMixIn:
     """Filter nodes according to a mode and nodes' visibility."""

-    def __init__(self, mode: str) ->None:
+    def __init__(self, mode: str) -> None:
         """Init filter modes."""
         __mode = 0
-        for nummod in mode.split('+'):
+        for nummod in mode.split("+"):
             try:
                 __mode += MODES[nummod]
             except KeyError as ex:
-                print(f'Unknown filter mode {ex}', file=sys.stderr)
+                print(f"Unknown filter mode {ex}", file=sys.stderr)
         self.__mode = __mode

-    def show_attr(self, node: (nodes.NodeNG | str)) ->bool:
+    def show_attr(self, node: nodes.NodeNG | str) -> bool:
         """Return true if the node should be treated."""
-        pass
+        visibility = get_visibility(getattr(node, "name", node))
+        return not self.__mode & VIS_MOD[visibility]


 class LocalsVisitor:
@@ -74,46 +125,146 @@ class LocalsVisitor:
     the node in lower case
     """

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         self._cache: dict[type[nodes.NodeNG], _CallbackTupleT] = {}
         self._visited: set[nodes.NodeNG] = set()

-    def get_callbacks(self, node: nodes.NodeNG) ->_CallbackTupleT:
+    def get_callbacks(self, node: nodes.NodeNG) -> _CallbackTupleT:
         """Get callbacks from handler for the visited node."""
-        pass
+        klass = node.__class__
+        methods = self._cache.get(klass)
+        if methods is None:
+            kid = klass.__name__.lower()
+            e_method = getattr(
+                self, f"visit_{kid}", getattr(self, "visit_default", None)
+            )
+            l_method = getattr(
+                self, f"leave_{kid}", getattr(self, "leave_default", None)
+            )
+            self._cache[klass] = (e_method, l_method)
+        else:
+            e_method, l_method = methods
+        return e_method, l_method

-    def visit(self, node: nodes.NodeNG) ->Any:
+    def visit(self, node: nodes.NodeNG) -> Any:
         """Launch the visit starting from the given node."""
-        pass
+        if node in self._visited:
+            return None
+
+        self._visited.add(node)
+        methods = self.get_callbacks(node)
+        if methods[0] is not None:
+            methods[0](node)
+        if hasattr(node, "locals"):  # skip Instance and other proxy
+            for local_node in node.values():
+                self.visit(local_node)
+        if methods[1] is not None:
+            return methods[1](node)
+        return None
+
+
+def get_annotation_label(ann: nodes.Name | nodes.NodeNG) -> str:
+    if isinstance(ann, nodes.Name) and ann.name is not None:
+        return ann.name  # type: ignore[no-any-return]
+    if isinstance(ann, nodes.NodeNG):
+        return ann.as_string()  # type: ignore[no-any-return]
+    return ""


-def get_annotation(node: (nodes.AssignAttr | nodes.AssignName)) ->(nodes.
-    Name | nodes.Subscript | None):
+def get_annotation(
+    node: nodes.AssignAttr | nodes.AssignName,
+) -> nodes.Name | nodes.Subscript | None:
     """Return the annotation for `node`."""
-    pass
+    ann = None
+    if isinstance(node.parent, nodes.AnnAssign):
+        ann = node.parent.annotation
+    elif isinstance(node, nodes.AssignAttr):
+        init_method = node.parent.parent
+        try:
+            annotations = dict(zip(init_method.locals, init_method.args.annotations))
+            ann = annotations.get(node.parent.value.name)
+        except AttributeError:
+            pass
+    else:
+        return ann
+
+    try:
+        default, *_ = node.infer()
+    except astroid.InferenceError:
+        default = ""
+
+    label = get_annotation_label(ann)
+
+    if (
+        ann
+        and getattr(default, "value", "value") is None
+        and not label.startswith("Optional")
+        and (
+            not isinstance(ann, nodes.BinOp)
+            or not any(
+                isinstance(child, nodes.Const) and child.value is None
+                for child in ann.get_children()
+            )
+        )
+    ):
+        label = rf"Optional[{label}]"
+
+    if label and ann:
+        ann.name = label
+    return ann


-def infer_node(node: (nodes.AssignAttr | nodes.AssignName)) ->set[
-    InferenceResult]:
+def infer_node(node: nodes.AssignAttr | nodes.AssignName) -> set[InferenceResult]:
     """Return a set containing the node annotation if it exists
     otherwise return a set of the inferred types using the NodeNG.infer method.
     """
-    pass
+    ann = get_annotation(node)
+    try:
+        if ann:
+            if isinstance(ann, nodes.Subscript) or (
+                isinstance(ann, nodes.BinOp) and ann.op == "|"
+            ):
+                return {ann}
+            return set(ann.infer())
+        return set(node.infer())
+    except astroid.InferenceError:
+        return {ann} if ann else set()


-def check_graphviz_availability() ->None:
+def check_graphviz_availability() -> None:
     """Check if the ``dot`` command is available on the machine.

     This is needed if image output is desired and ``dot`` is used to convert
     from *.dot or *.gv into the final output format.
     """
-    pass
+    if shutil.which("dot") is None:
+        print("'Graphviz' needs to be installed for your chosen output format.")
+        sys.exit(32)


-def check_if_graphviz_supports_format(output_format: str) ->None:
+def check_if_graphviz_supports_format(output_format: str) -> None:
     """Check if the ``dot`` command supports the requested output format.

     This is needed if image output is desired and ``dot`` is used to convert
     from *.gv into the final output format.
     """
-    pass
+    dot_output = subprocess.run(
+        ["dot", "-T?"], capture_output=True, check=False, encoding="utf-8"
+    )
+    match = re.match(
+        pattern=r".*Use one of: (?P<formats>(\S*\s?)+)",
+        string=dot_output.stderr.strip(),
+    )
+    if not match:
+        print(
+            "Unable to determine Graphviz supported output formats. "
+            "Pyreverse will continue, but subsequent error messages "
+            "regarding the output format may come from Graphviz directly."
+        )
+        return
+    supported_formats = match.group("formats")
+    if output_format not in supported_formats.split():
+        print(
+            f"Format {output_format} is not supported by Graphviz. It supports: {supported_formats}"
+        )
+        sys.exit(32)
diff --git a/pylint/pyreverse/writer.py b/pylint/pyreverse/writer.py
index 55d8714f1..093c45959 100644
--- a/pylint/pyreverse/writer.py
+++ b/pylint/pyreverse/writer.py
@@ -1,12 +1,26 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Utilities for creating diagrams."""
+
 from __future__ import annotations
+
 import argparse
 import itertools
 import os
 from collections import defaultdict
 from collections.abc import Iterable
+
 from astroid import modutils, nodes
-from pylint.pyreverse.diagrams import ClassDiagram, ClassEntity, DiagramEntity, PackageDiagram, PackageEntity
+
+from pylint.pyreverse.diagrams import (
+    ClassDiagram,
+    ClassEntity,
+    DiagramEntity,
+    PackageDiagram,
+    PackageEntity,
+)
 from pylint.pyreverse.printer import EdgeType, NodeProperties, NodeType, Printer
 from pylint.pyreverse.printer_factory import get_printer_for_filetype
 from pylint.pyreverse.utils import is_exception
@@ -15,44 +29,171 @@ from pylint.pyreverse.utils import is_exception
 class DiagramWriter:
     """Base class for writing project diagrams."""

-    def __init__(self, config: argparse.Namespace) ->None:
+    def __init__(self, config: argparse.Namespace) -> None:
         self.config = config
-        self.printer_class = get_printer_for_filetype(self.config.output_format
-            )
-        self.printer: Printer
-        self.file_name = ''
+        self.printer_class = get_printer_for_filetype(self.config.output_format)
+        self.printer: Printer  # defined in set_printer
+        self.file_name = ""  # defined in set_printer
         self.depth = self.config.max_color_depth
+        # default colors are an adaptation of the seaborn colorblind palette
         self.available_colors = itertools.cycle(self.config.color_palette)
         self.used_colors: dict[str, str] = {}

-    def write(self, diadefs: Iterable[ClassDiagram | PackageDiagram]) ->None:
+    def write(self, diadefs: Iterable[ClassDiagram | PackageDiagram]) -> None:
         """Write files for <project> according to <diadefs>."""
-        pass
+        for diagram in diadefs:
+            basename = diagram.title.strip().replace("/", "_").replace(" ", "_")
+            file_name = f"{basename}.{self.config.output_format}"
+            if os.path.exists(self.config.output_directory):
+                file_name = os.path.join(self.config.output_directory, file_name)
+            self.set_printer(file_name, basename)
+            if isinstance(diagram, PackageDiagram):
+                self.write_packages(diagram)
+            else:
+                self.write_classes(diagram)
+            self.save()

-    def write_packages(self, diagram: PackageDiagram) ->None:
+    def write_packages(self, diagram: PackageDiagram) -> None:
         """Write a package diagram."""
-        pass
+        module_info: dict[str, dict[str, int]] = {}
+
+        # sorted to get predictable (hence testable) results
+        for module in sorted(diagram.modules(), key=lambda x: x.title):
+            module.fig_id = module.node.qname()

-    def write_classes(self, diagram: ClassDiagram) ->None:
+            if self.config.no_standalone and not any(
+                module in (rel.from_object, rel.to_object)
+                for rel in diagram.get_relationships("depends")
+            ):
+                continue
+
+            self.printer.emit_node(
+                module.fig_id,
+                type_=NodeType.PACKAGE,
+                properties=self.get_package_properties(module),
+            )
+
+            module_info[module.fig_id] = {
+                "imports": 0,
+                "imported": 0,
+            }
+
+        # package dependencies
+        for rel in diagram.get_relationships("depends"):
+            from_id = rel.from_object.fig_id
+            to_id = rel.to_object.fig_id
+
+            self.printer.emit_edge(
+                from_id,
+                to_id,
+                type_=EdgeType.USES,
+            )
+
+            module_info[from_id]["imports"] += 1
+            module_info[to_id]["imported"] += 1
+
+        for rel in diagram.get_relationships("type_depends"):
+            from_id = rel.from_object.fig_id
+            to_id = rel.to_object.fig_id
+
+            self.printer.emit_edge(
+                from_id,
+                to_id,
+                type_=EdgeType.TYPE_DEPENDENCY,
+            )
+
+            module_info[from_id]["imports"] += 1
+            module_info[to_id]["imported"] += 1
+
+        print(
+            f"Analysed {len(module_info)} modules with a total "
+            f"of {sum(mod['imports'] for mod in module_info.values())} imports"
+        )
+
+    def write_classes(self, diagram: ClassDiagram) -> None:
         """Write a class diagram."""
-        pass
+        # sorted to get predictable (hence testable) results
+        for obj in sorted(diagram.objects, key=lambda x: x.title):
+            obj.fig_id = obj.node.qname()
+            if self.config.no_standalone and not any(
+                obj in (rel.from_object, rel.to_object)
+                for rel_type in ("specialization", "association", "aggregation")
+                for rel in diagram.get_relationships(rel_type)
+            ):
+                continue
+
+            self.printer.emit_node(
+                obj.fig_id,
+                type_=NodeType.CLASS,
+                properties=self.get_class_properties(obj),
+            )
+        # inheritance links
+        for rel in diagram.get_relationships("specialization"):
+            self.printer.emit_edge(
+                rel.from_object.fig_id,
+                rel.to_object.fig_id,
+                type_=EdgeType.INHERITS,
+            )
+        associations: dict[str, set[str]] = defaultdict(set)
+        # generate associations
+        for rel in diagram.get_relationships("association"):
+            associations[rel.from_object.fig_id].add(rel.to_object.fig_id)
+            self.printer.emit_edge(
+                rel.from_object.fig_id,
+                rel.to_object.fig_id,
+                label=rel.name,
+                type_=EdgeType.ASSOCIATION,
+            )
+        # generate aggregations
+        for rel in diagram.get_relationships("aggregation"):
+            if rel.to_object.fig_id in associations[rel.from_object.fig_id]:
+                continue
+            self.printer.emit_edge(
+                rel.from_object.fig_id,
+                rel.to_object.fig_id,
+                label=rel.name,
+                type_=EdgeType.AGGREGATION,
+            )

-    def set_printer(self, file_name: str, basename: str) ->None:
+    def set_printer(self, file_name: str, basename: str) -> None:
         """Set printer."""
-        pass
+        self.printer = self.printer_class(basename)
+        self.file_name = file_name

-    def get_package_properties(self, obj: PackageEntity) ->NodeProperties:
+    def get_package_properties(self, obj: PackageEntity) -> NodeProperties:
         """Get label and shape for packages."""
-        pass
+        return NodeProperties(
+            label=obj.title,
+            color=self.get_shape_color(obj) if self.config.colorized else "black",
+        )

-    def get_class_properties(self, obj: ClassEntity) ->NodeProperties:
+    def get_class_properties(self, obj: ClassEntity) -> NodeProperties:
         """Get label and shape for classes."""
-        pass
+        properties = NodeProperties(
+            label=obj.title,
+            attrs=obj.attrs if not self.config.only_classnames else None,
+            methods=obj.methods if not self.config.only_classnames else None,
+            fontcolor="red" if is_exception(obj.node) else "black",
+            color=self.get_shape_color(obj) if self.config.colorized else "black",
+        )
+        return properties

-    def get_shape_color(self, obj: DiagramEntity) ->str:
+    def get_shape_color(self, obj: DiagramEntity) -> str:
         """Get shape color."""
-        pass
+        qualified_name = obj.node.qname()
+        if modutils.is_stdlib_module(qualified_name.split(".", maxsplit=1)[0]):
+            return "grey"
+        if isinstance(obj.node, nodes.ClassDef):
+            package = qualified_name.rsplit(".", maxsplit=2)[0]
+        elif obj.node.package:
+            package = qualified_name
+        else:
+            package = qualified_name.rsplit(".", maxsplit=1)[0]
+        base_name = ".".join(package.split(".", self.depth)[: self.depth])
+        if base_name not in self.used_colors:
+            self.used_colors[base_name] = next(self.available_colors)
+        return self.used_colors[base_name]

-    def save(self) ->None:
+    def save(self) -> None:
         """Write to disk."""
-        pass
+        self.printer.generate(self.file_name)
diff --git a/pylint/reporters/base_reporter.py b/pylint/reporters/base_reporter.py
index 2a79b16e1..d370b1910 100644
--- a/pylint/reporters/base_reporter.py
+++ b/pylint/reporters/base_reporter.py
@@ -1,10 +1,17 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import os
 import sys
 from typing import TYPE_CHECKING, TextIO
+
 from pylint.message import Message
 from pylint.reporters.ureports.nodes import Text
 from pylint.utils import LinterStats
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter
     from pylint.reporters.ureports.nodes import Section
@@ -15,34 +22,43 @@ class BaseReporter:

     symbols: show short symbolic names for messages.
     """
-    extension = ''
-    name = 'base'
+
+    extension = ""
+
+    name = "base"
     """Name of the reporter."""

-    def __init__(self, output: (TextIO | None)=None) ->None:
+    def __init__(self, output: TextIO | None = None) -> None:
         self.linter: PyLinter
         self.section = 0
         self.out: TextIO = output or sys.stdout
         self.messages: list[Message] = []
+        # Build the path prefix to strip to get relative paths
         self.path_strip_prefix = os.getcwd() + os.sep

-    def handle_message(self, msg: Message) ->None:
+    def handle_message(self, msg: Message) -> None:
         """Handle a new message triggered on the current file."""
-        pass
+        self.messages.append(msg)

-    def writeln(self, string: str='') ->None:
+    def writeln(self, string: str = "") -> None:
         """Write a line in the output buffer."""
-        pass
+        print(string, file=self.out)

-    def display_reports(self, layout: Section) ->None:
+    def display_reports(self, layout: Section) -> None:
         """Display results encapsulated in the layout tree."""
-        pass
+        self.section = 0
+        if layout.report_id:
+            if isinstance(layout.children[0].children[0], Text):
+                layout.children[0].children[0].data += f" ({layout.report_id})"
+            else:
+                raise ValueError(f"Incorrect child for {layout.children[0].children}")
+        self._display(layout)

-    def _display(self, layout: Section) ->None:
+    def _display(self, layout: Section) -> None:
         """Display the layout."""
-        pass
+        raise NotImplementedError()

-    def display_messages(self, layout: (Section | None)) ->None:
+    def display_messages(self, layout: Section | None) -> None:
         """Hook for displaying the messages of the reporter.

         This will be called whenever the underlying messages
@@ -52,14 +68,15 @@ class BaseReporter:
         This method can be implemented to display them after they've
         been aggregated.
         """
-        pass

-    def on_set_current_module(self, module: str, filepath: (str | None)
-        ) ->None:
+    # Event callbacks
+
+    def on_set_current_module(self, module: str, filepath: str | None) -> None:
         """Hook called when a module starts to be analysed."""
-        pass

-    def on_close(self, stats: LinterStats, previous_stats: (LinterStats | None)
-        ) ->None:
+    def on_close(
+        self,
+        stats: LinterStats,
+        previous_stats: LinterStats | None,
+    ) -> None:
         """Hook called when a module finished analyzing."""
-        pass
diff --git a/pylint/reporters/collecting_reporter.py b/pylint/reporters/collecting_reporter.py
index fffdba810..943a74d55 100644
--- a/pylint/reporters/collecting_reporter.py
+++ b/pylint/reporters/collecting_reporter.py
@@ -1,14 +1,28 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from pylint.reporters.base_reporter import BaseReporter
+
 if TYPE_CHECKING:
     from pylint.reporters.ureports.nodes import Section


 class CollectingReporter(BaseReporter):
     """Collects messages."""
-    name = 'collector'

-    def __init__(self) ->None:
+    name = "collector"
+
+    def __init__(self) -> None:
         super().__init__()
         self.messages = []
+
+    def reset(self) -> None:
+        self.messages = []
+
+    def _display(self, layout: Section) -> None:
+        pass
diff --git a/pylint/reporters/json_reporter.py b/pylint/reporters/json_reporter.py
index ddf8abc09..7135dfc66 100644
--- a/pylint/reporters/json_reporter.py
+++ b/pylint/reporters/json_reporter.py
@@ -1,18 +1,40 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """JSON reporter."""
+
 from __future__ import annotations
+
 import json
 from typing import TYPE_CHECKING, Optional, TypedDict
+
 from pylint.interfaces import CONFIDENCE_MAP, UNDEFINED
 from pylint.message import Message
 from pylint.reporters.base_reporter import BaseReporter
 from pylint.typing import MessageLocationTuple
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter
     from pylint.reporters.ureports.nodes import Section
-OldJsonExport = TypedDict('OldJsonExport', {'type': str, 'module': str,
-    'obj': str, 'line': int, 'column': int, 'endLine': Optional[int],
-    'endColumn': Optional[int], 'path': str, 'symbol': str, 'message': str,
-    'message-id': str})
+
+# Since message-id is an invalid name we need to use the alternative syntax
+OldJsonExport = TypedDict(
+    "OldJsonExport",
+    {
+        "type": str,
+        "module": str,
+        "obj": str,
+        "line": int,
+        "column": int,
+        "endLine": Optional[int],
+        "endColumn": Optional[int],
+        "path": str,
+        "symbol": str,
+        "message": str,
+        "message-id": str,
+    },
+)


 class JSONReporter(BaseReporter):
@@ -21,20 +43,55 @@ class JSONReporter(BaseReporter):
     Consider using JSON2Reporter instead, as it is superior and this reporter
     is no longer maintained.
     """
-    name = 'json'
-    extension = 'json'

-    def display_messages(self, layout: (Section | None)) ->None:
+    name = "json"
+    extension = "json"
+
+    def display_messages(self, layout: Section | None) -> None:
         """Launch layouts display."""
-        pass
+        json_dumpable = [self.serialize(message) for message in self.messages]
+        print(json.dumps(json_dumpable, indent=4), file=self.out)

-    def display_reports(self, layout: Section) ->None:
+    def display_reports(self, layout: Section) -> None:
         """Don't do anything in this reporter."""
-        pass

-    def _display(self, layout: Section) ->None:
+    def _display(self, layout: Section) -> None:
         """Do nothing."""
-        pass
+
+    @staticmethod
+    def serialize(message: Message) -> OldJsonExport:
+        return {
+            "type": message.category,
+            "module": message.module,
+            "obj": message.obj,
+            "line": message.line,
+            "column": message.column,
+            "endLine": message.end_line,
+            "endColumn": message.end_column,
+            "path": message.path,
+            "symbol": message.symbol,
+            "message": message.msg or "",
+            "message-id": message.msg_id,
+        }
+
+    @staticmethod
+    def deserialize(message_as_json: OldJsonExport) -> Message:
+        return Message(
+            msg_id=message_as_json["message-id"],
+            symbol=message_as_json["symbol"],
+            msg=message_as_json["message"],
+            location=MessageLocationTuple(
+                abspath=message_as_json["path"],
+                path=message_as_json["path"],
+                module=message_as_json["module"],
+                obj=message_as_json["obj"],
+                line=message_as_json["line"],
+                column=message_as_json["column"],
+                end_line=message_as_json["endLine"],
+                end_column=message_as_json["endColumn"],
+            ),
+            confidence=UNDEFINED,
+        )


 class JSONMessage(TypedDict):
@@ -54,21 +111,91 @@ class JSONMessage(TypedDict):


 class JSON2Reporter(BaseReporter):
-    name = 'json2'
-    extension = 'json2'
+    name = "json2"
+    extension = "json2"

-    def display_reports(self, layout: Section) ->None:
+    def display_reports(self, layout: Section) -> None:
         """Don't do anything in this reporter."""
-        pass

-    def _display(self, layout: Section) ->None:
+    def _display(self, layout: Section) -> None:
         """Do nothing."""
-        pass

-    def display_messages(self, layout: (Section | None)) ->None:
+    def display_messages(self, layout: Section | None) -> None:
         """Launch layouts display."""
-        pass
+        output = {
+            "messages": [self.serialize(message) for message in self.messages],
+            "statistics": self.serialize_stats(),
+        }
+        print(json.dumps(output, indent=4), file=self.out)
+
+    @staticmethod
+    def serialize(message: Message) -> JSONMessage:
+        return JSONMessage(
+            type=message.category,
+            symbol=message.symbol,
+            message=message.msg or "",
+            messageId=message.msg_id,
+            confidence=message.confidence.name,
+            module=message.module,
+            obj=message.obj,
+            line=message.line,
+            column=message.column,
+            endLine=message.end_line,
+            endColumn=message.end_column,
+            path=message.path,
+            absolutePath=message.abspath,
+        )

-    def serialize_stats(self) ->dict[str, str | int | dict[str, int]]:
+    @staticmethod
+    def deserialize(message_as_json: JSONMessage) -> Message:
+        return Message(
+            msg_id=message_as_json["messageId"],
+            symbol=message_as_json["symbol"],
+            msg=message_as_json["message"],
+            location=MessageLocationTuple(
+                abspath=message_as_json["absolutePath"],
+                path=message_as_json["path"],
+                module=message_as_json["module"],
+                obj=message_as_json["obj"],
+                line=message_as_json["line"],
+                column=message_as_json["column"],
+                end_line=message_as_json["endLine"],
+                end_column=message_as_json["endColumn"],
+            ),
+            confidence=CONFIDENCE_MAP[message_as_json["confidence"]],
+        )
+
+    def serialize_stats(self) -> dict[str, str | int | dict[str, int]]:
         """Serialize the linter stats into something JSON dumpable."""
-        pass
+        stats = self.linter.stats
+
+        counts_dict = {
+            "fatal": stats.fatal,
+            "error": stats.error,
+            "warning": stats.warning,
+            "refactor": stats.refactor,
+            "convention": stats.convention,
+            "info": stats.info,
+        }
+
+        # Calculate score based on the evaluation option
+        evaluation = self.linter.config.evaluation
+        try:
+            note: int = eval(  # pylint: disable=eval-used
+                evaluation, {}, {**counts_dict, "statement": stats.statement or 1}
+            )
+        except Exception as ex:  # pylint: disable=broad-except
+            score: str | int = f"An exception occurred while rating: {ex}"
+        else:
+            score = round(note, 2)
+
+        return {
+            "messageTypeCount": counts_dict,
+            "modulesLinted": len(stats.by_module),
+            "score": score,
+        }
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_reporter(JSONReporter)
+    linter.register_reporter(JSON2Reporter)
diff --git a/pylint/reporters/multi_reporter.py b/pylint/reporters/multi_reporter.py
index 933044920..0c27293b7 100644
--- a/pylint/reporters/multi_reporter.py
+++ b/pylint/reporters/multi_reporter.py
@@ -1,11 +1,18 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import os
 from collections.abc import Callable
 from copy import copy
 from typing import TYPE_CHECKING, TextIO
+
 from pylint.message import Message
 from pylint.reporters.base_reporter import BaseReporter
 from pylint.utils import LinterStats
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
     from pylint.reporters.ureports.nodes import Section
@@ -13,11 +20,22 @@ if TYPE_CHECKING:

 class MultiReporter:
     """Reports messages and layouts in plain text."""
-    name = '_internal_multi_reporter'
-    extension = ''

-    def __init__(self, sub_reporters: list[BaseReporter],
-        close_output_files: Callable[[], None], output: (TextIO | None)=None):
+    name = "_internal_multi_reporter"
+    # Note: do not register this reporter with linter.register_reporter as it is
+    #       not intended to be used directly like a regular reporter, but is
+    #       instead used to implement the
+    #       `--output-format=json:somefile.json,colorized`
+    #       multiple output formats feature
+
+    extension = ""
+
+    def __init__(
+        self,
+        sub_reporters: list[BaseReporter],
+        close_output_files: Callable[[], None],
+        output: TextIO | None = None,
+    ):
         self._sub_reporters = sub_reporters
         self.close_output_files = close_output_files
         self._path_strip_prefix = os.getcwd() + os.sep
@@ -25,40 +43,69 @@ class MultiReporter:
         self.out = output
         self.messages: list[Message] = []

+    @property
+    def out(self) -> TextIO | None:
+        return self.__out
+
     @out.setter
-    def out(self, output: (TextIO | None)=None) ->None:
+    def out(self, output: TextIO | None = None) -> None:
         """MultiReporter doesn't have its own output.

         This method is only provided for API parity with BaseReporter
         and should not be called with non-None values for 'output'.
         """
-        pass
+        self.__out = None
+        if output is not None:
+            raise NotImplementedError("MultiReporter does not support direct output.")

-    def __del__(self) ->None:
+    def __del__(self) -> None:
         self.close_output_files()

-    def handle_message(self, msg: Message) ->None:
+    @property
+    def path_strip_prefix(self) -> str:
+        return self._path_strip_prefix
+
+    @property
+    def linter(self) -> PyLinter | None:
+        return self._linter
+
+    @linter.setter
+    def linter(self, value: PyLinter) -> None:
+        self._linter = value
+        for rep in self._sub_reporters:
+            rep.linter = value
+
+    def handle_message(self, msg: Message) -> None:
         """Handle a new message triggered on the current file."""
-        pass
+        for rep in self._sub_reporters:
+            # We provide a copy so reporters can't modify message for others.
+            rep.handle_message(copy(msg))

-    def writeln(self, string: str='') ->None:
+    def writeln(self, string: str = "") -> None:
         """Write a line in the output buffer."""
-        pass
+        for rep in self._sub_reporters:
+            rep.writeln(string)

-    def display_reports(self, layout: Section) ->None:
+    def display_reports(self, layout: Section) -> None:
         """Display results encapsulated in the layout tree."""
-        pass
+        for rep in self._sub_reporters:
+            rep.display_reports(layout)

-    def display_messages(self, layout: (Section | None)) ->None:
+    def display_messages(self, layout: Section | None) -> None:
         """Hook for displaying the messages of the reporter."""
-        pass
+        for rep in self._sub_reporters:
+            rep.display_messages(layout)

-    def on_set_current_module(self, module: str, filepath: (str | None)
-        ) ->None:
+    def on_set_current_module(self, module: str, filepath: str | None) -> None:
         """Hook called when a module starts to be analysed."""
-        pass
+        for rep in self._sub_reporters:
+            rep.on_set_current_module(module, filepath)

-    def on_close(self, stats: LinterStats, previous_stats: (LinterStats | None)
-        ) ->None:
+    def on_close(
+        self,
+        stats: LinterStats,
+        previous_stats: LinterStats | None,
+    ) -> None:
         """Hook called when a module finished analyzing."""
-        pass
+        for rep in self._sub_reporters:
+            rep.on_close(stats, previous_stats)
diff --git a/pylint/reporters/reports_handler_mix_in.py b/pylint/reporters/reports_handler_mix_in.py
index c68428765..95d45ba91 100644
--- a/pylint/reporters/reports_handler_mix_in.py
+++ b/pylint/reporters/reports_handler_mix_in.py
@@ -1,16 +1,23 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import collections
 from collections.abc import MutableSequence
 from typing import TYPE_CHECKING, DefaultDict, List, Tuple
+
 from pylint.exceptions import EmptyReportError
 from pylint.reporters.ureports.nodes import Section
 from pylint.typing import ReportsCallable
 from pylint.utils import LinterStats
+
 if TYPE_CHECKING:
     from pylint.checkers import BaseChecker
     from pylint.lint.pylinter import PyLinter
-ReportsDict = DefaultDict['BaseChecker', List[Tuple[str, str, ReportsCallable]]
-    ]
+
+ReportsDict = DefaultDict["BaseChecker", List[Tuple[str, str, ReportsCallable]]]


 class ReportsHandlerMixIn:
@@ -18,16 +25,17 @@ class ReportsHandlerMixIn:
     related methods for the main lint class.
     """

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         self._reports: ReportsDict = collections.defaultdict(list)
         self._reports_state: dict[str, bool] = {}

-    def report_order(self) ->MutableSequence[BaseChecker]:
+    def report_order(self) -> MutableSequence[BaseChecker]:
         """Return a list of reporters."""
-        pass
+        return list(self._reports)

-    def register_report(self, reportid: str, r_title: str, r_cb:
-        ReportsCallable, checker: BaseChecker) ->None:
+    def register_report(
+        self, reportid: str, r_title: str, r_cb: ReportsCallable, checker: BaseChecker
+    ) -> None:
         """Register a report.

         :param reportid: The unique identifier for the report
@@ -35,21 +43,39 @@ class ReportsHandlerMixIn:
         :param r_cb: The method to call to make the report
         :param checker: The checker defining the report
         """
-        pass
+        reportid = reportid.upper()
+        self._reports[checker].append((reportid, r_title, r_cb))

-    def enable_report(self, reportid: str) ->None:
+    def enable_report(self, reportid: str) -> None:
         """Enable the report of the given id."""
-        pass
+        reportid = reportid.upper()
+        self._reports_state[reportid] = True

-    def disable_report(self, reportid: str) ->None:
+    def disable_report(self, reportid: str) -> None:
         """Disable the report of the given id."""
-        pass
+        reportid = reportid.upper()
+        self._reports_state[reportid] = False

-    def report_is_enabled(self, reportid: str) ->bool:
+    def report_is_enabled(self, reportid: str) -> bool:
         """Is the report associated to the given identifier enabled ?"""
-        pass
+        return self._reports_state.get(reportid, True)

-    def make_reports(self: PyLinter, stats: LinterStats, old_stats: (
-        LinterStats | None)) ->Section:
+    def make_reports(  # type: ignore[misc] # ReportsHandlerMixIn is always mixed with PyLinter
+        self: PyLinter,
+        stats: LinterStats,
+        old_stats: LinterStats | None,
+    ) -> Section:
         """Render registered reports."""
-        pass
+        sect = Section("Report", f"{self.stats.statement} statements analysed.")
+        for checker in self.report_order():
+            for reportid, r_title, r_cb in self._reports[checker]:
+                if not self.report_is_enabled(reportid):
+                    continue
+                report_sect = Section(r_title)
+                try:
+                    r_cb(report_sect, stats, old_stats)
+                except EmptyReportError:
+                    continue
+                report_sect.report_id = reportid
+                sect.append(report_sect)
+        return sect
diff --git a/pylint/reporters/text.py b/pylint/reporters/text.py
index 913080584..0e3577199 100644
--- a/pylint/reporters/text.py
+++ b/pylint/reporters/text.py
@@ -1,18 +1,26 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Plain text reporters:.

 :text: the default one grouping messages by module
 :colorized: an ANSI colorized text reporter
 """
+
 from __future__ import annotations
+
 import os
 import re
 import sys
 import warnings
 from dataclasses import asdict, fields
 from typing import TYPE_CHECKING, Dict, NamedTuple, TextIO
+
 from pylint.message import Message
 from pylint.reporters import BaseReporter
 from pylint.reporters.ureports.text_writer import TextWriter
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
     from pylint.reporters.ureports.nodes import Section
@@ -20,6 +28,7 @@ if TYPE_CHECKING:

 class MessageStyle(NamedTuple):
     """Styling of a message."""
+
     color: str | None
     """The color name (see `ANSI_COLORS` for available values)
     or the color number when 256 colors are available.
@@ -27,74 +36,146 @@ class MessageStyle(NamedTuple):
     style: tuple[str, ...] = ()
     """Tuple of style strings (see `ANSI_COLORS` for available values)."""

-    def __get_ansi_code(self) ->str:
+    def __get_ansi_code(self) -> str:
         """Return ANSI escape code corresponding to color and style.

         :raise KeyError: if a nonexistent color or style identifier is given

         :return: the built escape code
         """
-        pass
+        ansi_code = [ANSI_STYLES[effect] for effect in self.style]
+        if self.color:
+            if self.color.isdigit():
+                ansi_code.extend(["38", "5"])
+                ansi_code.append(self.color)
+            else:
+                ansi_code.append(ANSI_COLORS[self.color])
+        if ansi_code:
+            return ANSI_PREFIX + ";".join(ansi_code) + ANSI_END
+        return ""
+
+    def _colorize_ansi(self, msg: str) -> str:
+        if self.color is None and len(self.style) == 0:
+            # If both color and style are not defined, then leave the text as is.
+            return msg
+        escape_code = self.__get_ansi_code()
+        # If invalid (or unknown) color, don't wrap msg with ANSI codes
+        if escape_code:
+            return f"{escape_code}{msg}{ANSI_RESET}"
+        return msg


 ColorMappingDict = Dict[str, MessageStyle]
-TITLE_UNDERLINES = ['', '=', '-', '.']
-ANSI_PREFIX = '\x1b['
-ANSI_END = 'm'
-ANSI_RESET = '\x1b[0m'
-ANSI_STYLES = {'reset': '0', 'bold': '1', 'italic': '3', 'underline': '4',
-    'blink': '5', 'inverse': '7', 'strike': '9'}
-ANSI_COLORS = {'reset': '0', 'black': '30', 'red': '31', 'green': '32',
-    'yellow': '33', 'blue': '34', 'magenta': '35', 'cyan': '36', 'white': '37'}
+
+TITLE_UNDERLINES = ["", "=", "-", "."]
+
+ANSI_PREFIX = "\033["
+ANSI_END = "m"
+ANSI_RESET = "\033[0m"
+ANSI_STYLES = {
+    "reset": "0",
+    "bold": "1",
+    "italic": "3",
+    "underline": "4",
+    "blink": "5",
+    "inverse": "7",
+    "strike": "9",
+}
+ANSI_COLORS = {
+    "reset": "0",
+    "black": "30",
+    "red": "31",
+    "green": "32",
+    "yellow": "33",
+    "blue": "34",
+    "magenta": "35",
+    "cyan": "36",
+    "white": "37",
+}
+
 MESSAGE_FIELDS = {i.name for i in fields(Message)}
 """All fields of the Message class."""


-def colorize_ansi(msg: str, msg_style: MessageStyle) ->str:
+def colorize_ansi(msg: str, msg_style: MessageStyle) -> str:
     """Colorize message by wrapping it with ANSI escape codes."""
-    pass
+    return msg_style._colorize_ansi(msg)
+
+
+def make_header(msg: Message) -> str:
+    return f"************* Module {msg.module}"


 class TextReporter(BaseReporter):
     """Reports messages and layouts in plain text."""
-    name = 'text'
-    extension = 'txt'
-    line_format = '{path}:{line}:{column}: {msg_id}: {msg} ({symbol})'

-    def __init__(self, output: (TextIO | None)=None) ->None:
+    name = "text"
+    extension = "txt"
+    line_format = "{path}:{line}:{column}: {msg_id}: {msg} ({symbol})"
+
+    def __init__(self, output: TextIO | None = None) -> None:
         super().__init__(output)
         self._modules: set[str] = set()
         self._template = self.line_format
         self._fixed_template = self.line_format
         """The output format template with any unrecognized arguments removed."""

-    def on_set_current_module(self, module: str, filepath: (str | None)
-        ) ->None:
+    def on_set_current_module(self, module: str, filepath: str | None) -> None:
         """Set the format template to be used and check for unrecognized arguments."""
-        pass
-
-    def write_message(self, msg: Message) ->None:
+        template = str(self.linter.config.msg_template or self._template)
+
+        # Return early if the template is the same as the previous one
+        if template == self._template:
+            return
+
+        # Set template to the currently selected template
+        self._template = template
+
+        # Check to see if all parameters in the template are attributes of the Message
+        arguments = re.findall(r"\{(\w+?)(:.*)?\}", template)
+        for argument in arguments:
+            if argument[0] not in MESSAGE_FIELDS:
+                warnings.warn(
+                    f"Don't recognize the argument '{argument[0]}' in the --msg-template. "
+                    "Are you sure it is supported on the current version of pylint?",
+                    stacklevel=2,
+                )
+                template = re.sub(r"\{" + argument[0] + r"(:.*?)?\}", "", template)
+        self._fixed_template = template
+
+    def write_message(self, msg: Message) -> None:
         """Convenience method to write a formatted message with class default
         template.
         """
-        pass
+        self_dict = asdict(msg)
+        for key in ("end_line", "end_column"):
+            self_dict[key] = self_dict[key] or ""
+
+        self.writeln(self._fixed_template.format(**self_dict))

-    def handle_message(self, msg: Message) ->None:
+    def handle_message(self, msg: Message) -> None:
         """Manage message of different type and in the context of path."""
-        pass
+        if msg.module not in self._modules:
+            self.writeln(make_header(msg))
+            self._modules.add(msg.module)
+        self.write_message(msg)

-    def _display(self, layout: Section) ->None:
+    def _display(self, layout: Section) -> None:
         """Launch layouts display."""
-        pass
+        print(file=self.out)
+        TextWriter().format(layout, self.out)


 class NoHeaderReporter(TextReporter):
     """Reports messages and layouts in plain text without a module header."""
-    name = 'no-header'

-    def handle_message(self, msg: Message) ->None:
+    name = "no-header"
+
+    def handle_message(self, msg: Message) -> None:
         """Write message(s) without module header."""
-        pass
+        if msg.module not in self._modules:
+            self._modules.add(msg.module)
+        self.write_message(msg)


 class ParseableTextReporter(TextReporter):
@@ -103,60 +184,106 @@ class ParseableTextReporter(TextReporter):

     <filename>:<linenum>:<msg>
     """
-    name = 'parseable'
-    line_format = '{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}'

-    def __init__(self, output: (TextIO | None)=None) ->None:
+    name = "parseable"
+    line_format = "{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}"
+
+    def __init__(self, output: TextIO | None = None) -> None:
         warnings.warn(
-            f'{self.name} output format is deprecated. This is equivalent to --msg-template={self.line_format}'
-            , DeprecationWarning, stacklevel=2)
+            f"{self.name} output format is deprecated. This is equivalent to --msg-template={self.line_format}",
+            DeprecationWarning,
+            stacklevel=2,
+        )
         super().__init__(output)


 class VSTextReporter(ParseableTextReporter):
     """Visual studio text reporter."""
-    name = 'msvs'
-    line_format = '{path}({line}): [{msg_id}({symbol}){obj}] {msg}'
+
+    name = "msvs"
+    line_format = "{path}({line}): [{msg_id}({symbol}){obj}] {msg}"


 class ColorizedTextReporter(TextReporter):
     """Simple TextReporter that colorizes text output."""
-    name = 'colorized'
-    COLOR_MAPPING: ColorMappingDict = {'I': MessageStyle('green'), 'C':
-        MessageStyle(None, ('bold',)), 'R': MessageStyle('magenta', ('bold',
-        'italic')), 'W': MessageStyle('magenta'), 'E': MessageStyle('red',
-        ('bold',)), 'F': MessageStyle('red', ('bold', 'underline')), 'S':
-        MessageStyle('yellow', ('inverse',))}
-
-    def __init__(self, output: (TextIO | None)=None, color_mapping: (
-        ColorMappingDict | None)=None) ->None:
+
+    name = "colorized"
+    COLOR_MAPPING: ColorMappingDict = {
+        "I": MessageStyle("green"),
+        "C": MessageStyle(None, ("bold",)),
+        "R": MessageStyle("magenta", ("bold", "italic")),
+        "W": MessageStyle("magenta"),
+        "E": MessageStyle("red", ("bold",)),
+        "F": MessageStyle("red", ("bold", "underline")),
+        "S": MessageStyle("yellow", ("inverse",)),  # S stands for module Separator
+    }
+
+    def __init__(
+        self,
+        output: TextIO | None = None,
+        color_mapping: ColorMappingDict | None = None,
+    ) -> None:
         super().__init__(output)
-        self.color_mapping = (color_mapping or ColorizedTextReporter.
-            COLOR_MAPPING)
-        ansi_terms = ['xterm-16color', 'xterm-256color']
-        if os.environ.get('TERM') not in ansi_terms:
-            if sys.platform == 'win32':
+        self.color_mapping = color_mapping or ColorizedTextReporter.COLOR_MAPPING
+        ansi_terms = ["xterm-16color", "xterm-256color"]
+        if os.environ.get("TERM") not in ansi_terms:
+            if sys.platform == "win32":
+                # pylint: disable=import-outside-toplevel
                 import colorama
+
                 self.out = colorama.AnsiToWin32(self.out)

-    def _get_decoration(self, msg_id: str) ->MessageStyle:
+    def _get_decoration(self, msg_id: str) -> MessageStyle:
         """Returns the message style as defined in self.color_mapping."""
-        pass
+        return self.color_mapping.get(msg_id[0]) or MessageStyle(None)

-    def handle_message(self, msg: Message) ->None:
+    def handle_message(self, msg: Message) -> None:
         """Manage message of different types, and colorize output
         using ANSI escape codes.
         """
-        pass
+        if msg.module not in self._modules:
+            msg_style = self._get_decoration("S")
+            modsep = colorize_ansi(make_header(msg), msg_style)
+            self.writeln(modsep)
+            self._modules.add(msg.module)
+        msg_style = self._get_decoration(msg.C)
+
+        msg.msg = colorize_ansi(msg.msg, msg_style)
+        msg.symbol = colorize_ansi(msg.symbol, msg_style)
+        msg.category = colorize_ansi(msg.category, msg_style)
+        msg.C = colorize_ansi(msg.C, msg_style)
+        self.write_message(msg)


 class GithubReporter(TextReporter):
     """Report messages in GitHub's special format to annotate code in its user
     interface.
     """
-    name = 'github'
-    line_format = (
-        '::{category} file={path},line={line},endline={end_line},col={column},title={msg_id}::{msg}'
-        )
-    category_map = {'F': 'error', 'E': 'error', 'W': 'warning', 'C':
-        'notice', 'R': 'notice', 'I': 'notice'}
+
+    name = "github"
+    line_format = "::{category} file={path},line={line},endline={end_line},col={column},title={msg_id}::{msg}"
+    category_map = {
+        "F": "error",
+        "E": "error",
+        "W": "warning",
+        "C": "notice",
+        "R": "notice",
+        "I": "notice",
+    }
+
+    def write_message(self, msg: Message) -> None:
+        self_dict = asdict(msg)
+        for key in ("end_line", "end_column"):
+            self_dict[key] = self_dict[key] or ""
+
+        self_dict["category"] = self.category_map.get(msg.C) or "error"
+        self.writeln(self._fixed_template.format(**self_dict))
+
+
+def register(linter: PyLinter) -> None:
+    linter.register_reporter(TextReporter)
+    linter.register_reporter(NoHeaderReporter)
+    linter.register_reporter(ParseableTextReporter)
+    linter.register_reporter(VSTextReporter)
+    linter.register_reporter(ColorizedTextReporter)
+    linter.register_reporter(GithubReporter)
diff --git a/pylint/reporters/ureports/base_writer.py b/pylint/reporters/ureports/base_writer.py
index 8b1244c33..9a12123cb 100644
--- a/pylint/reporters/ureports/base_writer.py
+++ b/pylint/reporters/ureports/base_writer.py
@@ -1,64 +1,107 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Universal report objects and some formatting drivers.

 A way to create simple reports using python objects, primarily designed to be
 formatted as text and html.
 """
+
 from __future__ import annotations
+
 import sys
 from collections.abc import Iterator
 from io import StringIO
 from typing import TYPE_CHECKING, TextIO
+
 if TYPE_CHECKING:
-    from pylint.reporters.ureports.nodes import BaseLayout, EvaluationSection, Paragraph, Section, Table
+    from pylint.reporters.ureports.nodes import (
+        BaseLayout,
+        EvaluationSection,
+        Paragraph,
+        Section,
+        Table,
+    )


 class BaseWriter:
     """Base class for ureport writers."""

-    def format(self, layout: BaseLayout, stream: TextIO=sys.stdout,
-        encoding: (str | None)=None) ->None:
+    def format(
+        self,
+        layout: BaseLayout,
+        stream: TextIO = sys.stdout,
+        encoding: str | None = None,
+    ) -> None:
         """Format and write the given layout into the stream object.

         unicode policy: unicode strings may be found in the layout;
         try to call 'stream.write' with it, but give it back encoded using
         the given encoding if it fails
         """
-        pass
+        if not encoding:
+            encoding = getattr(stream, "encoding", "UTF-8")
+        self.encoding = encoding or "UTF-8"
+        self.out = stream
+        self.begin_format()
+        layout.accept(self)
+        self.end_format()

-    def format_children(self, layout: (EvaluationSection | Paragraph | Section)
-        ) ->None:
+    def format_children(self, layout: EvaluationSection | Paragraph | Section) -> None:
         """Recurse on the layout children and call their accept method
         (see the Visitor pattern).
         """
-        pass
+        for child in getattr(layout, "children", ()):
+            child.accept(self)

-    def writeln(self, string: str='') ->None:
+    def writeln(self, string: str = "") -> None:
         """Write a line in the output buffer."""
-        pass
+        self.write(string + "\n")

-    def write(self, string: str) ->None:
+    def write(self, string: str) -> None:
         """Write a string in the output buffer."""
-        pass
+        self.out.write(string)

-    def begin_format(self) ->None:
+    def begin_format(self) -> None:
         """Begin to format a layout."""
-        pass
+        self.section = 0

-    def end_format(self) ->None:
+    def end_format(self) -> None:
         """Finished formatting a layout."""
-        pass

-    def get_table_content(self, table: Table) ->list[list[str]]:
+    def get_table_content(self, table: Table) -> list[list[str]]:
         """Trick to get table content without actually writing it.

         return an aligned list of lists containing table cells values as string
         """
-        pass
+        result: list[list[str]] = [[]]
+        cols = table.cols
+        for cell in self.compute_content(table):
+            if cols == 0:
+                result.append([])
+                cols = table.cols
+            cols -= 1
+            result[-1].append(cell)
+        # fill missing cells
+        result[-1] += [""] * (cols - len(result[-1]))
+        return result

-    def compute_content(self, layout: BaseLayout) ->Iterator[str]:
+    def compute_content(self, layout: BaseLayout) -> Iterator[str]:
         """Trick to compute the formatting of children layout before actually
         writing it.

         return an iterator on strings (one for each child element)
         """
-        pass
+        # Patch the underlying output stream with a fresh-generated stream,
+        # which is used to store a temporary representation of a child
+        # node.
+        out = self.out
+        try:
+            for child in layout.children:
+                stream = StringIO()
+                self.out = stream
+                child.accept(self)
+                yield stream.getvalue()
+        finally:
+            self.out = out
diff --git a/pylint/reporters/ureports/nodes.py b/pylint/reporters/ureports/nodes.py
index 9787ce5b8..59443996d 100644
--- a/pylint/reporters/ureports/nodes.py
+++ b/pylint/reporters/ureports/nodes.py
@@ -1,26 +1,45 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Micro reports objects.

 A micro report is a tree of layout and content objects.
 """
+
 from __future__ import annotations
+
 from collections.abc import Iterable, Iterator
 from typing import Any, Callable, TypeVar
+
 from pylint.reporters.ureports.base_writer import BaseWriter
-_T = TypeVar('_T')
-_VNodeT = TypeVar('_VNodeT', bound='VNode')
+
+_T = TypeVar("_T")
+_VNodeT = TypeVar("_VNodeT", bound="VNode")
 VisitLeaveFunction = Callable[[_T, Any, Any], None]


 class VNode:
-
-    def __init__(self) ->None:
+    def __init__(self) -> None:
         self.parent: BaseLayout | None = None
         self.children: list[VNode] = []
         self.visitor_name: str = self.__class__.__name__.lower()

-    def __iter__(self) ->Iterator[VNode]:
+    def __iter__(self) -> Iterator[VNode]:
         return iter(self.children)

+    def accept(self: _VNodeT, visitor: BaseWriter, *args: Any, **kwargs: Any) -> None:
+        func: VisitLeaveFunction[_VNodeT] = getattr(
+            visitor, f"visit_{self.visitor_name}"
+        )
+        return func(self, *args, **kwargs)
+
+    def leave(self: _VNodeT, visitor: BaseWriter, *args: Any, **kwargs: Any) -> None:
+        func: VisitLeaveFunction[_VNodeT] = getattr(
+            visitor, f"leave_{self.visitor_name}"
+        )
+        return func(self, *args, **kwargs)
+

 class BaseLayout(VNode):
     """Base container node.
@@ -29,7 +48,7 @@ class BaseLayout(VNode):
     * children : components in this table (i.e. the table's cells)
     """

-    def __init__(self, children: Iterable[Text | str]=()) ->None:
+    def __init__(self, children: Iterable[Text | str] = ()) -> None:
         super().__init__()
         for child in children:
             if isinstance(child, VNode):
@@ -37,21 +56,30 @@ class BaseLayout(VNode):
             else:
                 self.add_text(child)

-    def append(self, child: VNode) ->None:
+    def append(self, child: VNode) -> None:
         """Add a node to children."""
-        pass
+        assert child not in self.parents()
+        self.children.append(child)
+        child.parent = self

-    def insert(self, index: int, child: VNode) ->None:
+    def insert(self, index: int, child: VNode) -> None:
         """Insert a child node."""
-        pass
+        self.children.insert(index, child)
+        child.parent = self

-    def parents(self) ->list[BaseLayout]:
+    def parents(self) -> list[BaseLayout]:
         """Return the ancestor nodes."""
-        pass
+        assert self.parent is not self
+        if self.parent is None:
+            return []
+        return [self.parent, *self.parent.parents()]

-    def add_text(self, text: str) ->None:
+    def add_text(self, text: str) -> None:
         """Shortcut to add text data."""
-        pass
+        self.children.append(Text(text))
+
+
+# non container nodes #########################################################


 class Text(VNode):
@@ -61,7 +89,7 @@ class Text(VNode):
     * data : the text value as an encoded or unicode string
     """

-    def __init__(self, data: str, escaped: bool=True) ->None:
+    def __init__(self, data: str, escaped: bool = True) -> None:
         super().__init__()
         self.escaped = escaped
         self.data = data
@@ -75,6 +103,9 @@ class VerbatimText(Text):
     """


+# container nodes #############################################################
+
+
 class Section(BaseLayout):
     """A section.

@@ -87,22 +118,25 @@ class Section(BaseLayout):
     as a first paragraph
     """

-    def __init__(self, title: (str | None)=None, description: (str | None)=
-        None, children: Iterable[Text | str]=()) ->None:
+    def __init__(
+        self,
+        title: str | None = None,
+        description: str | None = None,
+        children: Iterable[Text | str] = (),
+    ) -> None:
         super().__init__(children=children)
         if description:
             self.insert(0, Paragraph([Text(description)]))
         if title:
             self.insert(0, Title(children=(title,)))
-        self.report_id: str = ''
+        self.report_id: str = ""  # Used in ReportHandlerMixin.make_reports


 class EvaluationSection(Section):
-
-    def __init__(self, message: str, children: Iterable[Text | str]=()) ->None:
+    def __init__(self, message: str, children: Iterable[Text | str] = ()) -> None:
         super().__init__(children=children)
         title = Paragraph()
-        title.append(Text('-' * len(message)))
+        title.append(Text("-" * len(message)))
         self.append(title)
         message_body = Paragraph()
         message_body.append(Text(message))
@@ -140,8 +174,14 @@ class Table(BaseLayout):
     * title : the table's optional title
     """

-    def __init__(self, cols: int, title: (str | None)=None, rheaders: int=0,
-        cheaders: int=0, children: Iterable[Text | str]=()) ->None:
+    def __init__(
+        self,
+        cols: int,
+        title: str | None = None,
+        rheaders: int = 0,
+        cheaders: int = 0,
+        children: Iterable[Text | str] = (),
+    ) -> None:
         super().__init__(children=children)
         assert isinstance(cols, int)
         self.cols = cols
diff --git a/pylint/reporters/ureports/text_writer.py b/pylint/reporters/ureports/text_writer.py
index a827d6c5f..5dd6a5d08 100644
--- a/pylint/reporters/ureports/text_writer.py
+++ b/pylint/reporters/ureports/text_writer.py
@@ -1,11 +1,28 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Text formatting drivers for ureports."""
+
 from __future__ import annotations
+
 from typing import TYPE_CHECKING
+
 from pylint.reporters.ureports.base_writer import BaseWriter
+
 if TYPE_CHECKING:
-    from pylint.reporters.ureports.nodes import EvaluationSection, Paragraph, Section, Table, Text, Title, VerbatimText
-TITLE_UNDERLINES = ['', '=', '-', '`', '.', '~', '^']
-BULLETS = ['*', '-']
+    from pylint.reporters.ureports.nodes import (
+        EvaluationSection,
+        Paragraph,
+        Section,
+        Table,
+        Text,
+        Title,
+        VerbatimText,
+    )
+
+TITLE_UNDERLINES = ["", "=", "-", "`", ".", "~", "^"]
+BULLETS = ["*", "-"]


 class TextWriter(BaseWriter):
@@ -13,35 +30,79 @@ class TextWriter(BaseWriter):
     (ReStructured inspiration but not totally handled yet).
     """

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         super().__init__()
         self.list_level = 0

-    def visit_section(self, layout: Section) ->None:
+    def visit_section(self, layout: Section) -> None:
         """Display a section as text."""
-        pass
+        self.section += 1
+        self.writeln()
+        self.format_children(layout)
+        self.section -= 1
+        self.writeln()

-    def visit_evaluationsection(self, layout: EvaluationSection) ->None:
+    def visit_evaluationsection(self, layout: EvaluationSection) -> None:
         """Display an evaluation section as a text."""
-        pass
+        self.section += 1
+        self.format_children(layout)
+        self.section -= 1
+        self.writeln()
+
+    def visit_title(self, layout: Title) -> None:
+        title = "".join(list(self.compute_content(layout)))
+        self.writeln(title)
+        try:
+            self.writeln(TITLE_UNDERLINES[self.section] * len(title))
+        except IndexError:
+            print("FIXME TITLE TOO DEEP. TURNING TITLE INTO TEXT")

-    def visit_paragraph(self, layout: Paragraph) ->None:
+    def visit_paragraph(self, layout: Paragraph) -> None:
         """Enter a paragraph."""
-        pass
+        self.format_children(layout)
+        self.writeln()

-    def visit_table(self, layout: Table) ->None:
+    def visit_table(self, layout: Table) -> None:
         """Display a table as text."""
-        pass
+        table_content = self.get_table_content(layout)
+        # get columns width
+        cols_width = [0] * len(table_content[0])
+        for row in table_content:
+            for index, col in enumerate(row):
+                cols_width[index] = max(cols_width[index], len(col))
+        self.default_table(layout, table_content, cols_width)
+        self.writeln()

-    def default_table(self, layout: Table, table_content: list[list[str]],
-        cols_width: list[int]) ->None:
+    def default_table(
+        self, layout: Table, table_content: list[list[str]], cols_width: list[int]
+    ) -> None:
         """Format a table."""
-        pass
+        cols_width = [size + 1 for size in cols_width]
+        format_strings = " ".join(["%%-%ss"] * len(cols_width))
+        format_strings %= tuple(cols_width)
+
+        table_linesep = "\n+" + "+".join("-" * w for w in cols_width) + "+\n"
+        headsep = "\n+" + "+".join("=" * w for w in cols_width) + "+\n"
+
+        self.write(table_linesep)
+        split_strings = format_strings.split(" ")
+        for index, line in enumerate(table_content):
+            self.write("|")
+            for line_index, at_index in enumerate(line):
+                self.write(split_strings[line_index] % at_index)
+                self.write("|")
+            if index == 0 and layout.rheaders:
+                self.write(headsep)
+            else:
+                self.write(table_linesep)

-    def visit_verbatimtext(self, layout: VerbatimText) ->None:
+    def visit_verbatimtext(self, layout: VerbatimText) -> None:
         """Display a verbatim layout as text (so difficult ;)."""
-        pass
+        self.writeln("::\n")
+        for line in layout.data.splitlines():
+            self.writeln("    " + line)
+        self.writeln()

-    def visit_text(self, layout: Text) ->None:
+    def visit_text(self, layout: Text) -> None:
         """Add some text."""
-        pass
+        self.write(f"{layout.data}")
diff --git a/pylint/testutils/_primer/package_to_lint.py b/pylint/testutils/_primer/package_to_lint.py
index ff547e409..fb65a90ad 100644
--- a/pylint/testutils/_primer/package_to_lint.py
+++ b/pylint/testutils/_primer/package_to_lint.py
@@ -1,50 +1,70 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import logging
 from pathlib import Path
 from typing import Literal
+
 from git import GitCommandError
 from git.cmd import Git
 from git.repo import Repo
-PRIMER_DIRECTORY_PATH = Path('tests') / '.pylint_primer_tests'
+
+PRIMER_DIRECTORY_PATH = Path("tests") / ".pylint_primer_tests"


 class DirtyPrimerDirectoryException(Exception):
     """We can't pull if there's local changes."""

-    def __init__(self, path: (Path | str)):
+    def __init__(self, path: Path | str):
         super().__init__(
-            f"""
+            rf"""

-/!\\ Can't pull /!\\
+/!\ Can't pull /!\

 In order for the prepare command to be able to pull please cleanup your local repo:
 cd {path}
 git diff
 """
-            )
+        )


 class PackageToLint:
     """Represents data about a package to be tested during primer tests."""
+
     url: str
     """URL of the repository to clone."""
+
     branch: str
     """Branch of the repository to clone."""
+
     directories: list[str]
     """Directories within the repository to run pylint over."""
+
     commit: str | None
     """Commit hash to pin the repository on."""
+
     pylint_additional_args: list[str]
     """Arguments to give to pylint."""
+
     pylintrc_relpath: str | None
     """Path relative to project's main directory to the pylintrc if it exists."""
+
     minimum_python: str | None
     """Minimum python version supported by the package."""

-    def __init__(self, url: str, branch: str, directories: list[str],
-        commit: (str | None)=None, pylint_additional_args: (list[str] |
-        None)=None, pylintrc_relpath: (str | None)=None, minimum_python: (
-        str | None)=None) ->None:
+    def __init__(
+        self,
+        url: str,
+        branch: str,
+        directories: list[str],
+        commit: str | None = None,
+        pylint_additional_args: list[str] | None = None,
+        pylintrc_relpath: str | None = None,
+        minimum_python: str | None = None,
+    ) -> None:
         self.url = url
         self.branch = branch
         self.directories = directories
@@ -54,16 +74,31 @@ class PackageToLint:
         self.minimum_python = minimum_python

     @property
-    def clone_directory(self) ->Path:
+    def pylintrc(self) -> Path | Literal[""]:
+        if self.pylintrc_relpath is None:
+            # Fall back to "" to ensure pylint's own pylintrc is not discovered
+            return ""
+        return self.clone_directory / self.pylintrc_relpath
+
+    @property
+    def clone_directory(self) -> Path:
         """Directory to clone repository into."""
-        pass
+        clone_name = "/".join(self.url.split("/")[-2:]).replace(".git", "")
+        return PRIMER_DIRECTORY_PATH / clone_name

     @property
-    def paths_to_lint(self) ->list[str]:
+    def paths_to_lint(self) -> list[str]:
         """The paths we need to lint."""
-        pass
+        return [str(self.clone_directory / path) for path in self.directories]

-    def lazy_clone(self) ->str:
+    @property
+    def pylint_args(self) -> list[str]:
+        options: list[str] = []
+        # There is an error if rcfile is given but does not exist
+        options += [f"--rcfile={self.pylintrc}"]
+        return self.paths_to_lint + options + self.pylint_additional_args
+
+    def lazy_clone(self) -> str:  # pragma: no cover
         """Concatenates the target directory and clones the file.

         Not expected to be tested as the primer won't work if it doesn't.
@@ -72,4 +107,43 @@ class PackageToLint:
         we'll probably notice because we'll have a fatal when launching the
         primer locally.
         """
-        pass
+        logging.info("Lazy cloning %s", self.url)
+        if not self.clone_directory.exists():
+            return self._clone_repository()
+        return self._pull_repository()
+
+    def _clone_repository(self) -> str:
+        options: dict[str, str | int] = {
+            "url": self.url,
+            "to_path": str(self.clone_directory),
+            "branch": self.branch,
+            "depth": 1,
+        }
+        logging.info("Directory does not exist, cloning: %s", options)
+        repo = Repo.clone_from(
+            url=self.url, to_path=self.clone_directory, branch=self.branch, depth=1
+        )
+        return str(repo.head.object.hexsha)
+
+    def _pull_repository(self) -> str:
+        remote_sha1_commit = Git().ls_remote(self.url, self.branch).split("\t")[0]
+        local_sha1_commit = Repo(self.clone_directory).head.object.hexsha
+        if remote_sha1_commit != local_sha1_commit:
+            logging.info(
+                "Remote sha is '%s' while local sha is '%s': pulling new commits",
+                remote_sha1_commit,
+                local_sha1_commit,
+            )
+            try:
+                repo = Repo(self.clone_directory)
+                if repo.is_dirty():
+                    raise DirtyPrimerDirectoryException(self.clone_directory)
+                origin = repo.remotes.origin
+                origin.pull()
+            except GitCommandError as e:
+                raise SystemError(
+                    f"Failed to clone repository for {self.clone_directory}"
+                ) from e
+        else:
+            logging.info("Repository already up to date.")
+        return str(remote_sha1_commit)
diff --git a/pylint/testutils/_primer/primer.py b/pylint/testutils/_primer/primer.py
index feaf47462..87c37d3fe 100644
--- a/pylint/testutils/_primer/primer.py
+++ b/pylint/testutils/_primer/primer.py
@@ -1,8 +1,14 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import argparse
 import json
 import sys
 from pathlib import Path
+
 from pylint.testutils._primer import PackageToLint
 from pylint.testutils._primer.primer_command import PrimerCommand
 from pylint.testutils._primer.primer_compare_command import CompareCommand
@@ -13,47 +19,110 @@ from pylint.testutils._primer.primer_run_command import RunCommand
 class Primer:
     """Main class to handle priming of packages."""

-    def __init__(self, primer_directory: Path, json_path: Path) ->None:
+    def __init__(self, primer_directory: Path, json_path: Path) -> None:
+        # Preparing arguments
         self.primer_directory = primer_directory
-        self._argument_parser = argparse.ArgumentParser(prog='Pylint Primer')
-        self._subparsers = self._argument_parser.add_subparsers(dest=
-            'command', required=True)
-        prepare_parser = self._subparsers.add_parser('prepare')
-        prepare_parser.add_argument('--clone', help='Clone all packages.',
-            action='store_true', default=False)
-        prepare_parser.add_argument('--check', help=
-            'Check consistencies and commits of all packages.', action=
-            'store_true', default=False)
-        prepare_parser.add_argument('--make-commit-string', help=
-            'Get latest commit string.', action='store_true', default=False)
-        prepare_parser.add_argument('--read-commit-string', help=
-            'Print latest commit string.', action='store_true', default=False)
-        run_parser = self._subparsers.add_parser('run')
-        run_parser.add_argument('--type', choices=['main', 'pr'], required=
-            True, help='Type of primer run.')
-        run_parser.add_argument('--batches', required=False, type=int, help
-            ='Number of batches')
-        run_parser.add_argument('--batchIdx', required=False, type=int,
-            help='Portion of primer packages to run.')
-        compare_parser = self._subparsers.add_parser('compare')
-        compare_parser.add_argument('--base-file', required=True, help=
-            'Location of output file of the base run.')
-        compare_parser.add_argument('--new-file', required=True, help=
-            'Location of output file of the new run.')
-        compare_parser.add_argument('--commit', required=True, help=
-            'Commit hash of the PR commit being checked.')
-        compare_parser.add_argument('--batches', required=False, type=int,
-            help=
-            'Number of batches (filepaths with the placeholder BATCHIDX will be numbered)'
-            )
+        self._argument_parser = argparse.ArgumentParser(prog="Pylint Primer")
+        self._subparsers = self._argument_parser.add_subparsers(
+            dest="command", required=True
+        )
+
+        # All arguments for the prepare parser
+        prepare_parser = self._subparsers.add_parser("prepare")
+        prepare_parser.add_argument(
+            "--clone", help="Clone all packages.", action="store_true", default=False
+        )
+        prepare_parser.add_argument(
+            "--check",
+            help="Check consistencies and commits of all packages.",
+            action="store_true",
+            default=False,
+        )
+        prepare_parser.add_argument(
+            "--make-commit-string",
+            help="Get latest commit string.",
+            action="store_true",
+            default=False,
+        )
+        prepare_parser.add_argument(
+            "--read-commit-string",
+            help="Print latest commit string.",
+            action="store_true",
+            default=False,
+        )
+
+        # All arguments for the run parser
+        run_parser = self._subparsers.add_parser("run")
+        run_parser.add_argument(
+            "--type", choices=["main", "pr"], required=True, help="Type of primer run."
+        )
+        run_parser.add_argument(
+            "--batches",
+            required=False,
+            type=int,
+            help="Number of batches",
+        )
+        run_parser.add_argument(
+            "--batchIdx",
+            required=False,
+            type=int,
+            help="Portion of primer packages to run.",
+        )
+
+        # All arguments for the compare parser
+        compare_parser = self._subparsers.add_parser("compare")
+        compare_parser.add_argument(
+            "--base-file",
+            required=True,
+            help="Location of output file of the base run.",
+        )
+        compare_parser.add_argument(
+            "--new-file",
+            required=True,
+            help="Location of output file of the new run.",
+        )
+        compare_parser.add_argument(
+            "--commit",
+            required=True,
+            help="Commit hash of the PR commit being checked.",
+        )
+        compare_parser.add_argument(
+            "--batches",
+            required=False,
+            type=int,
+            help="Number of batches (filepaths with the placeholder BATCHIDX will be numbered)",
+        )
+
+        # Storing arguments
         self.config = self._argument_parser.parse_args()
+
         self.packages = self._get_packages_to_lint_from_json(json_path)
         """All packages to prime."""
-        if self.config.command == 'prepare':
+
+        if self.config.command == "prepare":
             command_class: type[PrimerCommand] = PrepareCommand
-        elif self.config.command == 'run':
+        elif self.config.command == "run":
             command_class = RunCommand
-        elif self.config.command == 'compare':
+        elif self.config.command == "compare":
             command_class = CompareCommand
-        self.command = command_class(self.primer_directory, self.packages,
-            self.config)
+        self.command = command_class(self.primer_directory, self.packages, self.config)
+
+    def run(self) -> None:
+        self.command.run()
+
+    @staticmethod
+    def _minimum_python_supported(package_data: dict[str, str]) -> bool:
+        min_python_str = package_data.get("minimum_python", None)
+        if not min_python_str:
+            return True
+        min_python_tuple = tuple(int(n) for n in min_python_str.split("."))
+        return min_python_tuple <= sys.version_info[:2]
+
+    @staticmethod
+    def _get_packages_to_lint_from_json(json_path: Path) -> dict[str, PackageToLint]:
+        with open(json_path, encoding="utf8") as f:
+            return {
+                name: PackageToLint(**package_data)
+                for name, package_data in json.load(f).items()
+                if Primer._minimum_python_supported(package_data)
+            }
diff --git a/pylint/testutils/_primer/primer_command.py b/pylint/testutils/_primer/primer_command.py
index 9c5537249..817c1a0d3 100644
--- a/pylint/testutils/_primer/primer_command.py
+++ b/pylint/testutils/_primer/primer_command.py
@@ -1,8 +1,14 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import abc
 import argparse
 from pathlib import Path
 from typing import Dict, TypedDict
+
 from pylint.reporters.json_reporter import OldJsonExport
 from pylint.testutils._primer import PackageToLint

@@ -18,8 +24,16 @@ PackageMessages = Dict[str, PackageData]
 class PrimerCommand:
     """Generic primer action with required arguments."""

-    def __init__(self, primer_directory: Path, packages: dict[str,
-        PackageToLint], config: argparse.Namespace) ->None:
+    def __init__(
+        self,
+        primer_directory: Path,
+        packages: dict[str, PackageToLint],
+        config: argparse.Namespace,
+    ) -> None:
         self.primer_directory = primer_directory
         self.packages = packages
         self.config = config
+
+    @abc.abstractmethod
+    def run(self) -> None:
+        pass
diff --git a/pylint/testutils/_primer/primer_compare_command.py b/pylint/testutils/_primer/primer_compare_command.py
index f71c9c3a6..7b245b9e1 100644
--- a/pylint/testutils/_primer/primer_compare_command.py
+++ b/pylint/testutils/_primer/primer_compare_command.py
@@ -1,13 +1,174 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
 from __future__ import annotations
+
 import json
 from pathlib import Path, PurePosixPath
+
 from pylint.reporters.json_reporter import OldJsonExport
-from pylint.testutils._primer.primer_command import PackageData, PackageMessages, PrimerCommand
+from pylint.testutils._primer.primer_command import (
+    PackageData,
+    PackageMessages,
+    PrimerCommand,
+)
+
 MAX_GITHUB_COMMENT_LENGTH = 65536


 class CompareCommand(PrimerCommand):
+    def run(self) -> None:
+        if self.config.batches is None:
+            main_data = self._load_json(self.config.base_file)
+            pr_data = self._load_json(self.config.new_file)
+        else:
+            main_data = {}
+            pr_data = {}
+            for idx in range(self.config.batches):
+                main_data.update(
+                    self._load_json(
+                        self.config.base_file.replace("BATCHIDX", "batch" + str(idx))
+                    )
+                )
+                pr_data.update(
+                    self._load_json(
+                        self.config.new_file.replace("BATCHIDX", "batch" + str(idx))
+                    )
+                )
+
+        missing_messages_data, new_messages_data = self._cross_reference(
+            main_data, pr_data
+        )
+        comment = self._create_comment(missing_messages_data, new_messages_data)
+        with open(self.primer_directory / "comment.txt", "w", encoding="utf-8") as f:
+            f.write(comment)
+
+    @staticmethod
+    def _cross_reference(
+        main_data: PackageMessages, pr_data: PackageMessages
+    ) -> tuple[PackageMessages, PackageMessages]:
+        missing_messages_data: PackageMessages = {}
+        for package, data in main_data.items():
+            package_missing_messages: list[OldJsonExport] = []
+            for message in data["messages"]:
+                try:
+                    pr_data[package]["messages"].remove(message)
+                except ValueError:
+                    package_missing_messages.append(message)
+            missing_messages_data[package] = PackageData(
+                commit=pr_data[package]["commit"], messages=package_missing_messages
+            )
+        return missing_messages_data, pr_data
+
+    @staticmethod
+    def _load_json(file_path: Path | str) -> PackageMessages:
+        with open(file_path, encoding="utf-8") as f:
+            result: PackageMessages = json.load(f)
+        return result
+
+    def _create_comment(
+        self, all_missing_messages: PackageMessages, all_new_messages: PackageMessages
+    ) -> str:
+        comment = ""
+        for package, missing_messages in all_missing_messages.items():
+            if len(comment) >= MAX_GITHUB_COMMENT_LENGTH:
+                break
+            new_messages = all_new_messages[package]
+            if not missing_messages["messages"] and not new_messages["messages"]:
+                continue
+            comment += self._create_comment_for_package(
+                package, new_messages, missing_messages
+            )
+        comment = (
+            f"🤖 **Effect of this PR on checked open source code:** 🤖\n\n{comment}"
+            if comment
+            else (
+                "🤖 According to the primer, this change has **no effect** on the"
+                " checked open source code. 🤖🎉\n\n"
+            )
+        )
+        return self._truncate_comment(comment)
+
+    def _create_comment_for_package(
+        self, package: str, new_messages: PackageData, missing_messages: PackageData
+    ) -> str:
+        comment = f"\n\n**Effect on [{package}]({self.packages[package].url}):**\n"
+        # Create comment for new messages
+        count = 1
+        astroid_errors = 0
+        new_non_astroid_messages = ""
+        if new_messages["messages"]:
+            print("Now emitted:")
+        for message in new_messages["messages"]:
+            filepath = str(
+                PurePosixPath(message["path"]).relative_to(
+                    self.packages[package].clone_directory
+                )
+            )
+            # Existing astroid errors may still show up as "new" because the timestamp
+            # in the message is slightly different.
+            if message["symbol"] == "astroid-error":
+                astroid_errors += 1
+            else:
+                new_non_astroid_messages += (
+                    f"{count}) {message['symbol']}:\n*{message['message']}*\n"
+                    f"{self.packages[package].url}/blob/{new_messages['commit']}/{filepath}#L{message['line']}\n"
+                )
+                print(message)
+                count += 1
+
+        if astroid_errors:
+            comment += (
+                f'{astroid_errors} "astroid error(s)" were found. '
+                "Please open the GitHub Actions log to see what failed or crashed.\n\n"
+            )
+        if new_non_astroid_messages:
+            comment += (
+                "The following messages are now emitted:\n\n<details>\n\n"
+                + new_non_astroid_messages
+                + "\n</details>\n\n"
+            )
+
+        # Create comment for missing messages
+        count = 1
+        if missing_messages["messages"]:
+            comment += "The following messages are no longer emitted:\n\n<details>\n\n"
+            print("No longer emitted:")
+        for message in missing_messages["messages"]:
+            comment += f"{count}) {message['symbol']}:\n*{message['message']}*\n"
+            filepath = str(
+                PurePosixPath(message["path"]).relative_to(
+                    self.packages[package].clone_directory
+                )
+            )
+            assert not self.packages[package].url.endswith(
+                ".git"
+            ), "You don't need the .git at the end of the github url."
+            comment += (
+                f"{self.packages[package].url}"
+                f"/blob/{new_messages['commit']}/{filepath}#L{message['line']}\n"
+            )
+            count += 1
+            print(message)
+        if missing_messages:
+            comment += "\n</details>\n\n"
+        return comment

-    def _truncate_comment(self, comment: str) ->str:
+    def _truncate_comment(self, comment: str) -> str:
         """GitHub allows only a set number of characters in a comment."""
-        pass
+        hash_information = (
+            f"*This comment was generated for commit {self.config.commit}*"
+        )
+        if len(comment) + len(hash_information) >= MAX_GITHUB_COMMENT_LENGTH:
+            truncation_information = (
+                f"*This comment was truncated because GitHub allows only"
+                f" {MAX_GITHUB_COMMENT_LENGTH} characters in a comment.*"
+            )
+            max_len = (
+                MAX_GITHUB_COMMENT_LENGTH
+                - len(hash_information)
+                - len(truncation_information)
+            )
+            comment = f"{comment[:max_len - 10]}...\n\n{truncation_information}\n\n"
+        comment += hash_information
+        return comment
diff --git a/pylint/testutils/_primer/primer_prepare_command.py b/pylint/testutils/_primer/primer_prepare_command.py
index 6ceb3a886..27e216bd5 100644
--- a/pylint/testutils/_primer/primer_prepare_command.py
+++ b/pylint/testutils/_primer/primer_prepare_command.py
@@ -1,9 +1,48 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
 from __future__ import annotations
+
 import sys
+
 from git.cmd import Git
 from git.repo import Repo
+
 from pylint.testutils._primer.primer_command import PrimerCommand


 class PrepareCommand(PrimerCommand):
-    pass
+    def run(self) -> None:
+        commit_string = ""
+        version_string = ".".join(str(x) for x in sys.version_info[:2])
+        # Shorten the SHA to avoid exceeding GitHub's 512 char ceiling
+        if self.config.clone:
+            for package, data in self.packages.items():
+                local_commit = data.lazy_clone()
+                print(f"Cloned '{package}' at commit '{local_commit}'.")
+                commit_string += local_commit[:8] + "_"
+        elif self.config.check:
+            for package, data in self.packages.items():
+                local_commit = Repo(data.clone_directory).head.object.hexsha
+                print(f"Found '{package}' at commit '{local_commit}'.")
+                commit_string += local_commit[:8] + "_"
+        elif self.config.make_commit_string:
+            for package, data in self.packages.items():
+                remote_sha1_commit = (
+                    Git().ls_remote(data.url, data.branch).split("\t")[0][:8]
+                )
+                print(f"'{package}' remote is at commit '{remote_sha1_commit}'.")
+                commit_string += remote_sha1_commit + "_"
+        elif self.config.read_commit_string:
+            with open(
+                self.primer_directory / f"commit_string_{version_string}.txt",
+                encoding="utf-8",
+            ) as f:
+                print(f.read())
+        if commit_string:
+            with open(
+                self.primer_directory / f"commit_string_{version_string}.txt",
+                "w",
+                encoding="utf-8",
+            ) as f:
+                f.write(commit_string)
diff --git a/pylint/testutils/_primer/primer_run_command.py b/pylint/testutils/_primer/primer_run_command.py
index 9a5876619..96a1440e7 100644
--- a/pylint/testutils/_primer/primer_run_command.py
+++ b/pylint/testutils/_primer/primer_run_command.py
@@ -1,21 +1,109 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import json
 import sys
 import warnings
 from io import StringIO
+
 from git.repo import Repo
+
 from pylint.lint import Run
 from pylint.message import Message
 from pylint.reporters.json_reporter import JSONReporter, OldJsonExport
 from pylint.testutils._primer.package_to_lint import PackageToLint
-from pylint.testutils._primer.primer_command import PackageData, PackageMessages, PrimerCommand
-GITHUB_CRASH_TEMPLATE_LOCATION = '/home/runner/.cache'
-CRASH_TEMPLATE_INTRO = 'There is a pre-filled template'
+from pylint.testutils._primer.primer_command import (
+    PackageData,
+    PackageMessages,
+    PrimerCommand,
+)
+
+GITHUB_CRASH_TEMPLATE_LOCATION = "/home/runner/.cache"
+CRASH_TEMPLATE_INTRO = "There is a pre-filled template"


 class RunCommand(PrimerCommand):
+    def run(self) -> None:
+        packages: PackageMessages = {}
+        fatal_msgs: list[Message] = []
+        package_data_iter = (
+            self.packages.items()
+            if self.config.batches is None
+            else list(self.packages.items())[
+                self.config.batchIdx :: self.config.batches
+            ]
+        )
+        for package, data in package_data_iter:
+            messages, p_fatal_msgs = self._lint_package(package, data)
+            fatal_msgs += p_fatal_msgs
+            local_commit = Repo(data.clone_directory).head.object.hexsha
+            packages[package] = PackageData(commit=local_commit, messages=messages)
+        path = self.primer_directory / (
+            f"output_{'.'.join(str(i) for i in sys.version_info[:2])}_{self.config.type}"
+            + (f"_batch{self.config.batchIdx}.txt" if self.config.batches else "")
+        )
+        print(f"Writing result in {path}")
+        with open(path, "w", encoding="utf-8") as f:
+            json.dump(packages, f)
+        # Assert that a PR run does not introduce new fatal errors
+        if self.config.type == "pr":
+            plural = "s" if len(fatal_msgs) > 1 else ""
+            assert (
+                not fatal_msgs
+            ), f"We encountered {len(fatal_msgs)} fatal error message{plural} (see log)."

     @staticmethod
-    def _filter_fatal_errors(messages: list[OldJsonExport]) ->list[Message]:
+    def _filter_fatal_errors(
+        messages: list[OldJsonExport],
+    ) -> list[Message]:
         """Separate fatal errors so we can report them independently."""
-        pass
+        fatal_msgs: list[Message] = []
+        for raw_message in messages:
+            message = JSONReporter.deserialize(raw_message)
+            if message.category == "fatal":
+                if GITHUB_CRASH_TEMPLATE_LOCATION in message.msg:
+                    # Remove the crash template location if we're running on GitHub.
+                    # We were falsely getting "new" errors when the timestamp changed.
+                    message.msg = message.msg.rsplit(CRASH_TEMPLATE_INTRO)[0]
+                fatal_msgs.append(message)
+        return fatal_msgs
+
+    @staticmethod
+    def _print_msgs(msgs: list[Message]) -> str:
+        return "\n".join(f"- {JSONReporter.serialize(m)}" for m in msgs)
+
+    def _lint_package(
+        self, package_name: str, data: PackageToLint
+    ) -> tuple[list[OldJsonExport], list[Message]]:
+        # We want to test all the code we can
+        enables = ["--enable-all-extensions", "--enable=all"]
+        # Duplicate code takes too long and is relatively safe
+        # TODO: Find a way to allow cyclic-import and compare output correctly
+        disables = ["--disable=duplicate-code,cyclic-import"]
+        additional = ["--clear-cache-post-run=y"]
+        arguments = data.pylint_args + enables + disables + additional
+        output = StringIO()
+        reporter = JSONReporter(output)
+        print(f"Running 'pylint {', '.join(arguments)}'")
+        pylint_exit_code = -1
+        try:
+            Run(arguments, reporter=reporter)
+        except SystemExit as e:
+            pylint_exit_code = int(e.code)  # type: ignore[arg-type]
+        readable_messages: str = output.getvalue()
+        messages: list[OldJsonExport] = json.loads(readable_messages)
+        fatal_msgs: list[Message] = []
+        if pylint_exit_code % 2 == 0:
+            print(f"Successfully primed {package_name}.")
+        else:
+            fatal_msgs = self._filter_fatal_errors(messages)
+            if fatal_msgs:
+                warnings.warn(
+                    f"Encountered fatal errors while priming {package_name} !\n"
+                    f"{self._print_msgs(fatal_msgs)}\n\n",
+                    stacklevel=2,
+                )
+        return messages, fatal_msgs
diff --git a/pylint/testutils/_run.py b/pylint/testutils/_run.py
index 8752caca2..f42cb8d6a 100644
--- a/pylint/testutils/_run.py
+++ b/pylint/testutils/_run.py
@@ -1,17 +1,26 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Classes and functions used to mimic normal pylint runs.

 This module is considered private and can change at any time.
 """
+
 from __future__ import annotations
+
 from collections.abc import Sequence
+
 from pylint.lint import Run as LintRun
 from pylint.reporters.base_reporter import BaseReporter
 from pylint.testutils.lint_module_test import PYLINTRC


-def _add_rcfile_default_pylintrc(args: list[str]) ->list[str]:
+def _add_rcfile_default_pylintrc(args: list[str]) -> list[str]:
     """Add a default pylintrc with the rcfile option in a list of pylint args."""
-    pass
+    if not any("--rcfile" in arg for arg in args):
+        args.insert(0, f"--rcfile={PYLINTRC}")
+    return args


 class _Run(LintRun):
@@ -22,7 +31,11 @@ class _Run(LintRun):
     But we want to see if the changes to the default break tests.
     """

-    def __init__(self, args: Sequence[str], reporter: (BaseReporter | None)
-        =None, exit: bool=True) ->None:
+    def __init__(
+        self,
+        args: Sequence[str],
+        reporter: BaseReporter | None = None,
+        exit: bool = True,  # pylint: disable=redefined-builtin
+    ) -> None:
         args = _add_rcfile_default_pylintrc(list(args))
         super().__init__(args, reporter, exit)
diff --git a/pylint/testutils/checker_test_case.py b/pylint/testutils/checker_test_case.py
index fa4a782ae..3ffbbc44a 100644
--- a/pylint/testutils/checker_test_case.py
+++ b/pylint/testutils/checker_test_case.py
@@ -1,8 +1,15 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import contextlib
 from collections.abc import Generator, Iterator
 from typing import Any
+
 from astroid import nodes
+
 from pylint.constants import IS_PYPY, PY39_PLUS
 from pylint.testutils.global_test_linter import linter
 from pylint.testutils.output_line import MessageTest
@@ -12,17 +19,29 @@ from pylint.utils import ASTWalker

 class CheckerTestCase:
     """A base testcase class for unit testing individual checker classes."""
+
+    # TODO: Figure out way to type this as type[BaseChecker] while also
+    # setting self.checker correctly.
     CHECKER_CLASS: Any
     CONFIG: dict[str, Any] = {}

+    def setup_method(self) -> None:
+        self.linter = UnittestLinter()
+        self.checker = self.CHECKER_CLASS(self.linter)
+        for key, value in self.CONFIG.items():
+            setattr(self.checker.linter.config, key, value)
+        self.checker.open()
+
     @contextlib.contextmanager
-    def assertNoMessages(self) ->Iterator[None]:
+    def assertNoMessages(self) -> Iterator[None]:
         """Assert that no messages are added by the given method."""
-        pass
+        with self.assertAddsMessages():
+            yield

     @contextlib.contextmanager
-    def assertAddsMessages(self, *messages: MessageTest, ignore_position:
-        bool=False) ->Generator[None, None, None]:
+    def assertAddsMessages(
+        self, *messages: MessageTest, ignore_position: bool = False
+    ) -> Generator[None, None, None]:
         """Assert that exactly the given method adds the given messages.

         The list of messages must exactly match *all* the messages added by the
@@ -33,8 +52,36 @@ class CheckerTestCase:
         arguments (line, col_offset, ...) will be skipped. This can be used to
         just test messages for the correct node.
         """
-        pass
+        yield
+        got = self.linter.release_messages()
+        no_msg = "No message."
+        expected = "\n".join(repr(m) for m in messages) or no_msg
+        got_str = "\n".join(repr(m) for m in got) or no_msg
+        msg = (
+            "Expected messages did not match actual.\n"
+            f"\nExpected:\n{expected}\n\nGot:\n{got_str}\n"
+        )
+
+        assert len(messages) == len(got), msg
+
+        for expected_msg, gotten_msg in zip(messages, got):
+            assert expected_msg.msg_id == gotten_msg.msg_id, msg
+            assert expected_msg.node == gotten_msg.node, msg
+            assert expected_msg.args == gotten_msg.args, msg
+            assert expected_msg.confidence == gotten_msg.confidence, msg
+
+            if ignore_position:
+                # Do not check for line, col_offset etc...
+                continue
+
+            assert expected_msg.line == gotten_msg.line, msg
+            assert expected_msg.col_offset == gotten_msg.col_offset, msg
+            if not IS_PYPY or PY39_PLUS:
+                assert expected_msg.end_line == gotten_msg.end_line, msg
+                assert expected_msg.end_col_offset == gotten_msg.end_col_offset, msg

-    def walk(self, node: nodes.NodeNG) ->None:
+    def walk(self, node: nodes.NodeNG) -> None:
         """Recursive walk on the given node."""
-        pass
+        walker = ASTWalker(linter)
+        walker.add_checker(self.checker)
+        walker.walk(node)
diff --git a/pylint/testutils/configuration_test.py b/pylint/testutils/configuration_test.py
index 3c00e103a..a38c8646b 100644
--- a/pylint/testutils/configuration_test.py
+++ b/pylint/testutils/configuration_test.py
@@ -1,5 +1,11 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Utility functions for configuration testing."""
+
 from __future__ import annotations
+
 import copy
 import json
 import logging
@@ -7,40 +13,140 @@ import unittest
 from pathlib import Path
 from typing import Any, Dict
 from unittest.mock import Mock
+
 from pylint.lint import Run
+
+# We use Any in this typing because the configuration contains real objects and constants
+# that could be a lot of things.
 ConfigurationValue = Any
 PylintConfiguration = Dict[str, ConfigurationValue]


-def get_expected_or_default(tested_configuration_file: (str | Path), suffix:
-    str, default: str) ->str:
+def get_expected_or_default(
+    tested_configuration_file: str | Path,
+    suffix: str,
+    default: str,
+) -> str:
     """Return the expected value from the file if it exists, or the given default."""
-    pass
+    expected = default
+    path = Path(tested_configuration_file)
+    expected_result_path = path.parent / f"{path.stem}.{suffix}"
+    if expected_result_path.exists():
+        with open(expected_result_path, encoding="utf8") as f:
+            expected = f.read()
+        # logging is helpful to realize your file is not taken into
+        # account after a misspelling of the file name. The output of the
+        # program is checked during the test so printing messes with the result.
+        logging.info("%s exists.", expected_result_path)
+    else:
+        logging.info("%s not found, using '%s'.", expected_result_path, default)
+    return expected


-EXPECTED_CONF_APPEND_KEY = 'functional_append'
-EXPECTED_CONF_REMOVE_KEY = 'functional_remove'
+EXPECTED_CONF_APPEND_KEY = "functional_append"
+EXPECTED_CONF_REMOVE_KEY = "functional_remove"


-def get_expected_configuration(configuration_path: str,
-    default_configuration: PylintConfiguration) ->PylintConfiguration:
+def get_expected_configuration(
+    configuration_path: str, default_configuration: PylintConfiguration
+) -> PylintConfiguration:
     """Get the expected parsed configuration of a configuration functional test."""
-    pass
+    result = copy.deepcopy(default_configuration)
+    config_as_json = get_expected_or_default(
+        configuration_path, suffix="result.json", default="{}"
+    )
+    to_override = json.loads(config_as_json)
+    for key, value in to_override.items():
+        if key == EXPECTED_CONF_APPEND_KEY:
+            for fkey, fvalue in value.items():
+                result[fkey] += fvalue
+        elif key == EXPECTED_CONF_REMOVE_KEY:
+            for fkey, fvalue in value.items():
+                new_value = []
+                for old_value in result[fkey]:
+                    if old_value not in fvalue:
+                        new_value.append(old_value)
+                result[fkey] = new_value
+        else:
+            result[key] = value
+    return result


-def get_related_files(tested_configuration_file: (str | Path),
-    suffix_filter: str) ->list[Path]:
+def get_related_files(
+    tested_configuration_file: str | Path, suffix_filter: str
+) -> list[Path]:
     """Return all the file related to a test conf file ending with a suffix."""
-    pass
+    conf_path = Path(tested_configuration_file)
+    return [
+        p
+        for p in conf_path.parent.iterdir()
+        if str(p.stem).startswith(conf_path.stem) and str(p).endswith(suffix_filter)
+    ]


-def get_expected_output(configuration_path: (str | Path),
-    user_specific_path: Path) ->tuple[int, str]:
+def get_expected_output(
+    configuration_path: str | Path, user_specific_path: Path
+) -> tuple[int, str]:
     """Get the expected output of a functional test."""
-    pass
+    exit_code = 0
+    msg = (
+        "we expect a single file of the form 'filename.32.out' where 'filename' represents "
+        "the name of the configuration file, and '32' the expected error code."
+    )
+    possible_out_files = get_related_files(configuration_path, suffix_filter="out")
+    if len(possible_out_files) > 1:
+        logging.error(
+            "Too much .out files for %s %s.",
+            configuration_path,
+            msg,
+        )
+        return -1, "out file is broken"
+    if not possible_out_files:
+        # logging is helpful to see what the expected exit code is and why.
+        # The output of the program is checked during the test so printing
+        # messes with the result.
+        logging.info(".out file does not exists, so the expected exit code is 0")
+        return 0, ""
+    path = possible_out_files[0]
+    try:
+        exit_code = int(str(path.stem).rsplit(".", maxsplit=1)[-1])
+    except Exception as e:  # pylint: disable=broad-except
+        logging.error(
+            "Wrong format for .out file name for %s %s: %s",
+            configuration_path,
+            msg,
+            e,
+        )
+        return -1, "out file is broken"
+
+    output = get_expected_or_default(
+        configuration_path, suffix=f"{exit_code}.out", default=""
+    )
+    logging.info(
+        "Output exists for %s so the expected exit code is %s",
+        configuration_path,
+        exit_code,
+    )
+    return exit_code, output.format(
+        abspath=configuration_path,
+        relpath=Path(configuration_path).relative_to(user_specific_path),
+    )


-def run_using_a_configuration_file(configuration_path: (Path | str),
-    file_to_lint: str=__file__) ->tuple[Mock, Mock, Run]:
+def run_using_a_configuration_file(
+    configuration_path: Path | str, file_to_lint: str = __file__
+) -> tuple[Mock, Mock, Run]:
     """Simulate a run with a configuration without really launching the checks."""
-    pass
+    configuration_path = str(configuration_path)
+    args = ["--rcfile", configuration_path, file_to_lint]
+    # We do not capture the `SystemExit` as then the `runner` variable
+    # would not be accessible outside the `with` block.
+    with unittest.mock.patch("sys.exit") as mocked_exit:
+        # Do not actually run checks, that could be slow. We don't mock
+        # `PyLinter.check`: it calls `PyLinter.initialize` which is
+        # needed to properly set up messages inclusion/exclusion
+        # in `_msg_states`, used by `is_message_enabled`.
+        check = "pylint.lint.pylinter.check_parallel"
+        with unittest.mock.patch(check) as mocked_check_parallel:
+            runner = Run(args)
+    return mocked_exit, mocked_check_parallel, runner
diff --git a/pylint/testutils/constants.py b/pylint/testutils/constants.py
index b1477583d..956b44096 100644
--- a/pylint/testutils/constants.py
+++ b/pylint/testutils/constants.py
@@ -1,14 +1,29 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 import operator
 import re
 import sys
 from pathlib import Path
-SYS_VERS_STR = '%d%d%d' % sys.version_info[:3]
-TITLE_UNDERLINES = ['', '=', '-', '.']
-UPDATE_OPTION = '--update-functional-output'
-UPDATE_FILE = Path('pylint-functional-test-update')
-_MESSAGE = {'msg': '[a-z][a-z\\-]+'}
+
+SYS_VERS_STR = (
+    "%d%d%d" % sys.version_info[:3]  # pylint: disable=consider-using-f-string
+)
+TITLE_UNDERLINES = ["", "=", "-", "."]
+UPDATE_OPTION = "--update-functional-output"
+UPDATE_FILE = Path("pylint-functional-test-update")
+# Common sub-expressions.
+_MESSAGE = {"msg": r"[a-z][a-z\-]+"}
+# Matches a #,
+#  - followed by a comparison operator and a Python version (optional),
+#  - followed by a line number with a +/- (optional),
+#  - followed by a list of bracketed message symbols.
+# Used to extract expected messages from testdata files.
 _EXPECTED_RE = re.compile(
-    '\\s*#\\s*(?:(?P<line>[+-]?[0-9]+):)?(?:(?P<op>[><=]+) *(?P<version>[0-9.]+):)?\\s*\\[(?P<msgs>{msg}(?:,\\s*{msg})*)]'
-    .format(**_MESSAGE))
-_OPERATORS = {'>': operator.gt, '<': operator.lt, '>=': operator.ge, '<=':
-    operator.le}
+    r"\s*#\s*(?:(?P<line>[+-]?[0-9]+):)?"  # pylint: disable=consider-using-f-string
+    r"(?:(?P<op>[><=]+) *(?P<version>[0-9.]+):)?"
+    r"\s*\[(?P<msgs>{msg}(?:,\s*{msg})*)]".format(**_MESSAGE)
+)
+
+_OPERATORS = {">": operator.gt, "<": operator.lt, ">=": operator.ge, "<=": operator.le}
diff --git a/pylint/testutils/decorator.py b/pylint/testutils/decorator.py
index 50a300f10..c20692132 100644
--- a/pylint/testutils/decorator.py
+++ b/pylint/testutils/decorator.py
@@ -1,15 +1,37 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import functools
 from collections.abc import Callable
 from typing import Any
+
 from pylint.testutils.checker_test_case import CheckerTestCase


-def set_config(**kwargs: Any) ->Callable[[Callable[..., None]], Callable[
-    ..., None]]:
+def set_config(**kwargs: Any) -> Callable[[Callable[..., None]], Callable[..., None]]:
     """Decorator for setting an option on the linter.

     Passing the args and kwargs back to the test function itself
     allows this decorator to be used on parameterized test cases.
     """
-    pass
+
+    def _wrapper(fun: Callable[..., None]) -> Callable[..., None]:
+        @functools.wraps(fun)
+        def _forward(
+            self: CheckerTestCase, *args: Any, **test_function_kwargs: Any
+        ) -> None:
+            """Set option via argparse."""
+            for key, value in kwargs.items():
+                self.linter.set_option(key, value)
+
+            # Reopen the checker in case it is interested in the configuration change
+            self.checker.open()
+
+            fun(self, *args, **test_function_kwargs)
+
+        return _forward
+
+    return _wrapper
diff --git a/pylint/testutils/functional/find_functional_tests.py b/pylint/testutils/functional/find_functional_tests.py
index 79a06c358..f2e636687 100644
--- a/pylint/testutils/functional/find_functional_tests.py
+++ b/pylint/testutils/functional/find_functional_tests.py
@@ -1,31 +1,139 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import os
 from collections.abc import Iterator
 from pathlib import Path
+
 from pylint.testutils.functional.test_file import FunctionalTestFile
+
 REASONABLY_DISPLAYABLE_VERTICALLY = 49
 """'Wet finger' number of files that are reasonable to display by an IDE.

 'Wet finger' as in 'in my settings there are precisely this many'.
 """
-IGNORED_PARENT_DIRS = {'deprecated_relative_import', 'ext', 'regression',
-    'regression_02'}
+
+IGNORED_PARENT_DIRS = {
+    "deprecated_relative_import",
+    "ext",
+    "regression",
+    "regression_02",
+}
 """Direct parent directories that should be ignored."""
-IGNORED_PARENT_PARENT_DIRS = {'docparams', 'deprecated_relative_import', 'ext'}
+
+IGNORED_PARENT_PARENT_DIRS = {
+    "docparams",
+    "deprecated_relative_import",
+    "ext",
+}
 """Parents of direct parent directories that should be ignored."""


-def get_functional_test_files_from_directory(input_dir: (Path | str),
-    max_file_per_directory: int=REASONABLY_DISPLAYABLE_VERTICALLY) ->list[
-    FunctionalTestFile]:
+def get_functional_test_files_from_directory(
+    input_dir: Path | str,
+    max_file_per_directory: int = REASONABLY_DISPLAYABLE_VERTICALLY,
+) -> list[FunctionalTestFile]:
     """Get all functional tests in the input_dir."""
-    pass
+    suite = []
+
+    _check_functional_tests_structure(Path(input_dir), max_file_per_directory)

+    for dirpath, dirnames, filenames in os.walk(input_dir):
+        if dirpath.endswith("__pycache__"):
+            continue
+        dirnames.sort()
+        filenames.sort()
+        for filename in filenames:
+            if filename != "__init__.py" and filename.endswith(".py"):
+                suite.append(FunctionalTestFile(dirpath, filename))
+    return suite

-def _check_functional_tests_structure(directory: Path,
-    max_file_per_directory: int) ->None:
+
+def _check_functional_tests_structure(
+    directory: Path, max_file_per_directory: int
+) -> None:
     """Check if test directories follow correct file/folder structure.

     Ignore underscored directories or files.
     """
-    pass
+    if Path(directory).stem.startswith("_"):
+        return
+
+    files: set[Path] = set()
+    dirs: set[Path] = set()
+
+    def _get_files_from_dir(
+        path: Path, violations: list[tuple[Path, int]]
+    ) -> list[Path]:
+        """Return directories and files from a directory and handles violations."""
+        files_without_leading_underscore = list(
+            p for p in path.iterdir() if not p.stem.startswith("_")
+        )
+        if len(files_without_leading_underscore) > max_file_per_directory:
+            violations.append((path, len(files_without_leading_underscore)))
+        return files_without_leading_underscore
+
+    def walk(path: Path) -> Iterator[Path]:
+        violations: list[tuple[Path, int]] = []
+        violations_msgs: set[str] = set()
+        parent_dir_files = _get_files_from_dir(path, violations)
+        error_msg = (
+            "The following directory contains too many functional tests files:\n"
+        )
+        for _file_or_dir in parent_dir_files:
+            if _file_or_dir.is_dir():
+                _files = _get_files_from_dir(_file_or_dir, violations)
+                yield _file_or_dir.resolve()
+                try:
+                    yield from walk(_file_or_dir)
+                except AssertionError as e:
+                    violations_msgs.add(str(e).replace(error_msg, ""))
+            else:
+                yield _file_or_dir.resolve()
+        if violations or violations_msgs:
+            _msg = error_msg
+            for offending_file, number in violations:
+                _msg += f"- {offending_file}: {number} when the max is {max_file_per_directory}\n"
+            for error_msg in violations_msgs:
+                _msg += error_msg
+            raise AssertionError(_msg)
+
+    # Collect all sub-directories and files in directory
+    for file_or_dir in walk(directory):
+        if file_or_dir.is_dir():
+            dirs.add(file_or_dir)
+        elif file_or_dir.suffix == ".py":
+            files.add(file_or_dir)
+
+    directory_does_not_exists: list[tuple[Path, Path]] = []
+    misplaced_file: list[Path] = []
+    for file in files:
+        possible_dir = file.parent / file.stem.split("_")[0]
+        if possible_dir.exists():
+            directory_does_not_exists.append((file, possible_dir))
+        # Exclude some directories as they follow a different structure
+        if (
+            not len(file.parent.stem) == 1  # First letter sub-directories
+            and file.parent.stem not in IGNORED_PARENT_DIRS
+            and file.parent.parent.stem not in IGNORED_PARENT_PARENT_DIRS
+        ):
+            if not file.stem.startswith(file.parent.stem):
+                misplaced_file.append(file)
+
+    if directory_does_not_exists or misplaced_file:
+        msg = "The following functional tests are disorganized:\n"
+        for file, possible_dir in directory_does_not_exists:
+            msg += (
+                f"- In '{directory}', '{file.relative_to(directory)}' "
+                f"should go in '{possible_dir.relative_to(directory)}'\n"
+            )
+        for file in misplaced_file:
+            msg += (
+                f"- In '{directory}', {file.relative_to(directory)} should go in a directory"
+                f" that starts with the first letters"
+                f" of '{file.stem}' (not '{file.parent.stem}')\n"
+            )
+        raise AssertionError(msg)
diff --git a/pylint/testutils/functional/lint_module_output_update.py b/pylint/testutils/functional/lint_module_output_update.py
index d8e3eb6ac..38ed465aa 100644
--- a/pylint/testutils/functional/lint_module_output_update.py
+++ b/pylint/testutils/functional/lint_module_output_update.py
@@ -1,6 +1,12 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import csv
 import os
+
 from pylint.testutils.lint_module_test import LintModuleTest, MessageCounter
 from pylint.testutils.output_line import OutputLine

@@ -10,14 +16,28 @@ class LintModuleOutputUpdate(LintModuleTest):
     checked.
     """

-
     class TestDialect(csv.excel):
         """Dialect used by the csv writer."""
-        delimiter = ':'
-        lineterminator = '\n'
-    csv.register_dialect('test', TestDialect)

-    def _check_output_text(self, _: MessageCounter, expected_output: list[
-        OutputLine], actual_output: list[OutputLine]) ->None:
+        delimiter = ":"
+        lineterminator = "\n"
+
+    csv.register_dialect("test", TestDialect)
+
+    def _check_output_text(
+        self,
+        _: MessageCounter,
+        expected_output: list[OutputLine],
+        actual_output: list[OutputLine],
+    ) -> None:
         """Overwrite or remove the expected output file based on actual output."""
-        pass
+        # Remove the expected file if no output is actually emitted and a file exists
+        if not actual_output:
+            if os.path.exists(self._test_file.expected_output):
+                os.remove(self._test_file.expected_output)
+            return
+        # Write file with expected output
+        with open(self._test_file.expected_output, "w", encoding="utf-8") as f:
+            writer = csv.writer(f, dialect="test")
+            for line in actual_output:
+                writer.writerow(line.to_csv())
diff --git a/pylint/testutils/functional/test_file.py b/pylint/testutils/functional/test_file.py
index 6fe0e17be..37ba3a5fc 100644
--- a/pylint/testutils/functional/test_file.py
+++ b/pylint/testutils/functional/test_file.py
@@ -1,13 +1,18 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import configparser
 from collections.abc import Callable
 from os.path import basename, exists, join
 from typing import TypedDict


-def parse_python_version(ver_str: str) ->tuple[int, ...]:
+def parse_python_version(ver_str: str) -> tuple[int, ...]:
     """Convert python version to a tuple of integers for easy comparison."""
-    pass
+    return tuple(int(digit) for digit in ver_str.split("."))


 class NoFileError(Exception):
@@ -24,29 +29,84 @@ class TestFileOptions(TypedDict):
     exclude_from_minimal_messages_config: bool


-POSSIBLE_TEST_OPTIONS = {'min_pyver', 'max_pyver', 'min_pyver_end_position',
-    'requires', 'except_implementations', 'exclude_platforms',
-    'exclude_from_minimal_messages_config'}
+# mypy needs something literal; we can't create this dynamically from TestFileOptions
+POSSIBLE_TEST_OPTIONS = {
+    "min_pyver",
+    "max_pyver",
+    "min_pyver_end_position",
+    "requires",
+    "except_implementations",
+    "exclude_platforms",
+    "exclude_from_minimal_messages_config",
+}


 class FunctionalTestFile:
     """A single functional test case file with options."""
+
     _CONVERTERS: dict[str, Callable[[str], tuple[int, ...] | list[str]]] = {
-        'min_pyver': parse_python_version, 'max_pyver':
-        parse_python_version, 'min_pyver_end_position':
-        parse_python_version, 'requires': lambda s: [i.strip() for i in s.
-        split(',')], 'except_implementations': lambda s: [i.strip() for i in
-        s.split(',')], 'exclude_platforms': lambda s: [i.strip() for i in s
-        .split(',')]}
-
-    def __init__(self, directory: str, filename: str) ->None:
+        "min_pyver": parse_python_version,
+        "max_pyver": parse_python_version,
+        "min_pyver_end_position": parse_python_version,
+        "requires": lambda s: [i.strip() for i in s.split(",")],
+        "except_implementations": lambda s: [i.strip() for i in s.split(",")],
+        "exclude_platforms": lambda s: [i.strip() for i in s.split(",")],
+    }
+
+    def __init__(self, directory: str, filename: str) -> None:
         self._directory = directory
-        self.base = filename.replace('.py', '')
-        self.options: TestFileOptions = {'min_pyver': (2, 5), 'max_pyver':
-            (4, 0), 'min_pyver_end_position': (3, 8), 'requires': [],
-            'except_implementations': [], 'exclude_platforms': [],
-            'exclude_from_minimal_messages_config': False}
+        self.base = filename.replace(".py", "")
+        # TODO:4.0: Deprecate FunctionalTestFile.options and related code
+        # We should just parse these options like a normal configuration file.
+        self.options: TestFileOptions = {
+            "min_pyver": (2, 5),
+            "max_pyver": (4, 0),
+            "min_pyver_end_position": (3, 8),
+            "requires": [],
+            "except_implementations": [],
+            "exclude_platforms": [],
+            "exclude_from_minimal_messages_config": False,
+        }
         self._parse_options()

-    def __repr__(self) ->str:
-        return f'FunctionalTest:{self.base}'
+    def __repr__(self) -> str:
+        return f"FunctionalTest:{self.base}"
+
+    def _parse_options(self) -> None:
+        cp = configparser.ConfigParser()
+        cp.add_section("testoptions")
+        try:
+            cp.read(self.option_file)
+        except NoFileError:
+            pass
+
+        for name, value in cp.items("testoptions"):
+            conv = self._CONVERTERS.get(name, lambda v: v)
+
+            assert (
+                name in POSSIBLE_TEST_OPTIONS
+            ), f"[testoptions]' can only contains one of {POSSIBLE_TEST_OPTIONS} and had '{name}'"
+            self.options[name] = conv(value)  # type: ignore[literal-required]
+
+    @property
+    def option_file(self) -> str:
+        return self._file_type(".rc")
+
+    @property
+    def module(self) -> str:
+        package = basename(self._directory)
+        return ".".join([package, self.base])
+
+    @property
+    def expected_output(self) -> str:
+        return self._file_type(".txt", check_exists=False)
+
+    @property
+    def source(self) -> str:
+        return self._file_type(".py")
+
+    def _file_type(self, ext: str, check_exists: bool = True) -> str:
+        name = join(self._directory, self.base + ext)
+        if not check_exists or exists(name):
+            return name
+        raise NoFileError(f"Cannot find '{name}'.")
diff --git a/pylint/testutils/get_test_info.py b/pylint/testutils/get_test_info.py
index 4d1b573f1..eb2c78cfd 100644
--- a/pylint/testutils/get_test_info.py
+++ b/pylint/testutils/get_test_info.py
@@ -1,11 +1,18 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from glob import glob
 from os.path import basename, join, splitext
+
 from pylint.testutils.constants import SYS_VERS_STR


-def _get_tests_info(input_dir: str, msg_dir: str, prefix: str, suffix: str
-    ) ->list[tuple[str, str]]:
+def _get_tests_info(
+    input_dir: str, msg_dir: str, prefix: str, suffix: str
+) -> list[tuple[str, str]]:
     """Get python input examples and output messages.

     We use following conventions for input files and messages:
@@ -16,4 +23,28 @@ def _get_tests_info(input_dir: str, msg_dir: str, prefix: str, suffix: str
         message for python >=  x.y ->  message =  <name>_pyxy.txt
         lower versions             ->  message with highest num
     """
-    pass
+    result = []
+    for fname in glob(join(input_dir, prefix + "*" + suffix)):
+        infile = basename(fname)
+        fbase = splitext(infile)[0]
+        # Filter input files:
+        pyrestr = fbase.rsplit("_py", 1)[-1]  # like _26 or 26
+        if pyrestr.isdigit():  # '24', '25'...
+            if pyrestr.isdigit() and int(SYS_VERS_STR) < int(pyrestr):
+                continue
+        if pyrestr.startswith("_") and pyrestr[1:].isdigit():
+            # skip test for higher python versions
+            if pyrestr[1:].isdigit() and int(SYS_VERS_STR) >= int(pyrestr[1:]):
+                continue
+        messages = glob(join(msg_dir, fbase + "*.txt"))
+        # the last one will be without ext, i.e. for all or upper versions:
+        if messages:
+            for outfile in sorted(messages, reverse=True):
+                py_rest = outfile.rsplit("_py", 1)[-1][:-4]
+                if py_rest.isdigit() and int(SYS_VERS_STR) >= int(py_rest):
+                    break
+        else:
+            # This will provide an error message indicating the missing filename.
+            outfile = join(msg_dir, fbase + ".txt")
+        result.append((infile, outfile))
+    return result
diff --git a/pylint/testutils/global_test_linter.py b/pylint/testutils/global_test_linter.py
index 6ea5f1796..2e0d3d170 100644
--- a/pylint/testutils/global_test_linter.py
+++ b/pylint/testutils/global_test_linter.py
@@ -1,4 +1,20 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from pylint import checkers
 from pylint.lint import PyLinter
 from pylint.testutils.reporter_for_tests import GenericTestReporter
+
+
+def create_test_linter() -> PyLinter:
+    test_reporter = GenericTestReporter()
+    linter_ = PyLinter()
+    linter_.set_reporter(test_reporter)
+    linter_.config.persistent = 0
+    checkers.initialize(linter_)
+    return linter_
+
+
+# Can't be renamed to a constant (easily); it breaks countless tests
 linter = create_test_linter()
diff --git a/pylint/testutils/lint_module_test.py b/pylint/testutils/lint_module_test.py
index 646c8986f..48ee5a0b2 100644
--- a/pylint/testutils/lint_module_test.py
+++ b/pylint/testutils/lint_module_test.py
@@ -1,4 +1,9 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import csv
 import operator
 import platform
@@ -8,81 +13,150 @@ from io import StringIO
 from pathlib import Path
 from typing import Counter as CounterType
 from typing import TextIO, Tuple
+
 import pytest
 from _pytest.config import Config
+
 from pylint import checkers
 from pylint.config.config_initialization import _config_initialization
 from pylint.constants import IS_PYPY
 from pylint.lint import PyLinter
 from pylint.message.message import Message
 from pylint.testutils.constants import _EXPECTED_RE, _OPERATORS, UPDATE_OPTION
-from pylint.testutils.functional.test_file import FunctionalTestFile, NoFileError, parse_python_version
+
+# need to import from functional.test_file to avoid cyclic import
+from pylint.testutils.functional.test_file import (
+    FunctionalTestFile,
+    NoFileError,
+    parse_python_version,
+)
 from pylint.testutils.output_line import OutputLine
 from pylint.testutils.reporter_for_tests import FunctionalTestReporter
+
 MessageCounter = CounterType[Tuple[int, str]]
-PYLINTRC = Path(__file__).parent / 'testing_pylintrc'
+
+PYLINTRC = Path(__file__).parent / "testing_pylintrc"


 class LintModuleTest:
     maxDiff = None

-    def __init__(self, test_file: FunctionalTestFile, config: (Config |
-        None)=None) ->None:
+    def __init__(
+        self, test_file: FunctionalTestFile, config: Config | None = None
+    ) -> None:
         _test_reporter = FunctionalTestReporter()
         self._linter = PyLinter()
         self._linter.config.persistent = 0
         checkers.initialize(self._linter)
+
+        # See if test has its own .rc file, if so we use that one
         rc_file: Path | str = PYLINTRC
         try:
             rc_file = test_file.option_file
-            self._linter.disable('suppressed-message')
-            self._linter.disable('locally-disabled')
-            self._linter.disable('useless-suppression')
+            self._linter.disable("suppressed-message")
+            self._linter.disable("locally-disabled")
+            self._linter.disable("useless-suppression")
         except NoFileError:
             pass
+
         self._test_file = test_file
         try:
             args = [test_file.source]
         except NoFileError:
-            args = ['']
-        if config and config.getoption('minimal_messages_config'):
+            # If we're still raising NoFileError the actual source file doesn't exist
+            args = [""]
+        if config and config.getoption("minimal_messages_config"):
             with self._open_source_file() as f:
-                messages_to_enable = {msg[1] for msg in self.
-                    get_expected_messages(f)}
-                messages_to_enable.add('astroid-error')
-                messages_to_enable.add('fatal')
-                messages_to_enable.add('syntax-error')
-            args.extend(['--disable=all',
-                f"--enable={','.join(messages_to_enable)}"])
-        self._linter._arg_parser.add_argument('--min_pyver', type=
-            parse_python_version, default=(2, 5))
-        self._linter._arg_parser.add_argument('--max_pyver', type=
-            parse_python_version, default=(4, 0))
-        self._linter._arg_parser.add_argument('--min_pyver_end_position',
-            type=parse_python_version, default=(3, 8))
-        self._linter._arg_parser.add_argument('--requires', type=lambda s:
-            [i.strip() for i in s.split(',')], default=[])
-        self._linter._arg_parser.add_argument('--except_implementations',
-            type=lambda s: [i.strip() for i in s.split(',')], default=[])
-        self._linter._arg_parser.add_argument('--exclude_platforms', type=
-            lambda s: [i.strip() for i in s.split(',')], default=[])
+                messages_to_enable = {msg[1] for msg in self.get_expected_messages(f)}
+                # Always enable fatal errors
+                messages_to_enable.add("astroid-error")
+                messages_to_enable.add("fatal")
+                messages_to_enable.add("syntax-error")
+            args.extend(["--disable=all", f"--enable={','.join(messages_to_enable)}"])
+
+        # Add testoptions
+        self._linter._arg_parser.add_argument(
+            "--min_pyver", type=parse_python_version, default=(2, 5)
+        )
+        self._linter._arg_parser.add_argument(
+            "--max_pyver", type=parse_python_version, default=(4, 0)
+        )
+        self._linter._arg_parser.add_argument(
+            "--min_pyver_end_position", type=parse_python_version, default=(3, 8)
+        )
         self._linter._arg_parser.add_argument(
-            '--exclude_from_minimal_messages_config', default=False)
-        _config_initialization(self._linter, args_list=args, config_file=
-            rc_file, reporter=_test_reporter)
-        self._check_end_position = (sys.version_info >= self._linter.config
-            .min_pyver_end_position)
+            "--requires", type=lambda s: [i.strip() for i in s.split(",")], default=[]
+        )
+        self._linter._arg_parser.add_argument(
+            "--except_implementations",
+            type=lambda s: [i.strip() for i in s.split(",")],
+            default=[],
+        )
+        self._linter._arg_parser.add_argument(
+            "--exclude_platforms",
+            type=lambda s: [i.strip() for i in s.split(",")],
+            default=[],
+        )
+        self._linter._arg_parser.add_argument(
+            "--exclude_from_minimal_messages_config", default=False
+        )
+
+        _config_initialization(
+            self._linter, args_list=args, config_file=rc_file, reporter=_test_reporter
+        )
+
+        self._check_end_position = (
+            sys.version_info >= self._linter.config.min_pyver_end_position
+        )
+        # TODO: PY3.9: PyPy supports end_lineno from 3.9 and above
         if self._check_end_position and IS_PYPY:
-            self._check_end_position = sys.version_info >= (3, 9)
+            self._check_end_position = sys.version_info >= (3, 9)  # pragma: no cover
+
         self._config = config

-    def __str__(self) ->str:
-        return (
-            f'{self._test_file.base} ({self.__class__.__module__}.{self.__class__.__name__})'
+    def setUp(self) -> None:
+        if self._should_be_skipped_due_to_version():
+            pytest.skip(
+                f"Test cannot run with Python {sys.version.split(' ', maxsplit=1)[0]}."
             )
+        missing = []
+        for requirement in self._linter.config.requires:
+            try:
+                __import__(requirement)
+            except ImportError:
+                missing.append(requirement)
+        if missing:
+            pytest.skip(f"Requires {','.join(missing)} to be present.")
+        except_implementations = self._linter.config.except_implementations
+        if except_implementations:
+            if platform.python_implementation() in except_implementations:
+                msg = "Test cannot run with Python implementation %r"
+                pytest.skip(msg % platform.python_implementation())
+        excluded_platforms = self._linter.config.exclude_platforms
+        if excluded_platforms:
+            if sys.platform.lower() in excluded_platforms:
+                pytest.skip(f"Test cannot run on platform {sys.platform!r}")
+        if (
+            self._config
+            and self._config.getoption("minimal_messages_config")
+            and self._linter.config.exclude_from_minimal_messages_config
+        ):
+            pytest.skip("Test excluded from --minimal-messages-config")
+
+    def runTest(self) -> None:
+        self._runTest()
+
+    def _should_be_skipped_due_to_version(self) -> bool:
+        return (  # type: ignore[no-any-return]
+            sys.version_info < self._linter.config.min_pyver
+            or sys.version_info > self._linter.config.max_pyver
+        )
+
+    def __str__(self) -> str:
+        return f"{self._test_file.base} ({self.__class__.__module__}.{self.__class__.__name__})"

     @staticmethod
-    def get_expected_messages(stream: TextIO) ->MessageCounter:
+    def get_expected_messages(stream: TextIO) -> MessageCounter:
         """Parses a file and get expected messages.

         :param stream: File-like input stream.
@@ -90,21 +164,163 @@ class LintModuleTest:
         :returns: A dict mapping line,msg-symbol tuples to the count on this line.
         :rtype: dict
         """
-        pass
+        messages: MessageCounter = Counter()
+        for i, line in enumerate(stream):
+            match = _EXPECTED_RE.search(line)
+            if match is None:
+                continue
+            line = match.group("line")
+            if line is None:
+                lineno = i + 1
+            elif line.startswith(("+", "-")):
+                lineno = i + 1 + int(line)
+            else:
+                lineno = int(line)
+
+            version = match.group("version")
+            op = match.group("op")
+            if version:
+                required = parse_python_version(version)
+                if not _OPERATORS[op](sys.version_info, required):
+                    continue
+
+            for msg_id in match.group("msgs").split(","):
+                messages[lineno, msg_id.strip()] += 1
+        return messages

     @staticmethod
-    def multiset_difference(expected_entries: MessageCounter,
-        actual_entries: MessageCounter) ->tuple[MessageCounter, dict[tuple[
-        int, str], int]]:
+    def multiset_difference(
+        expected_entries: MessageCounter,
+        actual_entries: MessageCounter,
+    ) -> tuple[MessageCounter, dict[tuple[int, str], int]]:
         """Takes two multisets and compares them.

         A multiset is a dict with the cardinality of the key as the value.
         """
-        pass
+        missing = expected_entries.copy()
+        missing.subtract(actual_entries)
+        unexpected = {}
+        for key, value in list(missing.items()):
+            if value <= 0:
+                missing.pop(key)
+                if value < 0:
+                    unexpected[key] = -value
+        return missing, unexpected
+
+    def _open_expected_file(self) -> TextIO:
+        try:
+            return open(self._test_file.expected_output, encoding="utf-8")
+        except FileNotFoundError:
+            return StringIO("")
+
+    def _open_source_file(self) -> TextIO:
+        if self._test_file.base == "invalid_encoded_data":
+            return open(self._test_file.source, encoding="utf-8")
+        if "latin1" in self._test_file.base:
+            return open(self._test_file.source, encoding="latin1")
+        return open(self._test_file.source, encoding="utf8")
+
+    def _get_expected(self) -> tuple[MessageCounter, list[OutputLine]]:
+        with self._open_source_file() as f:
+            expected_msgs = self.get_expected_messages(f)
+        if not expected_msgs:
+            expected_msgs = Counter()
+        with self._open_expected_file() as f:
+            expected_output_lines = [
+                OutputLine.from_csv(row, self._check_end_position)
+                for row in csv.reader(f, "test")
+            ]
+        return expected_msgs, expected_output_lines
+
+    def _get_actual(self) -> tuple[MessageCounter, list[OutputLine]]:
+        messages: list[Message] = self._linter.reporter.messages
+        messages.sort(key=lambda m: (m.line, m.symbol, m.msg))
+        received_msgs: MessageCounter = Counter()
+        received_output_lines = []
+        for msg in messages:
+            assert (
+                msg.symbol != "fatal"
+            ), f"Pylint analysis failed because of '{msg.msg}'"
+            received_msgs[msg.line, msg.symbol] += 1
+            received_output_lines.append(
+                OutputLine.from_msg(msg, self._check_end_position)
+            )
+        return received_msgs, received_output_lines
+
+    def _runTest(self) -> None:
+        __tracebackhide__ = True  # pylint: disable=unused-variable
+        modules_to_check = [self._test_file.source]
+        self._linter.check(modules_to_check)
+        expected_messages, expected_output = self._get_expected()
+        actual_messages, actual_output = self._get_actual()
+        assert (
+            expected_messages == actual_messages
+        ), self.error_msg_for_unequal_messages(
+            actual_messages, expected_messages, actual_output
+        )
+        self._check_output_text(expected_messages, expected_output, actual_output)
+
+    def error_msg_for_unequal_messages(
+        self,
+        actual_messages: MessageCounter,
+        expected_messages: MessageCounter,
+        actual_output: list[OutputLine],
+    ) -> str:
+        msg = [f'Wrong message(s) raised for "{Path(self._test_file.source).name}":']
+        missing, unexpected = self.multiset_difference(
+            expected_messages, actual_messages
+        )
+        if missing:
+            msg.append("\nExpected in testdata:")
+            msg.extend(f" {msg[0]:3}: {msg[1]}" for msg in sorted(missing))
+        if unexpected:
+            msg.append("\nUnexpected in testdata:")
+            msg.extend(f" {msg[0]:3}: {msg[1]}" for msg in sorted(unexpected))
+        error_msg = "\n".join(msg)
+        if self._config and self._config.getoption("verbose") > 0:
+            error_msg += "\n\nActual pylint output for this file:\n"
+            error_msg += "\n".join(str(o) for o in actual_output)
+        return error_msg
+
+    def error_msg_for_unequal_output(
+        self,
+        expected_lines: list[OutputLine],
+        received_lines: list[OutputLine],
+    ) -> str:
+        missing = set(expected_lines) - set(received_lines)
+        unexpected = set(received_lines) - set(expected_lines)
+        error_msg = f'Wrong output for "{Path(self._test_file.expected_output).name}":'
+        sort_by_line_number = operator.attrgetter("lineno")
+        if missing:
+            error_msg += "\n- Missing lines:\n"
+            for line in sorted(missing, key=sort_by_line_number):
+                error_msg += f"{line}\n"
+        if unexpected:
+            error_msg += "\n- Unexpected lines:\n"
+            for line in sorted(unexpected, key=sort_by_line_number):
+                error_msg += f"{line}\n"
+            error_msg += (
+                "\nYou can update the expected output automatically with:\n'"
+                f"python tests/test_functional.py {UPDATE_OPTION} -k "
+                f'"test_functional[{self._test_file.base}]"\'\n\n'
+                "Here's the update text in case you can't:\n"
+            )
+            expected_csv = StringIO()
+            writer = csv.writer(expected_csv, dialect="test")
+            for line in sorted(received_lines, key=sort_by_line_number):
+                writer.writerow(line.to_csv())
+            error_msg += expected_csv.getvalue()
+        return error_msg

-    def _check_output_text(self, _: MessageCounter, expected_output: list[
-        OutputLine], actual_output: list[OutputLine]) ->None:
+    def _check_output_text(
+        self,
+        _: MessageCounter,
+        expected_output: list[OutputLine],
+        actual_output: list[OutputLine],
+    ) -> None:
         """This is a function because we want to be able to update the text in
         LintModuleOutputUpdate.
         """
-        pass
+        assert expected_output == actual_output, self.error_msg_for_unequal_output(
+            expected_output, actual_output
+        )
diff --git a/pylint/testutils/output_line.py b/pylint/testutils/output_line.py
index 50d52257c..c979a049c 100644
--- a/pylint/testutils/output_line.py
+++ b/pylint/testutils/output_line.py
@@ -1,10 +1,18 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from collections.abc import Sequence
 from typing import Any, NamedTuple, TypeVar
+
 from astroid import nodes
+
 from pylint.interfaces import UNDEFINED, Confidence
 from pylint.message.message import Message
-_T = TypeVar('_T')
+
+_T = TypeVar("_T")


 class MessageTest(NamedTuple):
@@ -33,37 +41,81 @@ class OutputLine(NamedTuple):
     confidence: str

     @classmethod
-    def from_msg(cls, msg: Message, check_endline: bool=True) ->OutputLine:
+    def from_msg(cls, msg: Message, check_endline: bool = True) -> OutputLine:
         """Create an OutputLine from a Pylint Message."""
-        pass
+        column = cls._get_column(msg.column)
+        end_line = cls._get_py38_none_value(msg.end_line, check_endline)
+        end_column = cls._get_py38_none_value(msg.end_column, check_endline)
+        return cls(
+            msg.symbol,
+            msg.line,
+            column,
+            end_line,
+            end_column,
+            msg.obj or "",
+            msg.msg.replace("\r\n", "\n"),
+            msg.confidence.name,
+        )

     @staticmethod
-    def _get_column(column: (str | int)) ->int:
+    def _get_column(column: str | int) -> int:
         """Handle column numbers."""
-        pass
+        return int(column)

     @staticmethod
-    def _get_py38_none_value(value: _T, check_endline: bool) ->(_T | None):
+    def _get_py38_none_value(value: _T, check_endline: bool) -> _T | None:
         """Used to make end_line and end_column None as indicated by our version
         compared to `min_pyver_end_position`.
         """
-        pass
+        if not check_endline:
+            return None  # pragma: no cover
+        return value

     @classmethod
-    def from_csv(cls, row: (Sequence[str] | str), check_endline: bool=True
-        ) ->OutputLine:
+    def from_csv(
+        cls, row: Sequence[str] | str, check_endline: bool = True
+    ) -> OutputLine:
         """Create an OutputLine from a comma separated list (the functional tests
         expected output .txt files).
         """
-        pass
+        if isinstance(row, str):
+            row = row.split(",")
+        try:
+            line = int(row[1])
+            column = cls._get_column(row[2])
+            end_line = cls._value_to_optional_int(
+                cls._get_py38_none_value(row[3], check_endline)
+            )
+            end_column = cls._value_to_optional_int(
+                cls._get_py38_none_value(row[4], check_endline)
+            )
+            # symbol, line, column, end_line, end_column, node, msg, confidence
+            assert len(row) == 8
+            return cls(
+                row[0], line, column, end_line, end_column, row[5], row[6], row[7]
+            )
+        except Exception:  # pylint: disable=broad-except
+            # We need this to not fail for the update script to work.
+            return cls("", 0, 0, None, None, "", "", "")

-    def to_csv(self) ->tuple[str, str, str, str, str, str, str, str]:
+    def to_csv(self) -> tuple[str, str, str, str, str, str, str, str]:
         """Convert an OutputLine to a tuple of string to be written by a
         csv-writer.
         """
-        pass
+        return (
+            str(self.symbol),
+            str(self.lineno),
+            str(self.column),
+            str(self.end_lineno),
+            str(self.end_column),
+            str(self.object),
+            str(self.msg),
+            str(self.confidence),
+        )

     @staticmethod
-    def _value_to_optional_int(value: (str | None)) ->(int | None):
+    def _value_to_optional_int(value: str | None) -> int | None:
         """Checks if a (stringified) value should be None or a Python integer."""
-        pass
+        if value == "None" or not value:
+            return None
+        return int(value)
diff --git a/pylint/testutils/pyreverse.py b/pylint/testutils/pyreverse.py
index f9eeb3f8c..c621f9e7a 100644
--- a/pylint/testutils/pyreverse.py
+++ b/pylint/testutils/pyreverse.py
@@ -1,27 +1,49 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import argparse
 import configparser
 import shlex
 from pathlib import Path
 from typing import NamedTuple, TypedDict
+
 from pylint.pyreverse.main import DEFAULT_COLOR_PALETTE


-class PyreverseConfig(argparse.Namespace):
+# This class could and should be replaced with a simple dataclass when support for Python < 3.7 is dropped.
+# A NamedTuple is not possible as some tests need to modify attributes during the test.
+class PyreverseConfig(
+    argparse.Namespace
+):  # pylint: disable=too-many-instance-attributes, too-many-arguments
     """Holds the configuration options for Pyreverse.

     The default values correspond to the defaults of the options' parser.
     """

-    def __init__(self, mode: str='PUB_ONLY', classes: (list[str] | None)=
-        None, show_ancestors: (int | None)=None, all_ancestors: (bool |
-        None)=None, show_associated: (int | None)=None, all_associated: (
-        bool | None)=None, no_standalone: bool=False, show_builtin: bool=
-        False, show_stdlib: bool=False, module_names: (bool | None)=None,
-        only_classnames: bool=False, output_format: str='dot', colorized:
-        bool=False, max_color_depth: int=2, color_palette: tuple[str, ...]=
-        DEFAULT_COLOR_PALETTE, ignore_list: tuple[str, ...]=tuple(),
-        project: str='', output_directory: str='') ->None:
+    def __init__(
+        self,
+        mode: str = "PUB_ONLY",
+        classes: list[str] | None = None,
+        show_ancestors: int | None = None,
+        all_ancestors: bool | None = None,
+        show_associated: int | None = None,
+        all_associated: bool | None = None,
+        no_standalone: bool = False,
+        show_builtin: bool = False,
+        show_stdlib: bool = False,
+        module_names: bool | None = None,
+        only_classnames: bool = False,
+        output_format: str = "dot",
+        colorized: bool = False,
+        max_color_depth: int = 2,
+        color_palette: tuple[str, ...] = DEFAULT_COLOR_PALETTE,
+        ignore_list: tuple[str, ...] = tuple(),
+        project: str = "",
+        output_directory: str = "",
+    ) -> None:
         super().__init__()
         self.mode = mode
         if classes:
@@ -54,11 +76,50 @@ class TestFileOptions(TypedDict):

 class FunctionalPyreverseTestfile(NamedTuple):
     """Named tuple containing the test file and the expected output."""
+
     source: Path
     options: TestFileOptions


-def get_functional_test_files(root_directory: Path) ->list[
-    FunctionalPyreverseTestfile]:
+def get_functional_test_files(
+    root_directory: Path,
+) -> list[FunctionalPyreverseTestfile]:
     """Get all functional test files from the given directory."""
-    pass
+    test_files = []
+    for path in root_directory.rglob("*.py"):
+        if path.stem.startswith("_"):
+            continue
+        config_file = path.with_suffix(".rc")
+        if config_file.exists():
+            test_files.append(
+                FunctionalPyreverseTestfile(
+                    source=path, options=_read_config(config_file)
+                )
+            )
+        else:
+            test_files.append(
+                FunctionalPyreverseTestfile(
+                    source=path,
+                    options={
+                        "source_roots": [],
+                        "output_formats": ["mmd"],
+                        "command_line_args": [],
+                    },
+                )
+            )
+    return test_files
+
+
+def _read_config(config_file: Path) -> TestFileOptions:
+    config = configparser.ConfigParser()
+    config.read(str(config_file))
+    source_roots = config.get("testoptions", "source_roots", fallback=None)
+    return {
+        "source_roots": source_roots.split(",") if source_roots else [],
+        "output_formats": config.get(
+            "testoptions", "output_formats", fallback="mmd"
+        ).split(","),
+        "command_line_args": shlex.split(
+            config.get("testoptions", "command_line_args", fallback="")
+        ),
+    }
diff --git a/pylint/testutils/reporter_for_tests.py b/pylint/testutils/reporter_for_tests.py
index 194c5b93e..d3c06eecd 100644
--- a/pylint/testutils/reporter_for_tests.py
+++ b/pylint/testutils/reporter_for_tests.py
@@ -1,40 +1,79 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from io import StringIO
 from os import getcwd, sep
 from typing import TYPE_CHECKING
+
 from pylint.message import Message
 from pylint.reporters import BaseReporter
+
 if TYPE_CHECKING:
     from pylint.reporters.ureports.nodes import Section


 class GenericTestReporter(BaseReporter):
     """Reporter storing plain text messages."""
+
     out: StringIO

-    def __init__(self) ->None:
+    def __init__(  # pylint: disable=super-init-not-called # See https://github.com/pylint-dev/pylint/issues/4941
+        self,
+    ) -> None:
         self.path_strip_prefix: str = getcwd() + sep
         self.reset()

-    def handle_message(self, msg: Message) ->None:
+    def reset(self) -> None:
+        self.out = StringIO()
+        self.messages: list[Message] = []
+
+    def handle_message(self, msg: Message) -> None:
         """Append messages to the list of messages of the reporter."""
-        pass
+        self.messages.append(msg)

-    def finalize(self) ->str:
+    def finalize(self) -> str:
         """Format and print messages in the context of the path."""
+        messages: list[str] = []
+        for msg in self.messages:
+            obj = ""
+            if msg.obj:
+                obj = f":{msg.obj}"
+            messages.append(f"{msg.msg_id[0]}:{msg.line:>3}{obj}: {msg.msg}")
+
+        messages.sort()
+        for message in messages:
+            print(message, file=self.out)
+
+        result = self.out.getvalue()
+        self.reset()
+        return result
+
+    def on_set_current_module(self, module: str, filepath: str | None) -> None:
         pass

-    def display_reports(self, layout: Section) ->None:
+    # pylint: enable=unused-argument
+
+    def display_reports(self, layout: Section) -> None:
         """Ignore layouts."""
+
+    def _display(self, layout: Section) -> None:
         pass


 class MinimalTestReporter(BaseReporter):
-    pass
+    def on_set_current_module(self, module: str, filepath: str | None) -> None:
+        self.messages = []

+    def _display(self, layout: Section) -> None:
+        pass

-class FunctionalTestReporter(BaseReporter):

-    def display_reports(self, layout: Section) ->None:
+class FunctionalTestReporter(BaseReporter):
+    def display_reports(self, layout: Section) -> None:
         """Ignore layouts and don't call self._display()."""
+
+    def _display(self, layout: Section) -> None:
         pass
diff --git a/pylint/testutils/tokenize_str.py b/pylint/testutils/tokenize_str.py
index c8005f96c..dc9ada72a 100644
--- a/pylint/testutils/tokenize_str.py
+++ b/pylint/testutils/tokenize_str.py
@@ -1,4 +1,13 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import tokenize
 from io import StringIO
 from tokenize import TokenInfo
+
+
+def _tokenize_str(code: str) -> list[TokenInfo]:
+    return list(tokenize.generate_tokens(StringIO(code).readline))
diff --git a/pylint/testutils/unittest_linter.py b/pylint/testutils/unittest_linter.py
index e3aec6085..a19afec56 100644
--- a/pylint/testutils/unittest_linter.py
+++ b/pylint/testutils/unittest_linter.py
@@ -1,6 +1,15 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
+# pylint: disable=duplicate-code
+
 from __future__ import annotations
+
 from typing import Any, Literal
+
 from astroid import nodes
+
 from pylint.interfaces import UNDEFINED, Confidence
 from pylint.lint import PyLinter
 from pylint.testutils.output_line import MessageTest
@@ -9,13 +18,67 @@ from pylint.testutils.output_line import MessageTest
 class UnittestLinter(PyLinter):
     """A fake linter class to capture checker messages."""

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         self._messages: list[MessageTest] = []
         super().__init__()

-    def add_message(self, msgid: str, line: (int | None)=None, node: (nodes
-        .NodeNG | None)=None, args: Any=None, confidence: (Confidence |
-        None)=None, col_offset: (int | None)=None, end_lineno: (int | None)
-        =None, end_col_offset: (int | None)=None) ->None:
+    def release_messages(self) -> list[MessageTest]:
+        try:
+            return self._messages
+        finally:
+            self._messages = []
+
+    def add_message(
+        self,
+        msgid: str,
+        line: int | None = None,
+        # TODO: Make node non optional
+        node: nodes.NodeNG | None = None,
+        args: Any = None,
+        confidence: Confidence | None = None,
+        col_offset: int | None = None,
+        end_lineno: int | None = None,
+        end_col_offset: int | None = None,
+    ) -> None:
         """Add a MessageTest to the _messages attribute of the linter class."""
-        pass
+        # If confidence is None we set it to UNDEFINED as well in PyLinter
+        if confidence is None:
+            confidence = UNDEFINED
+
+        # Look up "location" data of node if not yet supplied
+        if node:
+            if node.position:
+                if not line:
+                    line = node.position.lineno
+                if not col_offset:
+                    col_offset = node.position.col_offset
+                if not end_lineno:
+                    end_lineno = node.position.end_lineno
+                if not end_col_offset:
+                    end_col_offset = node.position.end_col_offset
+            else:
+                if not line:
+                    line = node.fromlineno
+                if not col_offset:
+                    col_offset = node.col_offset
+                if not end_lineno:
+                    end_lineno = node.end_lineno
+                if not end_col_offset:
+                    end_col_offset = node.end_col_offset
+
+        self._messages.append(
+            MessageTest(
+                msgid,
+                line,
+                node,
+                args,
+                confidence,
+                col_offset,
+                end_lineno,
+                end_col_offset,
+            )
+        )
+
+    @staticmethod
+    def is_message_enabled(*unused_args: Any, **unused_kwargs: Any) -> Literal[True]:
+        return True
diff --git a/pylint/testutils/utils.py b/pylint/testutils/utils.py
index 8ca619cfa..1ff999b28 100644
--- a/pylint/testutils/utils.py
+++ b/pylint/testutils/utils.py
@@ -1,4 +1,9 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import contextlib
 import os
 import sys
@@ -9,12 +14,62 @@ from typing import TextIO


 @contextlib.contextmanager
-def _patch_streams(out: TextIO) ->Iterator[None]:
+def _patch_streams(out: TextIO) -> Iterator[None]:
     """Patch and subsequently reset a text stream."""
-    pass
+    sys.stderr = sys.stdout = out
+    try:
+        yield
+    finally:
+        sys.stderr = sys.__stderr__
+        sys.stdout = sys.__stdout__
+
+
+@contextlib.contextmanager
+def _test_sys_path(
+    replacement_sys_path: list[str] | None = None,
+) -> Generator[None, None, None]:
+    original_path = sys.path
+    try:
+        if replacement_sys_path is not None:
+            sys.path = copy(replacement_sys_path)
+        yield
+    finally:
+        sys.path = original_path
+
+
+@contextlib.contextmanager
+def _test_cwd(
+    current_working_directory: str | Path | None = None,
+) -> Generator[None, None, None]:
+    original_dir = os.getcwd()
+    try:
+        if current_working_directory is not None:
+            os.chdir(current_working_directory)
+        yield
+    finally:
+        os.chdir(original_dir)
+
+
+@contextlib.contextmanager
+def _test_environ_pythonpath(
+    new_pythonpath: str | None = None,
+) -> Generator[None, None, None]:
+    original_pythonpath = os.environ.get("PYTHONPATH")
+    if new_pythonpath:
+        os.environ["PYTHONPATH"] = new_pythonpath
+    elif new_pythonpath is None and original_pythonpath is not None:
+        # If new_pythonpath is None, make sure to delete PYTHONPATH if present
+        del os.environ["PYTHONPATH"]
+    try:
+        yield
+    finally:
+        if original_pythonpath is not None:
+            os.environ["PYTHONPATH"] = original_pythonpath
+        elif "PYTHONPATH" in os.environ:
+            del os.environ["PYTHONPATH"]


-def create_files(paths: list[str], chroot: str='.') ->None:
+def create_files(paths: list[str], chroot: str = ".") -> None:
     """Creates directories and files found in <path>.

     :param list paths: list of relative paths to files or directories
@@ -33,4 +88,20 @@ def create_files(paths: list[str], chroot: str='.') ->None:
     >>> isfile('/tmp/a/b/foo.py')
     True
     """
-    pass
+    dirs, files = set(), set()
+    for path in paths:
+        path = os.path.join(chroot, path)
+        filename = os.path.basename(path)
+        # path is a directory path
+        if not filename:
+            dirs.add(path)
+        # path is a filename path
+        else:
+            dirs.add(os.path.dirname(path))
+            files.add(path)
+    for dirpath in dirs:
+        if not os.path.isdir(dirpath):
+            os.makedirs(dirpath)
+    for filepath in files:
+        with open(filepath, "w", encoding="utf-8"):
+            pass
diff --git a/pylint/typing.py b/pylint/typing.py
index c4dc2c2f0..f9dde2e40 100644
--- a/pylint/typing.py
+++ b/pylint/typing.py
@@ -1,8 +1,30 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """A collection of typing utilities."""
+
 from __future__ import annotations
+
 import argparse
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, Literal, NamedTuple, Optional, Pattern, Protocol, Tuple, Type, TypedDict, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Iterable,
+    Literal,
+    NamedTuple,
+    Optional,
+    Pattern,
+    Protocol,
+    Tuple,
+    Type,
+    TypedDict,
+    Union,
+)
+
 if TYPE_CHECKING:
     from pylint.config.callback_actions import _CallbackAction
     from pylint.pyreverse.inspector import Project
@@ -18,6 +40,7 @@ class FileItem(NamedTuple):
     - filepath: path of the file
     - modname: module name
     """
+
     name: str
     filepath: str
     modpath: str
@@ -25,6 +48,7 @@ class FileItem(NamedTuple):

 class ModuleDescriptionDict(TypedDict):
     """Represents data about a checked module."""
+
     path: str
     name: str
     isarg: bool
@@ -34,13 +58,15 @@ class ModuleDescriptionDict(TypedDict):

 class ErrorDescriptionDict(TypedDict):
     """Represents data about errors collected during checking of a module."""
-    key: Literal['fatal']
+
+    key: Literal["fatal"]
     mod: str
     ex: ImportError | SyntaxError


 class MessageLocationTuple(NamedTuple):
     """Tuple with information about the location of a to-be-displayed message."""
+
     abspath: str
     path: str
     module: str
@@ -53,6 +79,7 @@ class MessageLocationTuple(NamedTuple):

 class ManagedMessage(NamedTuple):
     """Tuple with information about a managed message of the linter."""
+
     name: str | None
     msgid: str
     symbol: str
@@ -60,20 +87,36 @@ class ManagedMessage(NamedTuple):
     is_disabled: bool


-MessageTypesFullName = Literal['convention', 'error', 'fatal', 'info',
-    'refactor', 'statement', 'warning']
+MessageTypesFullName = Literal[
+    "convention", "error", "fatal", "info", "refactor", "statement", "warning"
+]
 """All possible message categories."""
-OptionDict = Dict[str, Union[None, str, bool, int, Pattern[str], Iterable[
-    Union[str, int, Pattern[str]]], Type['_CallbackAction'], Callable[[Any],
-    Any], Callable[[Any, Any, Any, Any], Any]]]
+
+
+OptionDict = Dict[
+    str,
+    Union[
+        None,
+        str,
+        bool,
+        int,
+        Pattern[str],
+        Iterable[Union[str, int, Pattern[str]]],
+        Type["_CallbackAction"],
+        Callable[[Any], Any],
+        Callable[[Any, Any, Any, Any], Any],
+    ],
+]
 Options = Tuple[Tuple[str, OptionDict], ...]
-ReportsCallable = Callable[['Section', 'LinterStats', Optional[
-    'LinterStats']], None]
+
+
+ReportsCallable = Callable[["Section", "LinterStats", Optional["LinterStats"]], None]
 """Callable to create a report."""


-class ExtraMessageOptions(TypedDict, total=(False)):
+class ExtraMessageOptions(TypedDict, total=False):
     """All allowed keys in the extra options for message definitions."""
+
     scope: str
     old_names: list[tuple[str, str]]
     maxversion: tuple[int, int]
@@ -82,13 +125,14 @@ class ExtraMessageOptions(TypedDict, total=(False)):
     default_enabled: bool


-MessageDefinitionTuple = Union[Tuple[str, str, str], Tuple[str, str, str,
-    ExtraMessageOptions]]
-DirectoryNamespaceDict = Dict[Path, Tuple[argparse.Namespace,
-    'DirectoryNamespaceDict']]
+MessageDefinitionTuple = Union[
+    Tuple[str, str, str],
+    Tuple[str, str, str, ExtraMessageOptions],
+]
+DirectoryNamespaceDict = Dict[Path, Tuple[argparse.Namespace, "DirectoryNamespaceDict"]]


 class GetProjectCallable(Protocol):
-
-    def __call__(self, module: str, name: (str | None)='No Name') ->Project:
-        ...
+    def __call__(
+        self, module: str, name: str | None = "No Name"
+    ) -> Project: ...  # pragma: no cover
diff --git a/pylint/utils/ast_walker.py b/pylint/utils/ast_walker.py
index 8872be9e0..367a39b81 100644
--- a/pylint/utils/ast_walker.py
+++ b/pylint/utils/ast_walker.py
@@ -1,33 +1,106 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import sys
 import traceback
 from collections import defaultdict
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Callable
+
 from astroid import nodes
+
 if TYPE_CHECKING:
     from pylint.checkers.base_checker import BaseChecker
     from pylint.lint import PyLinter
+
+# Callable parameter type NodeNG not completely correct.
+# Due to contravariance of Callable parameter types,
+# it should be a Union of all NodeNG subclasses.
+# However, since the methods are only retrieved with
+# getattr(checker, member) and thus are inferred as Any,
+# NodeNG will work too.
 AstCallback = Callable[[nodes.NodeNG], None]


 class ASTWalker:
-
-    def __init__(self, linter: PyLinter) ->None:
+    def __init__(self, linter: PyLinter) -> None:
+        # callbacks per node types
         self.nbstatements = 0
-        self.visit_events: defaultdict[str, list[AstCallback]] = defaultdict(
-            list)
-        self.leave_events: defaultdict[str, list[AstCallback]] = defaultdict(
-            list)
+        self.visit_events: defaultdict[str, list[AstCallback]] = defaultdict(list)
+        self.leave_events: defaultdict[str, list[AstCallback]] = defaultdict(list)
         self.linter = linter
         self.exception_msg = False

-    def add_checker(self, checker: BaseChecker) ->None:
+    def _is_method_enabled(self, method: AstCallback) -> bool:
+        if not hasattr(method, "checks_msgs"):
+            return True
+        return any(self.linter.is_message_enabled(m) for m in method.checks_msgs)
+
+    def add_checker(self, checker: BaseChecker) -> None:
         """Walk to the checker's dir and collect visit and leave methods."""
-        pass
+        vcids: set[str] = set()
+        lcids: set[str] = set()
+        visits = self.visit_events
+        leaves = self.leave_events
+        for member in dir(checker):
+            cid = member[6:]
+            if cid == "default":
+                continue
+            if member.startswith("visit_"):
+                v_meth = getattr(checker, member)
+                # don't use visit_methods with no activated message:
+                if self._is_method_enabled(v_meth):
+                    visits[cid].append(v_meth)
+                    vcids.add(cid)
+            elif member.startswith("leave_"):
+                l_meth = getattr(checker, member)
+                # don't use leave_methods with no activated message:
+                if self._is_method_enabled(l_meth):
+                    leaves[cid].append(l_meth)
+                    lcids.add(cid)
+        visit_default = getattr(checker, "visit_default", None)
+        if visit_default:
+            for cls in nodes.ALL_NODE_CLASSES:
+                cid = cls.__name__.lower()
+                if cid not in vcids:
+                    visits[cid].append(visit_default)
+        # For now, we have no "leave_default" method in Pylint

-    def walk(self, astroid: nodes.NodeNG) ->None:
+    def walk(self, astroid: nodes.NodeNG) -> None:
         """Call visit events of astroid checkers for the given node, recurse on
         its children, then leave events.
         """
-        pass
+        cid = astroid.__class__.__name__.lower()
+
+        # Detect if the node is a new name for a deprecated alias.
+        # In this case, favour the methods for the deprecated
+        # alias if any, in order to maintain backwards
+        # compatibility.
+        visit_events: Sequence[AstCallback] = self.visit_events.get(cid, ())
+        leave_events: Sequence[AstCallback] = self.leave_events.get(cid, ())
+
+        # pylint: disable = too-many-try-statements
+        try:
+            if astroid.is_statement:
+                self.nbstatements += 1
+            # generate events for this node on each checker
+            for callback in visit_events:
+                callback(astroid)
+            # recurse on children
+            for child in astroid.get_children():
+                self.walk(child)
+            for callback in leave_events:
+                callback(astroid)
+        except Exception:
+            if self.exception_msg is False:
+                file = getattr(astroid.root(), "file", None)
+                print(
+                    f"Exception on node {astroid!r} in file '{file}'",
+                    file=sys.stderr,
+                )
+                traceback.print_exc()
+                self.exception_msg = True
+            raise
diff --git a/pylint/utils/docs.py b/pylint/utils/docs.py
index 69b539e48..ba592c4a4 100644
--- a/pylint/utils/docs.py
+++ b/pylint/utils/docs.py
@@ -1,30 +1,96 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 """Various helper functions to create the docs of a linter object."""
+
 from __future__ import annotations
+
 import sys
 from typing import TYPE_CHECKING, Any, TextIO
+
 from pylint.constants import MAIN_CHECKER_NAME
 from pylint.utils.utils import get_rst_section, get_rst_title
+
 if TYPE_CHECKING:
     from pylint.lint.pylinter import PyLinter


-def _get_checkers_infos(linter: PyLinter) ->dict[str, dict[str, Any]]:
+def _get_checkers_infos(linter: PyLinter) -> dict[str, dict[str, Any]]:
     """Get info from a checker and handle KeyError."""
-    pass
+    by_checker: dict[str, dict[str, Any]] = {}
+    for checker in linter.get_checkers():
+        name = checker.name
+        if name != MAIN_CHECKER_NAME:
+            try:
+                by_checker[name]["checker"] = checker
+                by_checker[name]["options"] += checker._options_and_values()
+                by_checker[name]["msgs"].update(checker.msgs)
+                by_checker[name]["reports"] += checker.reports
+            except KeyError:
+                by_checker[name] = {
+                    "checker": checker,
+                    "options": list(checker._options_and_values()),
+                    "msgs": dict(checker.msgs),
+                    "reports": list(checker.reports),
+                }
+    return by_checker


-def _get_global_options_documentation(linter: PyLinter) ->str:
+def _get_global_options_documentation(linter: PyLinter) -> str:
     """Get documentation for the main checker."""
-    pass
+    result = get_rst_title("Pylint global options and switches", "-")
+    result += """
+Pylint provides global options and switches.
+
+"""
+    for checker in linter.get_checkers():
+        if checker.name == MAIN_CHECKER_NAME and checker.options:
+            for section, options in checker._options_by_section():
+                if section is None:
+                    title = "General options"
+                else:
+                    title = f"{section.capitalize()} options"
+                result += get_rst_title(title, "~")
+                assert isinstance(options, list)
+                result += f"{get_rst_section(None, options)}\n"
+    return result


-def _get_checkers_documentation(linter: PyLinter, show_options: bool=True
-    ) ->str:
+def _get_checkers_documentation(linter: PyLinter, show_options: bool = True) -> str:
     """Get documentation for individual checkers."""
-    pass
+    if show_options:
+        result = _get_global_options_documentation(linter)
+    else:
+        result = ""
+
+    result += get_rst_title("Pylint checkers' options and switches", "-")
+    result += """\
+
+Pylint checkers can provide three set of features:
+
+* options that control their execution,
+* messages that they can raise,
+* reports that they can generate.
+
+Below is a list of all checkers and their features.
+
+"""
+    by_checker = _get_checkers_infos(linter)
+    for checker_name in sorted(by_checker):
+        information = by_checker[checker_name]
+        checker = information["checker"]
+        del information["checker"]
+        result += checker.get_full_documentation(
+            **information, show_options=show_options
+        )
+    return result


-def print_full_documentation(linter: PyLinter, stream: TextIO=sys.stdout,
-    show_options: bool=True) ->None:
+def print_full_documentation(
+    linter: PyLinter, stream: TextIO = sys.stdout, show_options: bool = True
+) -> None:
     """Output a full documentation in ReST format."""
-    pass
+    print(
+        _get_checkers_documentation(linter, show_options=show_options)[:-3], file=stream
+    )
diff --git a/pylint/utils/file_state.py b/pylint/utils/file_state.py
index 91b309b26..bc2763eaa 100644
--- a/pylint/utils/file_state.py
+++ b/pylint/utils/file_state.py
@@ -1,26 +1,46 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import collections
 from collections import defaultdict
 from collections.abc import Iterator
 from typing import TYPE_CHECKING, Dict, Literal
+
 from astroid import nodes
-from pylint.constants import INCOMPATIBLE_WITH_USELESS_SUPPRESSION, MSG_STATE_SCOPE_MODULE, WarningScope
+
+from pylint.constants import (
+    INCOMPATIBLE_WITH_USELESS_SUPPRESSION,
+    MSG_STATE_SCOPE_MODULE,
+    WarningScope,
+)
+
 if TYPE_CHECKING:
     from pylint.message import MessageDefinition, MessageDefinitionStore
+
+
 MessageStateDict = Dict[str, Dict[int, bool]]


 class FileState:
     """Hold internal state specific to the currently analyzed file."""

-    def __init__(self, modname: str, msg_store: MessageDefinitionStore,
-        node: (nodes.Module | None)=None, *, is_base_filestate: bool=False
-        ) ->None:
+    def __init__(
+        self,
+        modname: str,
+        msg_store: MessageDefinitionStore,
+        node: nodes.Module | None = None,
+        *,
+        is_base_filestate: bool = False,
+    ) -> None:
         self.base_name = modname
         self._module_msgs_state: MessageStateDict = {}
         self._raw_module_msgs_state: MessageStateDict = {}
-        self._ignored_msgs: defaultdict[tuple[str, int], set[int]
-            ] = collections.defaultdict(set)
+        self._ignored_msgs: defaultdict[tuple[str, int], set[int]] = (
+            collections.defaultdict(set)
+        )
         self._suppression_mapping: dict[tuple[str, int], int] = {}
         self._module = node
         if node:
@@ -33,35 +53,202 @@ class FileState:
         PyLinter.
         """

-    def _set_state_on_block_lines(self, msgs_store: MessageDefinitionStore,
-        node: nodes.NodeNG, msg: MessageDefinition, msg_state: dict[int, bool]
-        ) ->None:
+    def _set_state_on_block_lines(
+        self,
+        msgs_store: MessageDefinitionStore,
+        node: nodes.NodeNG,
+        msg: MessageDefinition,
+        msg_state: dict[int, bool],
+    ) -> None:
         """Recursively walk (depth first) AST to collect block level options
         line numbers and set the state correctly.
         """
-        pass
+        for child in node.get_children():
+            self._set_state_on_block_lines(msgs_store, child, msg, msg_state)
+        # The first child's line number is used to distinguish disables that
+        # are the first child of a scoped node from those defined later.
+        # For instance in the code below:
+        #
+        # 1.   def meth8(self):
+        # 2.        """test late disabling"""
+        # 3.        pylint: disable=not-callable, useless-suppression
+        # 4.        print(self.blip)
+        # 5.        pylint: disable=no-member, useless-suppression
+        # 6.        print(self.bla)
+        #
+        # E1102 should be disabled from line 1 to 6 while E1101 from line 5 to 6
+        #
+        # this is necessary to disable locally messages applying to class /
+        # function using their fromlineno
+        if (
+            isinstance(node, (nodes.Module, nodes.ClassDef, nodes.FunctionDef))
+            and node.body
+        ):
+            firstchildlineno = node.body[0].fromlineno
+        else:
+            firstchildlineno = node.tolineno
+        self._set_message_state_in_block(msg, msg_state, node, firstchildlineno)

-    def _set_message_state_in_block(self, msg: MessageDefinition, lines:
-        dict[int, bool], node: nodes.NodeNG, firstchildlineno: int) ->None:
+    def _set_message_state_in_block(
+        self,
+        msg: MessageDefinition,
+        lines: dict[int, bool],
+        node: nodes.NodeNG,
+        firstchildlineno: int,
+    ) -> None:
         """Set the state of a message in a block of lines."""
-        pass
+        first = node.fromlineno
+        last = node.tolineno
+        for lineno, state in list(lines.items()):
+            original_lineno = lineno
+            if first > lineno or last < lineno:
+                continue
+            # Set state for all lines for this block, if the
+            # warning is applied to nodes.
+            if msg.scope == WarningScope.NODE:
+                if lineno > firstchildlineno:
+                    state = True
+                first_, last_ = node.block_range(lineno)
+                # pylint: disable=useless-suppression
+                # For block nodes first_ is their definition line. For example, we
+                # set the state of line zero for a module to allow disabling
+                # invalid-name for the module. For example:
+                # 1. # pylint: disable=invalid-name
+                # 2. ...
+                # OR
+                # 1. """Module docstring"""
+                # 2. # pylint: disable=invalid-name
+                # 3. ...
+                #
+                # But if we already visited line 0 we don't need to set its state again
+                # 1. # pylint: disable=invalid-name
+                # 2. # pylint: enable=invalid-name
+                # 3. ...
+                # The state should come from line 1, not from line 2
+                # Therefore, if the 'fromlineno' is already in the states we just start
+                # with the lineno we were originally visiting.
+                # pylint: enable=useless-suppression
+                if (
+                    first_ == node.fromlineno
+                    and first_ >= firstchildlineno
+                    and node.fromlineno in self._module_msgs_state.get(msg.msgid, ())
+                ):
+                    first_ = lineno
+
+            else:
+                first_ = lineno
+                last_ = last
+            for line in range(first_, last_ + 1):
+                # Do not override existing entries. This is especially important
+                # when parsing the states for a scoped node where some line-disables
+                # have already been parsed.
+                if (
+                    (
+                        isinstance(node, nodes.Module)
+                        and node.fromlineno <= line < lineno
+                    )
+                    or (
+                        not isinstance(node, nodes.Module)
+                        and node.fromlineno < line < lineno
+                    )
+                ) and line in self._module_msgs_state.get(msg.msgid, ()):
+                    continue
+                if line in lines:  # state change in the same block
+                    state = lines[line]
+                    original_lineno = line

-    def _set_message_state_on_line(self, msg: MessageDefinition, line: int,
-        state: bool, original_lineno: int) ->None:
+                self._set_message_state_on_line(msg, line, state, original_lineno)
+
+            del lines[lineno]
+
+    def _set_message_state_on_line(
+        self,
+        msg: MessageDefinition,
+        line: int,
+        state: bool,
+        original_lineno: int,
+    ) -> None:
         """Set the state of a message on a line."""
-        pass
+        # Update suppression mapping
+        if not state:
+            self._suppression_mapping[(msg.msgid, line)] = original_lineno
+        else:
+            self._suppression_mapping.pop((msg.msgid, line), None)

-    def set_msg_status(self, msg: MessageDefinition, line: int, status:
-        bool, scope: str='package') ->None:
+        # Update message state for respective line
+        try:
+            self._module_msgs_state[msg.msgid][line] = state
+        except KeyError:
+            self._module_msgs_state[msg.msgid] = {line: state}
+
+    def set_msg_status(
+        self,
+        msg: MessageDefinition,
+        line: int,
+        status: bool,
+        scope: str = "package",
+    ) -> None:
         """Set status (enabled/disable) for a given message at a given line."""
-        pass
+        assert line > 0
+        if scope != "line":
+            # Expand the status to cover all relevant block lines
+            self._set_state_on_block_lines(
+                self._msgs_store, self._module, msg, {line: status}
+            )
+        else:
+            self._set_message_state_on_line(msg, line, status, line)

-    def handle_ignored_message(self, state_scope: (Literal[0, 1, 2] | None),
-        msgid: str, line: (int | None)) ->None:
+        # Store the raw value
+        try:
+            self._raw_module_msgs_state[msg.msgid][line] = status
+        except KeyError:
+            self._raw_module_msgs_state[msg.msgid] = {line: status}
+
+    def handle_ignored_message(
+        self, state_scope: Literal[0, 1, 2] | None, msgid: str, line: int | None
+    ) -> None:
         """Report an ignored message.

         state_scope is either MSG_STATE_SCOPE_MODULE or MSG_STATE_SCOPE_CONFIG,
         depending on whether the message was disabled locally in the module,
         or globally.
         """
-        pass
+        if state_scope == MSG_STATE_SCOPE_MODULE:
+            assert isinstance(line, int)  # should always be int inside module scope
+
+            try:
+                orig_line = self._suppression_mapping[(msgid, line)]
+                self._ignored_msgs[(msgid, orig_line)].add(line)
+            except KeyError:
+                pass
+
+    def iter_spurious_suppression_messages(
+        self,
+        msgs_store: MessageDefinitionStore,
+    ) -> Iterator[
+        tuple[
+            Literal["useless-suppression", "suppressed-message"],
+            int,
+            tuple[str] | tuple[str, int],
+        ]
+    ]:
+        for warning, lines in self._raw_module_msgs_state.items():
+            for line, enable in lines.items():
+                if (
+                    not enable
+                    and (warning, line) not in self._ignored_msgs
+                    and warning not in INCOMPATIBLE_WITH_USELESS_SUPPRESSION
+                ):
+                    yield "useless-suppression", line, (
+                        msgs_store.get_msg_display_string(warning),
+                    )
+        # Iterate over a snapshot: _ignored_msgs may be modified by add_message
+        for (warning, from_), ignored_lines in list(self._ignored_msgs.items()):
+            for line in ignored_lines:
+                yield "suppressed-message", line, (
+                    msgs_store.get_msg_display_string(warning),
+                    from_,
+                )
+
+    def get_effective_max_line_number(self) -> int | None:
+        return self._effective_max_line_number  # type: ignore[no-any-return]
diff --git a/pylint/utils/linterstats.py b/pylint/utils/linterstats.py
index 3950c7b35..53afbcfe2 100644
--- a/pylint/utils/linterstats.py
+++ b/pylint/utils/linterstats.py
@@ -1,10 +1,17 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 from typing import Literal, TypedDict, cast
+
 from pylint.typing import MessageTypesFullName


 class BadNames(TypedDict):
     """TypedDict to store counts of node types with bad names."""
+
     argument: int
     attr: int
     klass: int
@@ -22,6 +29,7 @@ class BadNames(TypedDict):

 class CodeTypeCount(TypedDict):
     """TypedDict to store counts of lines of code types."""
+
     code: int
     comment: int
     docstring: int
@@ -31,12 +39,14 @@ class CodeTypeCount(TypedDict):

 class DuplicatedLines(TypedDict):
     """TypedDict to store counts of lines of duplicated code."""
+
     nb_duplicated_lines: int
     percent_duplicated_lines: float


 class NodeCount(TypedDict):
     """TypedDict to store counts of different types of nodes."""
+
     function: int
     klass: int
     method: int
@@ -45,6 +55,7 @@ class NodeCount(TypedDict):

 class UndocumentedNodes(TypedDict):
     """TypedDict to store counts of undocumented node types."""
+
     function: int
     klass: int
     method: int
@@ -53,6 +64,7 @@ class UndocumentedNodes(TypedDict):

 class ModuleStats(TypedDict):
     """TypedDict to store counts of types of messages and statements."""
+
     convention: int
     error: int
     fatal: int
@@ -62,29 +74,53 @@ class ModuleStats(TypedDict):
     warning: int


+# pylint: disable-next=too-many-instance-attributes
 class LinterStats:
     """Class used to linter stats."""

-    def __init__(self, bad_names: (BadNames | None)=None, by_module: (dict[
-        str, ModuleStats] | None)=None, by_msg: (dict[str, int] | None)=
-        None, code_type_count: (CodeTypeCount | None)=None, dependencies: (
-        dict[str, set[str]] | None)=None, duplicated_lines: (
-        DuplicatedLines | None)=None, node_count: (NodeCount | None)=None,
-        undocumented: (UndocumentedNodes | None)=None) ->None:
-        self.bad_names = bad_names or BadNames(argument=0, attr=0, klass=0,
-            class_attribute=0, class_const=0, const=0, inlinevar=0,
-            function=0, method=0, module=0, variable=0, typevar=0, typealias=0)
+    def __init__(
+        self,
+        bad_names: BadNames | None = None,
+        by_module: dict[str, ModuleStats] | None = None,
+        by_msg: dict[str, int] | None = None,
+        code_type_count: CodeTypeCount | None = None,
+        dependencies: dict[str, set[str]] | None = None,
+        duplicated_lines: DuplicatedLines | None = None,
+        node_count: NodeCount | None = None,
+        undocumented: UndocumentedNodes | None = None,
+    ) -> None:
+        self.bad_names = bad_names or BadNames(
+            argument=0,
+            attr=0,
+            klass=0,
+            class_attribute=0,
+            class_const=0,
+            const=0,
+            inlinevar=0,
+            function=0,
+            method=0,
+            module=0,
+            variable=0,
+            typevar=0,
+            typealias=0,
+        )
         self.by_module: dict[str, ModuleStats] = by_module or {}
         self.by_msg: dict[str, int] = by_msg or {}
-        self.code_type_count = code_type_count or CodeTypeCount(code=0,
-            comment=0, docstring=0, empty=0, total=0)
+        self.code_type_count = code_type_count or CodeTypeCount(
+            code=0, comment=0, docstring=0, empty=0, total=0
+        )
+
         self.dependencies: dict[str, set[str]] = dependencies or {}
         self.duplicated_lines = duplicated_lines or DuplicatedLines(
-            nb_duplicated_lines=0, percent_duplicated_lines=0.0)
-        self.node_count = node_count or NodeCount(function=0, klass=0,
-            method=0, module=0)
-        self.undocumented = undocumented or UndocumentedNodes(function=0,
-            klass=0, method=0, module=0)
+            nb_duplicated_lines=0, percent_duplicated_lines=0.0
+        )
+        self.node_count = node_count or NodeCount(
+            function=0, klass=0, method=0, module=0
+        )
+        self.undocumented = undocumented or UndocumentedNodes(
+            function=0, klass=0, method=0, module=0
+        )
+
         self.convention = 0
         self.error = 0
         self.fatal = 0
@@ -92,14 +128,15 @@ class LinterStats:
         self.refactor = 0
         self.statement = 0
         self.warning = 0
+
         self.global_note = 0
         self.nb_duplicated_lines = 0
         self.percent_duplicated_lines = 0.0

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         return str(self)

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         return f"""{self.bad_names}
         {sorted(self.by_module.items())}
         {sorted(self.by_msg.items())}
@@ -118,85 +155,236 @@ class LinterStats:
         {self.nb_duplicated_lines}
         {self.percent_duplicated_lines}"""

-    def init_single_module(self, module_name: str) ->None:
+    def init_single_module(self, module_name: str) -> None:
         """Use through PyLinter.set_current_module so PyLinter.current_name is
         consistent.
         """
-        pass
-
-    def get_bad_names(self, node_name: Literal['argument', 'attr', 'class',
-        'class_attribute', 'class_const', 'const', 'inlinevar', 'function',
-        'method', 'module', 'variable', 'typevar', 'typealias']) ->int:
+        self.by_module[module_name] = ModuleStats(
+            convention=0, error=0, fatal=0, info=0, refactor=0, statement=0, warning=0
+        )
+
+    def get_bad_names(
+        self,
+        node_name: Literal[
+            "argument",
+            "attr",
+            "class",
+            "class_attribute",
+            "class_const",
+            "const",
+            "inlinevar",
+            "function",
+            "method",
+            "module",
+            "variable",
+            "typevar",
+            "typealias",
+        ],
+    ) -> int:
         """Get a bad names node count."""
-        pass
+        if node_name == "class":
+            return self.bad_names.get("klass", 0)
+        return self.bad_names.get(node_name, 0)

-    def increase_bad_name(self, node_name: str, increase: int) ->None:
+    def increase_bad_name(self, node_name: str, increase: int) -> None:
         """Increase a bad names node count."""
-        pass
-
-    def reset_bad_names(self) ->None:
+        if node_name not in {
+            "argument",
+            "attr",
+            "class",
+            "class_attribute",
+            "class_const",
+            "const",
+            "inlinevar",
+            "function",
+            "method",
+            "module",
+            "variable",
+            "typevar",
+            "typealias",
+        }:
+            raise ValueError("Node type not part of the bad_names stat")
+
+        node_name = cast(
+            Literal[
+                "argument",
+                "attr",
+                "class",
+                "class_attribute",
+                "class_const",
+                "const",
+                "inlinevar",
+                "function",
+                "method",
+                "module",
+                "variable",
+                "typevar",
+                "typealias",
+            ],
+            node_name,
+        )
+        if node_name == "class":
+            self.bad_names["klass"] += increase
+        else:
+            self.bad_names[node_name] += increase
+
+    def reset_bad_names(self) -> None:
         """Resets the bad_names attribute."""
-        pass
-
-    def get_code_count(self, type_name: Literal['code', 'comment',
-        'docstring', 'empty', 'total']) ->int:
+        self.bad_names = BadNames(
+            argument=0,
+            attr=0,
+            klass=0,
+            class_attribute=0,
+            class_const=0,
+            const=0,
+            inlinevar=0,
+            function=0,
+            method=0,
+            module=0,
+            variable=0,
+            typevar=0,
+            typealias=0,
+        )
+
+    def get_code_count(
+        self, type_name: Literal["code", "comment", "docstring", "empty", "total"]
+    ) -> int:
         """Get a code type count."""
-        pass
+        return self.code_type_count.get(type_name, 0)

-    def reset_code_count(self) ->None:
+    def reset_code_count(self) -> None:
         """Resets the code_type_count attribute."""
-        pass
+        self.code_type_count = CodeTypeCount(
+            code=0, comment=0, docstring=0, empty=0, total=0
+        )

-    def reset_duplicated_lines(self) ->None:
+    def reset_duplicated_lines(self) -> None:
         """Resets the duplicated_lines attribute."""
-        pass
+        self.duplicated_lines = DuplicatedLines(
+            nb_duplicated_lines=0, percent_duplicated_lines=0.0
+        )

-    def get_node_count(self, node_name: Literal['function', 'class',
-        'method', 'module']) ->int:
+    def get_node_count(
+        self, node_name: Literal["function", "class", "method", "module"]
+    ) -> int:
         """Get a node count while handling some extra conditions."""
-        pass
+        if node_name == "class":
+            return self.node_count.get("klass", 0)
+        return self.node_count.get(node_name, 0)

-    def reset_node_count(self) ->None:
+    def reset_node_count(self) -> None:
         """Resets the node count attribute."""
-        pass
+        self.node_count = NodeCount(function=0, klass=0, method=0, module=0)

-    def get_undocumented(self, node_name: Literal['function', 'class',
-        'method', 'module']) ->float:
+    def get_undocumented(
+        self, node_name: Literal["function", "class", "method", "module"]
+    ) -> float:
         """Get a undocumented node count."""
-        pass
+        if node_name == "class":
+            return self.undocumented["klass"]
+        return self.undocumented[node_name]

-    def reset_undocumented(self) ->None:
+    def reset_undocumented(self) -> None:
         """Resets the undocumented attribute."""
-        pass
+        self.undocumented = UndocumentedNodes(function=0, klass=0, method=0, module=0)

    def get_global_message_count(self, type_name: str) -> int:
        """Get a global message count."""
        # Falls back to 0 when the stats object has no counter named *type_name*.
        return getattr(self, type_name, 0)

-    def get_module_message_count(self, modname: str, type_name:
-        MessageTypesFullName) ->int:
+    def get_module_message_count(
+        self, modname: str, type_name: MessageTypesFullName
+    ) -> int:
         """Get a module message count."""
-        pass
+        return self.by_module[modname].get(type_name, 0)

-    def increase_single_message_count(self, type_name: str, increase: int
-        ) ->None:
+    def increase_single_message_count(self, type_name: str, increase: int) -> None:
         """Increase the message type count of an individual message type."""
-        pass
+        setattr(self, type_name, getattr(self, type_name) + increase)

-    def increase_single_module_message_count(self, modname: str, type_name:
-        MessageTypesFullName, increase: int) ->None:
+    def increase_single_module_message_count(
+        self, modname: str, type_name: MessageTypesFullName, increase: int
+    ) -> None:
         """Increase the message type count of an individual message type of a
         module.
         """
-        pass
+        self.by_module[modname][type_name] += increase

-    def reset_message_count(self) ->None:
+    def reset_message_count(self) -> None:
         """Resets the message type count of the stats object."""
-        pass
+        self.convention = 0
+        self.error = 0
+        self.fatal = 0
+        self.info = 0
+        self.refactor = 0
+        self.warning = 0


def merge_stats(stats: list[LinterStats]) -> LinterStats:
    """Used to merge multiple stats objects into a new one when pylint is run in
    parallel mode.
    """
    merged = LinterStats()
    bad_name_keys = (
        "argument",
        "attr",
        "klass",
        "class_attribute",
        "class_const",
        "const",
        "inlinevar",
        "function",
        "method",
        "module",
        "variable",
        "typevar",
        "typealias",
    )
    code_keys = ("code", "comment", "docstring", "empty", "total")
    node_keys = ("function", "klass", "method", "module")
    counters = (
        "convention",
        "error",
        "fatal",
        "info",
        "refactor",
        "statement",
        "warning",
        "global_note",
    )
    for stat in stats:
        for key in bad_name_keys:
            merged.bad_names[key] += stat.bad_names[key]  # type: ignore[literal-required]

        # Per-key assignment: a later stat's module entry replaces an earlier one.
        merged.by_module.update(stat.by_module)

        for msg_id, count in stat.by_msg.items():
            merged.by_msg[msg_id] = merged.by_msg.get(msg_id, 0) + count

        for key in code_keys:
            merged.code_type_count[key] += stat.code_type_count[key]  # type: ignore[literal-required]

        for dep, mods in stat.dependencies.items():
            if dep in merged.dependencies:
                merged.dependencies[dep].update(mods)
            else:
                # Keep the incoming set object itself, as the original did.
                merged.dependencies[dep] = mods

        for key in ("nb_duplicated_lines", "percent_duplicated_lines"):
            merged.duplicated_lines[key] += stat.duplicated_lines[key]  # type: ignore[literal-required]

        for key in node_keys:
            merged.node_count[key] += stat.node_count[key]  # type: ignore[literal-required]
            merged.undocumented[key] += stat.undocumented[key]  # type: ignore[literal-required]

        for attr in counters:
            setattr(merged, attr, getattr(merged, attr) + getattr(stat, attr))
    return merged
diff --git a/pylint/utils/pragma_parser.py b/pylint/utils/pragma_parser.py
index d42e18922..12513e284 100644
--- a/pylint/utils/pragma_parser.py
+++ b/pylint/utils/pragma_parser.py
@@ -1,16 +1,25 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 import re
 from collections.abc import Generator
 from typing import NamedTuple
-OPTION_RGX = """
-    (?:^\\s*\\#.*|\\s*|               # Comment line, or whitespaces,
-       \\s*\\#.*(?=\\#.*?\\bpylint:))  # or a beginning of an inline comment
+
+# Allow stopping after the first semicolon/hash encountered,
+# so that an option can be continued with the reasons
+# why it is active or disabled.
+OPTION_RGX = r"""
+    (?:^\s*\#.*|\s*|               # Comment line, or whitespaces,
+       \s*\#.*(?=\#.*?\bpylint:))  # or a beginning of an inline comment
                                    # followed by "pylint:" pragma
-    (\\#                            # Beginning of comment
+    (\#                            # Beginning of comment
     .*?                            # Anything (as little as possible)
-    \\bpylint:                      # pylint word and column
-    \\s*                            # Any number of whitespaces
-    ([^;#\\n]+))                    # Anything except semicolon or hash or
+    \bpylint:                      # pylint word and column
+    \s*                            # Any number of whitespaces
+    ([^;#\n]+))                    # Anything except semicolon or hash or
                                    # newline (it is the second matched group)
                                    # and end of the first matched group
     [;#]{0,1}                      # From 0 to 1 repetition of semicolon or hash
@@ -23,22 +32,44 @@ class PragmaRepresenter(NamedTuple):
     messages: list[str]


# Pragma keywords that stand alone (no message ids follow them).
ATOMIC_KEYWORDS = frozenset(("disable-all", "skip-file"))
# Pragma keywords that require "=" followed by one or more message ids.
MESSAGE_KEYWORDS = frozenset(
    ("disable-next", "disable-msg", "enable-msg", "disable", "enable")
)
# sorted is necessary because sets are unordered collections and ALL_KEYWORDS
# string should not vary between executions
# reverse is necessary in order to have the longest keywords first, so that, for example,
# 'disable' string should not be matched instead of 'disable-all'
ALL_KEYWORDS = "|".join(
    sorted(ATOMIC_KEYWORDS | MESSAGE_KEYWORDS, key=len, reverse=True)
)


TOKEN_SPECIFICATION = [
    ("KEYWORD", rf"\b({ALL_KEYWORDS:s})\b"),
    ("MESSAGE_STRING", r"[0-9A-Za-z\-\_]{2,}"),  # Identifiers
    ("ASSIGN", r"="),  # Assignment operator
    ("MESSAGE_NUMBER", r"[CREIWF]{1}\d*"),  # Category letter plus digits, e.g. C0103
]

# One named alternative per token kind; re.Match.lastgroup then tells the
# parser which kind matched.
TOK_REGEX = "|".join(
    f"(?P<{token_name:s}>{token_rgx:s})"
    for token_name, token_rgx in TOKEN_SPECIFICATION
)
+
+
def emit_pragma_representer(action: str, messages: list[str]) -> PragmaRepresenter:
    """Build a PragmaRepresenter, rejecting message keywords without messages."""
    if action in MESSAGE_KEYWORDS and not messages:
        raise InvalidPragmaError(
            "The keyword is not followed by message identifier", action
        )
    return PragmaRepresenter(action, messages)


 class PragmaParserError(Exception):
     """A class for exceptions thrown by pragma_parser module."""

-    def __init__(self, message: str, token: str) ->None:
+    def __init__(self, message: str, token: str) -> None:
         """:args message: explain the reason why the exception has been thrown
         :args token: token concerned by the exception.
         """
@@ -53,3 +84,52 @@ class UnRecognizedOptionError(PragmaParserError):

class InvalidPragmaError(PragmaParserError):
    """Thrown in case the pragma is invalid.

    E.g. a message keyword with no message identifier after it, or an
    assignment with no keyword before it.
    """
+
+
def parse_pragma(pylint_pragma: str) -> Generator[PragmaRepresenter, None, None]:
    """Tokenize a 'pylint:' pragma comment and yield one representer per keyword.

    :raises UnRecognizedOptionError: on an unknown keyword or misplaced '='
    :raises InvalidPragmaError: on a structurally invalid pragma
    """
    current_action: str | None = None
    current_messages: list[str] = []
    expect_assignment = False
    last_value = ""

    for match in re.finditer(TOK_REGEX, pylint_pragma):
        token_kind = match.lastgroup
        token_value = match.group()

        if token_kind == "ASSIGN":
            if not expect_assignment:
                if current_action:
                    # A keyword was seen but it does not take '=' at all.
                    raise UnRecognizedOptionError(
                        "The keyword doesn't support assignment", current_action
                    )
                if last_value:
                    # Something preceded the '=', but not a known keyword.
                    raise UnRecognizedOptionError(
                        "The keyword is unknown", last_value
                    )
                raise InvalidPragmaError("Missing keyword before assignment", "")
            expect_assignment = False
        elif expect_assignment:
            raise InvalidPragmaError(
                "The = sign is missing after the keyword", current_action or ""
            )
        elif token_kind == "KEYWORD":
            # A new keyword closes the previous one.
            if current_action:
                yield emit_pragma_representer(current_action, current_messages)
            current_action = token_value
            current_messages = []
            expect_assignment = current_action in MESSAGE_KEYWORDS
        elif token_kind in ("MESSAGE_STRING", "MESSAGE_NUMBER"):
            current_messages.append(token_value)
            expect_assignment = False
        else:
            raise RuntimeError("Token not recognized")

        last_value = token_value

    if current_action:
        yield emit_pragma_representer(current_action, current_messages)
    else:
        raise UnRecognizedOptionError("The keyword is unknown", last_value)
diff --git a/pylint/utils/utils.py b/pylint/utils/utils.py
index 12fbd9948..73e9e6a5f 100644
--- a/pylint/utils/utils.py
+++ b/pylint/utils/utils.py
@@ -1,11 +1,19 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
+# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
+
 from __future__ import annotations
+
 try:
     import isort.api
     import isort.settings
+
     HAS_ISORT_5 = True
-except ImportError:
+except ImportError:  # isort < 5
     import isort
+
     HAS_ISORT_5 = False
+
 import argparse
 import codecs
 import os
@@ -17,77 +25,199 @@ import warnings
 from collections import deque
 from collections.abc import Iterable, Sequence
 from io import BufferedReader, BytesIO
-from typing import TYPE_CHECKING, Any, List, Literal, Pattern, TextIO, Tuple, TypeVar, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    List,
+    Literal,
+    Pattern,
+    TextIO,
+    Tuple,
+    TypeVar,
+    Union,
+)
+
 from astroid import Module, modutils, nodes
+
 from pylint.constants import PY_EXTS
 from pylint.typing import OptionDict
+
 if TYPE_CHECKING:
     from pylint.lint import PyLinter
+
DEFAULT_LINE_LENGTH = 79  # default wrap width used by normalize_text()

# These are types used to overload get_global_option() and refer to the options type
GLOBAL_OPTION_BOOL = Literal[
    "suggestion-mode",
    "analyse-fallback-blocks",
    "allow-global-unused-variables",
    "prefer-stubs",
]
GLOBAL_OPTION_INT = Literal["max-line-length", "docstring-min-length"]
GLOBAL_OPTION_LIST = Literal["ignored-modules"]
GLOBAL_OPTION_PATTERN = Literal[
    "no-docstring-rgx",
    "dummy-variables-rgx",
    "ignored-argument-names",
    "mixin-class-rgx",
]
GLOBAL_OPTION_PATTERN_LIST = Literal["exclude-too-few-public-methods", "ignore-paths"]
GLOBAL_OPTION_TUPLE_INT = Literal["py-version"]
GLOBAL_OPTION_NAMES = Union[
    GLOBAL_OPTION_BOOL,
    GLOBAL_OPTION_INT,
    GLOBAL_OPTION_LIST,
    GLOBAL_OPTION_PATTERN,
    GLOBAL_OPTION_PATTERN_LIST,
    GLOBAL_OPTION_TUPLE_INT,
]
# Type variable constrained to every concrete type a global option can resolve to.
T_GlobalOptionReturnTypes = TypeVar(
    "T_GlobalOptionReturnTypes",
    bool,
    int,
    List[str],
    Pattern[str],
    List[Pattern[str]],
    Tuple[int, ...],
)
+
+
def normalize_text(
    text: str, line_len: int = DEFAULT_LINE_LENGTH, indent: str = ""
) -> str:
    """Wrap the text on the given line length."""
    wrapped_lines = textwrap.wrap(
        text, width=line_len, initial_indent=indent, subsequent_indent=indent
    )
    return "\n".join(wrapped_lines)
+

CMPS = ["=", "-", "+"]


def cmp(a: float, b: float) -> int:
    """Three-way comparison, replacing the ``cmp`` builtin removed in Python 3."""
    greater = int(a > b)
    smaller = int(a < b)
    return greater - smaller

def diff_string(old: float, new: float) -> str:
    """Given an old and new value, return a string representing the difference."""
    delta = abs(old - new)
    # Index -1 wraps: "=" for equal, "-" for a decrease, "+" for an increase.
    marker = ("=", "-", "+")[(old > new) - (old < new)]
    return f"{marker}{f'{delta:.2f}' if delta else ''}"


def get_module_and_frameid(node: nodes.NodeNG) -> tuple[str, str]:
    """Return the module name and the frame id in the module."""
    frame = node.frame()
    module_name = ""
    scope_names: list[str] = []
    while frame:
        if isinstance(frame, Module):
            module_name = frame.name
        else:
            # Lambdas have no "name" attribute.
            scope_names.append(getattr(frame, "name", "<lambda>"))
        try:
            frame = frame.parent.frame()
        except AttributeError:
            # Reached the top of the tree (no parent frame).
            break
    return module_name, ".".join(reversed(scope_names))


def get_rst_title(title: str, character: str) -> str:
    """Permit to get a title formatted as ReStructuredText test (underlined with a
    chosen character).
    """
    underline = character * len(title)
    return f"{title}\n{underline}\n"


def get_rst_section(
    section: str | None,
    options: list[tuple[str, OptionDict, Any]],
    doc: str | None = None,
) -> str:
    """Format an option's section using as a ReStructuredText formatted output."""
    parts: list[str] = []
    if section:
        parts.append(get_rst_title(section, "'"))
    if doc:
        parts.append(f"{normalize_text(doc)}\n\n")
    for optname, optdict, value in options:
        parts.append(f":{optname}:\n")
        help_opt = optdict.get("help")
        if help_opt:
            assert isinstance(help_opt, str)
            parts.append(f"{normalize_text(help_opt, indent='  ')}\n")
        # py-version's default is rendered elsewhere; skip it here.
        if value and optname != "py-version":
            formatted = str(_format_option_value(optdict, value))
            parts.append(f"\n  Default: ``{formatted.replace('`` ', '```` ``')}``\n")
    return "".join(parts)
+
+
def decoding_stream(
    stream: BufferedReader | BytesIO,
    encoding: str,
    errors: Literal["strict"] = "strict",
) -> codecs.StreamReader:
    """Wrap *stream* in a StreamReader decoding with *encoding*.

    Falls back to the interpreter's default encoding when *encoding* is empty
    or names a codec Python does not know.
    """
    requested = encoding or sys.getdefaultencoding()
    try:
        reader_factory = codecs.getreader(requested)
    except LookupError:
        reader_factory = codecs.getreader(sys.getdefaultencoding())
    return reader_factory(stream, errors)
+
+
def tokenize_module(node: nodes.Module) -> list[tokenize.TokenInfo]:
    """Return all tokens of the module's source stream."""
    with node.stream() as source_stream:
        return list(tokenize.tokenize(source_stream.readline))
+
+
def register_plugins(linter: PyLinter, directory: str) -> None:
    """Load all module and package in the given directory, looking for a
    'register' function in each one, used to register pylint checkers.
    """
    imported = {}
    for filename in os.listdir(directory):
        base, extension = os.path.splitext(filename)
        if base in imported or base == "__pycache__":
            continue
        # Accept python source files (but not the package marker itself) and
        # extension-less sub-directories that are not hidden (no leading dot).
        if (
            extension in PY_EXTS
            and base != "__init__"
            or (
                not extension
                and os.path.isdir(os.path.join(directory, base))
                and not filename.startswith(".")
            )
        ):
            try:
                module = modutils.load_module_from_file(
                    os.path.join(directory, filename)
                )
            except ValueError:
                # empty module name (usually Emacs auto-save files)
                continue
            except ImportError as exc:
                print(f"Problem importing module (unknown): {exc}", file=sys.stderr)
            else:
                # Only modules exposing a register() hook count as plugins.
                if hasattr(module, "register"):
                    module.register(linter)
                    imported[base] = 1
+
+
+def _splitstrip(string: str, sep: str = ",") -> list[str]:
+    r"""Return a list of stripped string by splitting the string given as
     argument on `sep` (',' by default), empty strings are discarded.

     >>> _splitstrip('a, b, c   ,  4,,')
     ['a', 'b', 'c', '4']
     >>> _splitstrip('a')
     ['a']
-    >>> _splitstrip('a,\\nb,\\nc,')
+    >>> _splitstrip('a,\nb,\nc,')
     ['a', 'b', 'c']

     :type string: str or unicode
@@ -99,59 +229,160 @@ def _splitstrip(string: str, sep: str=',') ->list[str]:
     :rtype: str or unicode
     :return: the unquoted string (or the input string if it wasn't quoted)
     """
-    pass
+    return [word.strip() for word in string.split(sep) if word.strip()]


-def _unquote(string: str) ->str:
+def _unquote(string: str) -> str:
     """Remove optional quotes (simple or double) from the string.

     :param string: an optionally quoted string
     :return: the unquoted string (or the input string if it wasn't quoted)
     """
-    pass
+    if not string:
+        return string
+    if string[0] in "\"'":
+        string = string[1:]
+    if string[-1] in "\"'":
+        string = string[:-1]
+    return string


-def _check_regexp_csv(value: (list[str] | tuple[str] | str)) ->Iterable[str]:
-    """Split a comma-separated list of regexps, taking care to avoid splitting
-    a regex employing a comma as quantifier, as in `\\d{1,2}`.
-    """
-    pass
+def _check_csv(value: list[str] | tuple[str] | str) -> Sequence[str]:
+    if isinstance(value, (list, tuple)):
+        return value
+    return _splitstrip(value)


-def _comment(string: str) ->str:
+def _check_regexp_csv(value: list[str] | tuple[str] | str) -> Iterable[str]:
+    r"""Split a comma-separated list of regexps, taking care to avoid splitting
+    a regex employing a comma as quantifier, as in `\d{1,2}`.
+    """
+    if isinstance(value, (list, tuple)):
+        yield from value
+    else:
+        # None is a sentinel value here
+        regexps: deque[deque[str] | None] = deque([None])
+        open_braces = False
+        for char in value:
+            if char == "{":
+                open_braces = True
+            elif char == "}" and open_braces:
+                open_braces = False
+
+            if char == "," and not open_braces:
+                regexps.append(None)
+            elif regexps[-1] is None:
+                regexps.pop()
+                regexps.append(deque([char]))
+            else:
+                regexps[-1].append(char)
+        yield from ("".join(regexp).strip() for regexp in regexps if regexp is not None)
+
+
+def _comment(string: str) -> str:
     """Return string as a comment."""
-    pass
+    lines = [line.strip() for line in string.splitlines()]
+    sep = "\n"
+    return "# " + f"{sep}# ".join(lines)


-def _format_option_value(optdict: OptionDict, value: Any) ->str:
+def _format_option_value(optdict: OptionDict, value: Any) -> str:
     """Return the user input's value from a 'compiled' value.

     TODO: Refactor the code to not use this deprecated function
     """
-    pass
-
-
-def format_section(stream: TextIO, section: str, options: list[tuple[str,
-    OptionDict, Any]], doc: (str | None)=None) ->None:
+    if optdict.get("type", None) == "py_version":
+        value = ".".join(str(item) for item in value)
+    elif isinstance(value, (list, tuple)):
+        value = ",".join(_format_option_value(optdict, item) for item in value)
+    elif isinstance(value, dict):
+        value = ",".join(f"{k}:{v}" for k, v in value.items())
+    elif hasattr(value, "match"):  # optdict.get('type') == 'regexp'
+        # compiled regexp
+        value = value.pattern
+    elif optdict.get("type") == "yn":
+        value = "yes" if value else "no"
+    elif isinstance(value, str) and value.isspace():
+        value = f"'{value}'"
+    return str(value)
+
+
def format_section(
    stream: TextIO,
    section: str,
    options: list[tuple[str, OptionDict, Any]],
    doc: str | None = None,
) -> None:
    """Format an option's section using the INI format.

    :param stream: output stream the INI text is printed to
    :param section: name for the ``[section]`` header
    :param options: (name, option-descriptor, value) triples to emit
    :param doc: optional text printed as comment lines above the section
    """
    warnings.warn(
        "format_section has been deprecated. It will be removed in pylint 4.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    if doc:
        print(_comment(doc), file=stream)
    print(f"[{section}]", file=stream)
    # _ini_format is deprecated too; silence its warning for this internal call
    # so the caller only sees one deprecation message.
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=DeprecationWarning)
        _ini_format(stream, options)
+
+
def _ini_format(stream: TextIO, options: list[tuple[str, OptionDict, Any]]) -> None:
    """Format options using the INI format."""
    warnings.warn(
        "_ini_format has been deprecated. It will be removed in pylint 4.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    for optname, optdict, value in options:
        # Skip deprecated option
        if "kwargs" in optdict:
            assert isinstance(optdict["kwargs"], dict)
            if "new_names" in optdict["kwargs"]:
                continue
        value = _format_option_value(optdict, value)
        help_opt = optdict.get("help")
        if help_opt:
            assert isinstance(help_opt, str)
            help_opt = normalize_text(help_opt, indent="# ")
            print(file=stream)
            print(help_opt, file=stream)
        else:
            print(file=stream)
        # Options rendering to "None"/"False" are written commented out, so
        # the generated file documents them without activating them.
        if value in {"None", "False"}:
            print(f"#{optname}=", file=stream)
        else:
            value = str(value).strip()
            # Break comma-separated lists one item per line, aligned under
            # the first item after the "optname=" prefix.
            if re.match(r"^([\w-]+,)+[\w-]+$", str(value)):
                separator = "\n " + " " * len(optname)
                value = separator.join(x + "," for x in str(value).split(","))
                # remove trailing ',' from last element of the list
                value = value[:-1]
            print(f"{optname}={value}", file=stream)


 class IsortDriver:
     """A wrapper around isort API that changed between versions 4 and 5."""

    def __init__(self, config: argparse.Namespace) -> None:
        """Build the version-appropriate isort configuration from pylint options."""
        if HAS_ISORT_5:
            self.isort5_config = isort.settings.Config(
                # There is no typo here. EXTRA_standard_library is
                # what most users want. The option has been named
                # KNOWN_standard_library for ages in pylint, and we
                # don't want to break compatibility.
                extra_standard_library=config.known_standard_library,
                known_third_party=config.known_third_party,
            )
        else:
            # pylint: disable-next=no-member
            self.isort4_obj = isort.SortImports(  # type: ignore[attr-defined]
                file_contents="",
                known_standard_library=config.known_standard_library,
                known_third_party=config.known_third_party,
            )
+
    def place_module(self, package: str) -> str:
        """Ask isort which import section *package* belongs to, across both APIs."""
        if HAS_ISORT_5:
            return isort.api.place_module(package, self.isort5_config)
        return self.isort4_obj.place_module(package)  # type: ignore[no-any-return]