Reference (Gold): tornado

Pytest Summary for the tornado test suite

status     count
passed      1144
skipped       57
failed        61
total       1262
collected   1262

Failed tests:

iostream_test.py::TestIOStreamWebMixin::test_connection_closed
self = 

    def test_connection_closed(self: typing.Any):
        # When a server sends a response and then closes the connection,
        # the client must be allowed to read the data before the IOStream
        # closes itself.  Epoll reports closed connections with a separate
        # EPOLLRDHUP event delivered at the same time as the read event,
        # while kqueue reports them as a second read/write event with an EOF
        # flag.
>       response = self.fetch("/", headers={"Connection": "close"})
E       AttributeError: 'TestIOStreamWebMixin' object has no attribute 'fetch'

tornado/test/iostream_test.py:73: AttributeError
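
The failure above is the signature of a test mixin being collected on its own. `TestIOStreamWebMixin` only defines test bodies; the `fetch` method it calls is supplied by `tornado.testing.AsyncHTTPTestCase`, which the mixin is meant to be combined with in a concrete subclass. A minimal sketch of that pattern follows (`HelloHandler` and `TestIOStreamHTTP` are hypothetical names used for illustration):

    import typing

    from tornado.testing import AsyncHTTPTestCase
    from tornado.web import Application, RequestHandler


    class HelloHandler(RequestHandler):
        def get(self) -> None:
            self.write("Hello")


    class TestIOStreamWebMixin(object):
        # The mixin assumes a sibling base class provides self.fetch().
        # Collected standalone, it raises AttributeError as reported above.
        def test_connection_closed(self: typing.Any) -> None:
            response = self.fetch("/", headers={"Connection": "close"})
            response.rethrow()


    class TestIOStreamHTTP(TestIOStreamWebMixin, AsyncHTTPTestCase):
        # AsyncHTTPTestCase contributes fetch() and the test HTTP server;
        # the concrete case only has to supply the application under test.
        def get_app(self) -> Application:
            return Application([("/", HelloHandler)])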

iostream_test.py::TestIOStreamWebMixin::test_read_until_close
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamWebMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError
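
Every remaining failure in this report has the same shape: `post_coroutine` is the wrapper produced by tornado's `gen_test` decorator (tornado/testing.py), which drives a coroutine test to completion on `self.io_loop`. That attribute is created in `AsyncTestCase.setUp`, so when pytest instantiates a bare mixin, the attribute lookup fails before the test body ever runs. A simplified sketch of the wrapper, reconstructed from the traceback (the real decorator also supports being applied without arguments):

    import functools
    from typing import Any, Callable


    def gen_test(timeout: float = 5.0) -> Callable:
        # Simplified from tornado.testing.gen_test: wrap a coroutine test
        # method so unittest/pytest can call it synchronously.
        def wrap(coro: Callable) -> Callable:
            @functools.wraps(coro)
            def post_coroutine(self, *args: Any, **kwargs: Any) -> None:
                # self.io_loop only exists after AsyncTestCase.setUp() has
                # run; on a bare mixin this lookup is the AttributeError
                # seen throughout the report.
                return self.io_loop.run_sync(
                    functools.partial(coro, self, *args, **kwargs), timeout=timeout
                )

            return post_coroutine

        return wrap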

iostream_test.py::TestIOStreamWebMixin::test_read_zero_bytes
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamWebMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamWebMixin::test_write_while_connecting
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamWebMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamWebMixin::test_future_interface
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamWebMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamWebMixin::test_future_close_while_reading
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamWebMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamWebMixin::test_future_read_until_close
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamWebMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_write_zero_bytes
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_future_delayed_close_callback
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_close_buffered_data
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_until_close_after_close
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_large_read_until
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_until_with_close_after_second_packet
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_until_unsatisfied_after_close
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_close_callback_with_pending_read
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_future_close_callback
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_write_memoryview
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_bytes_partial
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_until_max_bytes
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_until_max_bytes_inline
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_until_max_bytes_ignores_extra
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_until_regex_max_bytes
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_until_regex_max_bytes_inline
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_until_regex_max_bytes_ignores_extra
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_small_reads_from_large_buffer
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_small_read_untils_from_large_buffer
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_flow_control
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_into
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_into_partial
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_read_into_zero_bytes
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestReadWriteMixin::test_many_mixed_reads
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestReadWriteMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_write_zero_bytes
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_future_delayed_close_callback
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_close_buffered_data
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_until_close_after_close
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_large_read_until
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_until_with_close_after_second_packet
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_until_unsatisfied_after_close
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_close_callback_with_pending_read
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_future_close_callback
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_write_memoryview
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_bytes_partial
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_until_max_bytes
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_until_max_bytes_inline
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_until_max_bytes_ignores_extra
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_until_regex_max_bytes
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_until_regex_max_bytes_inline
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_until_regex_max_bytes_ignores_extra
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_small_reads_from_large_buffer
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_small_read_untils_from_large_buffer
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_flow_control
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_into
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_into_partial
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_into_zero_bytes
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_many_mixed_reads
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_connection_refused
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_gaierror
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_read_until_close_with_error
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_inline_read_error
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_async_read_error_logging
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError

iostream_test.py::TestIOStreamMixin::test_future_write
self = 
args = (), kwargs = {}

    @functools.wraps(coro)
    def post_coroutine(self, *args, **kwargs):
        # type: (AsyncTestCase, *Any, **Any) -> None
        try:
>           return self.io_loop.run_sync(
                functools.partial(coro, self, *args, **kwargs), timeout=timeout
            )
E           AttributeError: 'TestIOStreamMixin' object has no attribute 'io_loop'

tornado/testing.py:604: AttributeError
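
All 61 failures therefore trace back to pytest collecting the mixin classes (`TestIOStreamWebMixin`, `TestReadWriteMixin`, `TestIOStreamMixin`) directly instead of only their concrete subclasses. One conventional remedy, which the patch below does not itself apply, is to mark the mixins non-collectable: pytest skips any class whose `__test__` attribute is false. A sketch of that approach, where `TestIOStreamReadWrite` is a hypothetical name for a concrete subclass:

    from tornado.testing import AsyncTestCase, gen_test


    class TestReadWriteMixin(object):
        # With __test__ = False, pytest no longer collects the mixin's
        # test_* methods standalone.
        __test__ = False

        @gen_test
        async def test_write_zero_bytes(self):
            pass  # the real body exercises IOStream.write(b"")


    class TestIOStreamReadWrite(TestReadWriteMixin, AsyncTestCase):
        # __test__ is inherited, so each concrete subclass must opt
        # back in to be collected and run.
        __test__ = True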

Patch diff

diff --git a/tornado/_locale_data.py b/tornado/_locale_data.py
index 6e70773f..7a5d2852 100644
--- a/tornado/_locale_data.py
+++ b/tornado/_locale_data.py
@@ -1,51 +1,80 @@
+# Copyright 2012 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """Data used by the tornado.locale module."""
-LOCALE_NAMES = {'af_ZA': {'name_en': 'Afrikaans', 'name': 'Afrikaans'},
-    'am_ET': {'name_en': 'Amharic', 'name': 'አማርኛ'}, 'ar_AR': {'name_en':
-    'Arabic', 'name': 'العربية'}, 'bg_BG': {'name_en': 'Bulgarian', 'name':
-    'Български'}, 'bn_IN': {'name_en': 'Bengali', 'name': 'বাংলা'}, 'bs_BA':
-    {'name_en': 'Bosnian', 'name': 'Bosanski'}, 'ca_ES': {'name_en':
-    'Catalan', 'name': 'Català'}, 'cs_CZ': {'name_en': 'Czech', 'name':
-    'Čeština'}, 'cy_GB': {'name_en': 'Welsh', 'name': 'Cymraeg'}, 'da_DK':
-    {'name_en': 'Danish', 'name': 'Dansk'}, 'de_DE': {'name_en': 'German',
-    'name': 'Deutsch'}, 'el_GR': {'name_en': 'Greek', 'name': 'Ελληνικά'},
-    'en_GB': {'name_en': 'English (UK)', 'name': 'English (UK)'}, 'en_US':
-    {'name_en': 'English (US)', 'name': 'English (US)'}, 'es_ES': {
-    'name_en': 'Spanish (Spain)', 'name': 'Español (España)'}, 'es_LA': {
-    'name_en': 'Spanish', 'name': 'Español'}, 'et_EE': {'name_en':
-    'Estonian', 'name': 'Eesti'}, 'eu_ES': {'name_en': 'Basque', 'name':
-    'Euskara'}, 'fa_IR': {'name_en': 'Persian', 'name': 'فارسی'}, 'fi_FI':
-    {'name_en': 'Finnish', 'name': 'Suomi'}, 'fr_CA': {'name_en':
-    'French (Canada)', 'name': 'Français (Canada)'}, 'fr_FR': {'name_en':
-    'French', 'name': 'Français'}, 'ga_IE': {'name_en': 'Irish', 'name':
-    'Gaeilge'}, 'gl_ES': {'name_en': 'Galician', 'name': 'Galego'}, 'he_IL':
-    {'name_en': 'Hebrew', 'name': 'עברית'}, 'hi_IN': {'name_en': 'Hindi',
-    'name': 'हिन्दी'}, 'hr_HR': {'name_en': 'Croatian', 'name': 'Hrvatski'},
-    'hu_HU': {'name_en': 'Hungarian', 'name': 'Magyar'}, 'id_ID': {
-    'name_en': 'Indonesian', 'name': 'Bahasa Indonesia'}, 'is_IS': {
-    'name_en': 'Icelandic', 'name': 'Íslenska'}, 'it_IT': {'name_en':
-    'Italian', 'name': 'Italiano'}, 'ja_JP': {'name_en': 'Japanese', 'name':
-    '日本語'}, 'ko_KR': {'name_en': 'Korean', 'name': '한국어'}, 'lt_LT': {
-    'name_en': 'Lithuanian', 'name': 'Lietuvių'}, 'lv_LV': {'name_en':
-    'Latvian', 'name': 'Latviešu'}, 'mk_MK': {'name_en': 'Macedonian',
-    'name': 'Македонски'}, 'ml_IN': {'name_en': 'Malayalam', 'name':
-    'മലയാളം'}, 'ms_MY': {'name_en': 'Malay', 'name': 'Bahasa Melayu'},
-    'nb_NO': {'name_en': 'Norwegian (bokmal)', 'name': 'Norsk (bokmål)'},
-    'nl_NL': {'name_en': 'Dutch', 'name': 'Nederlands'}, 'nn_NO': {
-    'name_en': 'Norwegian (nynorsk)', 'name': 'Norsk (nynorsk)'}, 'pa_IN':
-    {'name_en': 'Punjabi', 'name': 'ਪੰਜਾਬੀ'}, 'pl_PL': {'name_en': 'Polish',
-    'name': 'Polski'}, 'pt_BR': {'name_en': 'Portuguese (Brazil)', 'name':
-    'Português (Brasil)'}, 'pt_PT': {'name_en': 'Portuguese (Portugal)',
-    'name': 'Português (Portugal)'}, 'ro_RO': {'name_en': 'Romanian',
-    'name': 'Română'}, 'ru_RU': {'name_en': 'Russian', 'name': 'Русский'},
-    'sk_SK': {'name_en': 'Slovak', 'name': 'Slovenčina'}, 'sl_SI': {
-    'name_en': 'Slovenian', 'name': 'Slovenščina'}, 'sq_AL': {'name_en':
-    'Albanian', 'name': 'Shqip'}, 'sr_RS': {'name_en': 'Serbian', 'name':
-    'Српски'}, 'sv_SE': {'name_en': 'Swedish', 'name': 'Svenska'}, 'sw_KE':
-    {'name_en': 'Swahili', 'name': 'Kiswahili'}, 'ta_IN': {'name_en':
-    'Tamil', 'name': 'தமிழ்'}, 'te_IN': {'name_en': 'Telugu', 'name':
-    'తెలుగు'}, 'th_TH': {'name_en': 'Thai', 'name': 'ภาษาไทย'}, 'tl_PH': {
-    'name_en': 'Filipino', 'name': 'Filipino'}, 'tr_TR': {'name_en':
-    'Turkish', 'name': 'Türkçe'}, 'uk_UA': {'name_en': 'Ukraini ', 'name':
-    'Українська'}, 'vi_VN': {'name_en': 'Vietnamese', 'name': 'Tiếng Việt'},
-    'zh_CN': {'name_en': 'Chinese (Simplified)', 'name': '中文(简体)'}, 'zh_TW':
-    {'name_en': 'Chinese (Traditional)', 'name': '中文(繁體)'}}
+
+LOCALE_NAMES = {
+    "af_ZA": {"name_en": "Afrikaans", "name": "Afrikaans"},
+    "am_ET": {"name_en": "Amharic", "name": "አማርኛ"},
+    "ar_AR": {"name_en": "Arabic", "name": "العربية"},
+    "bg_BG": {"name_en": "Bulgarian", "name": "Български"},
+    "bn_IN": {"name_en": "Bengali", "name": "বাংলা"},
+    "bs_BA": {"name_en": "Bosnian", "name": "Bosanski"},
+    "ca_ES": {"name_en": "Catalan", "name": "Català"},
+    "cs_CZ": {"name_en": "Czech", "name": "Čeština"},
+    "cy_GB": {"name_en": "Welsh", "name": "Cymraeg"},
+    "da_DK": {"name_en": "Danish", "name": "Dansk"},
+    "de_DE": {"name_en": "German", "name": "Deutsch"},
+    "el_GR": {"name_en": "Greek", "name": "Ελληνικά"},
+    "en_GB": {"name_en": "English (UK)", "name": "English (UK)"},
+    "en_US": {"name_en": "English (US)", "name": "English (US)"},
+    "es_ES": {"name_en": "Spanish (Spain)", "name": "Español (España)"},
+    "es_LA": {"name_en": "Spanish", "name": "Español"},
+    "et_EE": {"name_en": "Estonian", "name": "Eesti"},
+    "eu_ES": {"name_en": "Basque", "name": "Euskara"},
+    "fa_IR": {"name_en": "Persian", "name": "فارسی"},
+    "fi_FI": {"name_en": "Finnish", "name": "Suomi"},
+    "fr_CA": {"name_en": "French (Canada)", "name": "Français (Canada)"},
+    "fr_FR": {"name_en": "French", "name": "Français"},
+    "ga_IE": {"name_en": "Irish", "name": "Gaeilge"},
+    "gl_ES": {"name_en": "Galician", "name": "Galego"},
+    "he_IL": {"name_en": "Hebrew", "name": "עברית"},
+    "hi_IN": {"name_en": "Hindi", "name": "हिन्दी"},
+    "hr_HR": {"name_en": "Croatian", "name": "Hrvatski"},
+    "hu_HU": {"name_en": "Hungarian", "name": "Magyar"},
+    "id_ID": {"name_en": "Indonesian", "name": "Bahasa Indonesia"},
+    "is_IS": {"name_en": "Icelandic", "name": "Íslenska"},
+    "it_IT": {"name_en": "Italian", "name": "Italiano"},
+    "ja_JP": {"name_en": "Japanese", "name": "日本語"},
+    "ko_KR": {"name_en": "Korean", "name": "한국어"},
+    "lt_LT": {"name_en": "Lithuanian", "name": "Lietuvių"},
+    "lv_LV": {"name_en": "Latvian", "name": "Latviešu"},
+    "mk_MK": {"name_en": "Macedonian", "name": "Македонски"},
+    "ml_IN": {"name_en": "Malayalam", "name": "മലയാളം"},
+    "ms_MY": {"name_en": "Malay", "name": "Bahasa Melayu"},
+    "nb_NO": {"name_en": "Norwegian (bokmal)", "name": "Norsk (bokmål)"},
+    "nl_NL": {"name_en": "Dutch", "name": "Nederlands"},
+    "nn_NO": {"name_en": "Norwegian (nynorsk)", "name": "Norsk (nynorsk)"},
+    "pa_IN": {"name_en": "Punjabi", "name": "ਪੰਜਾਬੀ"},
+    "pl_PL": {"name_en": "Polish", "name": "Polski"},
+    "pt_BR": {"name_en": "Portuguese (Brazil)", "name": "Português (Brasil)"},
+    "pt_PT": {"name_en": "Portuguese (Portugal)", "name": "Português (Portugal)"},
+    "ro_RO": {"name_en": "Romanian", "name": "Română"},
+    "ru_RU": {"name_en": "Russian", "name": "Русский"},
+    "sk_SK": {"name_en": "Slovak", "name": "Slovenčina"},
+    "sl_SI": {"name_en": "Slovenian", "name": "Slovenščina"},
+    "sq_AL": {"name_en": "Albanian", "name": "Shqip"},
+    "sr_RS": {"name_en": "Serbian", "name": "Српски"},
+    "sv_SE": {"name_en": "Swedish", "name": "Svenska"},
+    "sw_KE": {"name_en": "Swahili", "name": "Kiswahili"},
+    "ta_IN": {"name_en": "Tamil", "name": "தமிழ்"},
+    "te_IN": {"name_en": "Telugu", "name": "తెలుగు"},
+    "th_TH": {"name_en": "Thai", "name": "ภาษาไทย"},
+    "tl_PH": {"name_en": "Filipino", "name": "Filipino"},
+    "tr_TR": {"name_en": "Turkish", "name": "Türkçe"},
+    "uk_UA": {"name_en": "Ukraini ", "name": "Українська"},
+    "vi_VN": {"name_en": "Vietnamese", "name": "Tiếng Việt"},
+    "zh_CN": {"name_en": "Chinese (Simplified)", "name": "中文(简体)"},
+    "zh_TW": {"name_en": "Chinese (Traditional)", "name": "中文(繁體)"},
+}
diff --git a/tornado/auth.py b/tornado/auth.py
index bbf6ea3c..d1edcc65 100644
--- a/tornado/auth.py
+++ b/tornado/auth.py
@@ -1,3 +1,18 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """This module contains implementations of various third-party
 authentication schemes.

@@ -56,6 +71,7 @@ Example usage for Google OAuth:
    :hide:

 """
+
 import base64
 import binascii
 import hashlib
@@ -64,11 +80,13 @@ import time
 import urllib.parse
 import uuid
 import warnings
+
 from tornado import httpclient
 from tornado import escape
 from tornado.httputil import url_concat
 from tornado.util import unicode_type
 from tornado.web import RequestHandler
+
 from typing import List, Any, Dict, cast, Iterable, Union, Optional


@@ -84,8 +102,11 @@ class OpenIdMixin(object):
     * ``_OPENID_ENDPOINT``: the identity provider's URI.
     """

-    def authenticate_redirect(self, callback_uri: Optional[str]=None,
-        ax_attrs: List[str]=['name', 'email', 'language', 'username']) ->None:
+    def authenticate_redirect(
+        self,
+        callback_uri: Optional[str] = None,
+        ax_attrs: List[str] = ["name", "email", "language", "username"],
+    ) -> None:
         """Redirects to the authentication URL for this service.

         After authentication, the service will redirect back to the given
@@ -102,10 +123,16 @@ class OpenIdMixin(object):
             longer returns an awaitable object. It is now an ordinary
             synchronous function.
         """
-        pass
-
-    async def get_authenticated_user(self, http_client: Optional[httpclient
-        .AsyncHTTPClient]=None) ->Dict[str, Any]:
+        handler = cast(RequestHandler, self)
+        callback_uri = callback_uri or handler.request.uri
+        assert callback_uri is not None
+        args = self._openid_args(callback_uri, ax_attrs=ax_attrs)
+        endpoint = self._OPENID_ENDPOINT  # type: ignore
+        handler.redirect(endpoint + "?" + urllib.parse.urlencode(args))
+
+    async def get_authenticated_user(
+        self, http_client: Optional[httpclient.AsyncHTTPClient] = None
+    ) -> Dict[str, Any]:
         """Fetches the authenticated user data upon redirect.

         This method should be called by the handler that receives the
@@ -121,15 +148,143 @@ class OpenIdMixin(object):
             The ``callback`` argument was removed. Use the returned
             awaitable object instead.
         """
-        pass
-
-    def get_auth_http_client(self) ->httpclient.AsyncHTTPClient:
+        handler = cast(RequestHandler, self)
+        # Verify the OpenID response via direct request to the OP
+        args = dict(
+            (k, v[-1]) for k, v in handler.request.arguments.items()
+        )  # type: Dict[str, Union[str, bytes]]
+        args["openid.mode"] = "check_authentication"
+        url = self._OPENID_ENDPOINT  # type: ignore
+        if http_client is None:
+            http_client = self.get_auth_http_client()
+        resp = await http_client.fetch(
+            url, method="POST", body=urllib.parse.urlencode(args)
+        )
+        return self._on_authentication_verified(resp)
+
+    def _openid_args(
+        self,
+        callback_uri: str,
+        ax_attrs: Iterable[str] = [],
+        oauth_scope: Optional[str] = None,
+    ) -> Dict[str, str]:
+        handler = cast(RequestHandler, self)
+        url = urllib.parse.urljoin(handler.request.full_url(), callback_uri)
+        args = {
+            "openid.ns": "http://specs.openid.net/auth/2.0",
+            "openid.claimed_id": "http://specs.openid.net/auth/2.0/identifier_select",
+            "openid.identity": "http://specs.openid.net/auth/2.0/identifier_select",
+            "openid.return_to": url,
+            "openid.realm": urllib.parse.urljoin(url, "/"),
+            "openid.mode": "checkid_setup",
+        }
+        if ax_attrs:
+            args.update(
+                {
+                    "openid.ns.ax": "http://openid.net/srv/ax/1.0",
+                    "openid.ax.mode": "fetch_request",
+                }
+            )
+            ax_attrs = set(ax_attrs)
+            required = []  # type: List[str]
+            if "name" in ax_attrs:
+                ax_attrs -= set(["name", "firstname", "fullname", "lastname"])
+                required += ["firstname", "fullname", "lastname"]
+                args.update(
+                    {
+                        "openid.ax.type.firstname": "http://axschema.org/namePerson/first",
+                        "openid.ax.type.fullname": "http://axschema.org/namePerson",
+                        "openid.ax.type.lastname": "http://axschema.org/namePerson/last",
+                    }
+                )
+            known_attrs = {
+                "email": "http://axschema.org/contact/email",
+                "language": "http://axschema.org/pref/language",
+                "username": "http://axschema.org/namePerson/friendly",
+            }
+            for name in ax_attrs:
+                args["openid.ax.type." + name] = known_attrs[name]
+                required.append(name)
+            args["openid.ax.required"] = ",".join(required)
+        if oauth_scope:
+            args.update(
+                {
+                    "openid.ns.oauth": "http://specs.openid.net/extensions/oauth/1.0",
+                    "openid.oauth.consumer": handler.request.host.split(":")[0],
+                    "openid.oauth.scope": oauth_scope,
+                }
+            )
+        return args
+
+    def _on_authentication_verified(
+        self, response: httpclient.HTTPResponse
+    ) -> Dict[str, Any]:
+        handler = cast(RequestHandler, self)
+        if b"is_valid:true" not in response.body:
+            raise AuthError("Invalid OpenID response: %r" % response.body)
+
+        # Make sure we got back at least an email from attribute exchange
+        ax_ns = None
+        for key in handler.request.arguments:
+            if (
+                key.startswith("openid.ns.")
+                and handler.get_argument(key) == "http://openid.net/srv/ax/1.0"
+            ):
+                ax_ns = key[10:]
+                break
+
+        def get_ax_arg(uri: str) -> str:
+            if not ax_ns:
+                return ""
+            prefix = "openid." + ax_ns + ".type."
+            ax_name = None
+            for name in handler.request.arguments.keys():
+                if handler.get_argument(name) == uri and name.startswith(prefix):
+                    part = name[len(prefix) :]
+                    ax_name = "openid." + ax_ns + ".value." + part
+                    break
+            if not ax_name:
+                return ""
+            return handler.get_argument(ax_name, "")
+
+        email = get_ax_arg("http://axschema.org/contact/email")
+        name = get_ax_arg("http://axschema.org/namePerson")
+        first_name = get_ax_arg("http://axschema.org/namePerson/first")
+        last_name = get_ax_arg("http://axschema.org/namePerson/last")
+        username = get_ax_arg("http://axschema.org/namePerson/friendly")
+        locale = get_ax_arg("http://axschema.org/pref/language").lower()
+        user = dict()
+        name_parts = []
+        if first_name:
+            user["first_name"] = first_name
+            name_parts.append(first_name)
+        if last_name:
+            user["last_name"] = last_name
+            name_parts.append(last_name)
+        if name:
+            user["name"] = name
+        elif name_parts:
+            user["name"] = " ".join(name_parts)
+        elif email:
+            user["name"] = email.split("@")[0]
+        if email:
+            user["email"] = email
+        if locale:
+            user["locale"] = locale
+        if username:
+            user["username"] = username
+        claimed_id = handler.get_argument("openid.claimed_id", None)
+        if claimed_id:
+            user["claimed_id"] = claimed_id
+        return user
+
+    def get_auth_http_client(self) -> httpclient.AsyncHTTPClient:
         """Returns the `.AsyncHTTPClient` instance to be used for auth requests.

         May be overridden by subclasses to use an HTTP client other than
         the default.
         """
-        pass
+        return httpclient.AsyncHTTPClient()


 class OAuthMixin(object):
@@ -149,9 +304,12 @@ class OAuthMixin(object):
     `_oauth_consumer_token` methods.
     """

-    async def authorize_redirect(self, callback_uri: Optional[str]=None,
-        extra_params: Optional[Dict[str, Any]]=None, http_client: Optional[
-        httpclient.AsyncHTTPClient]=None) ->None:
+    async def authorize_redirect(
+        self,
+        callback_uri: Optional[str] = None,
+        extra_params: Optional[Dict[str, Any]] = None,
+        http_client: Optional[httpclient.AsyncHTTPClient] = None,
+    ) -> None:
         """Redirects the user to obtain OAuth authorization for this service.

         The ``callback_uri`` may be omitted if you have previously
@@ -179,10 +337,25 @@ class OAuthMixin(object):
            awaitable object instead.

         """
-        pass
-
-    async def get_authenticated_user(self, http_client: Optional[httpclient
-        .AsyncHTTPClient]=None) ->Dict[str, Any]:
+        if callback_uri and getattr(self, "_OAUTH_NO_CALLBACKS", False):
+            raise Exception("This service does not support oauth_callback")
+        if http_client is None:
+            http_client = self.get_auth_http_client()
+        assert http_client is not None
+        if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
+            response = await http_client.fetch(
+                self._oauth_request_token_url(
+                    callback_uri=callback_uri, extra_params=extra_params
+                )
+            )
+        else:
+            response = await http_client.fetch(self._oauth_request_token_url())
+        url = self._OAUTH_AUTHORIZE_URL  # type: ignore
+        self._on_request_token(url, callback_uri, response)
+
+    async def get_authenticated_user(
+        self, http_client: Optional[httpclient.AsyncHTTPClient] = None
+    ) -> Dict[str, Any]:
         """Gets the OAuth authorized user and access token.

         This method should be called from the handler for your
@@ -198,17 +371,125 @@ class OAuthMixin(object):
            The ``callback`` argument was removed. Use the returned
            awaitable object instead.
         """
-        pass
-
-    def _oauth_consumer_token(self) ->Dict[str, Any]:
+        handler = cast(RequestHandler, self)
+        request_key = escape.utf8(handler.get_argument("oauth_token"))
+        oauth_verifier = handler.get_argument("oauth_verifier", None)
+        request_cookie = handler.get_cookie("_oauth_request_token")
+        if not request_cookie:
+            raise AuthError("Missing OAuth request token cookie")
+        handler.clear_cookie("_oauth_request_token")
+        cookie_key, cookie_secret = [
+            base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")
+        ]
+        if cookie_key != request_key:
+            raise AuthError("Request token does not match cookie")
+        token = dict(
+            key=cookie_key, secret=cookie_secret
+        )  # type: Dict[str, Union[str, bytes]]
+        if oauth_verifier:
+            token["verifier"] = oauth_verifier
+        if http_client is None:
+            http_client = self.get_auth_http_client()
+        assert http_client is not None
+        response = await http_client.fetch(self._oauth_access_token_url(token))
+        access_token = _oauth_parse_response(response.body)
+        user = await self._oauth_get_user_future(access_token)
+        if not user:
+            raise AuthError("Error getting user")
+        user["access_token"] = access_token
+        return user
+
+    def _oauth_request_token_url(
+        self,
+        callback_uri: Optional[str] = None,
+        extra_params: Optional[Dict[str, Any]] = None,
+    ) -> str:
+        handler = cast(RequestHandler, self)
+        consumer_token = self._oauth_consumer_token()
+        url = self._OAUTH_REQUEST_TOKEN_URL  # type: ignore
+        args = dict(
+            oauth_consumer_key=escape.to_basestring(consumer_token["key"]),
+            oauth_signature_method="HMAC-SHA1",
+            oauth_timestamp=str(int(time.time())),
+            oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)),
+            oauth_version="1.0",
+        )
+        if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
+            if callback_uri == "oob":
+                args["oauth_callback"] = "oob"
+            elif callback_uri:
+                args["oauth_callback"] = urllib.parse.urljoin(
+                    handler.request.full_url(), callback_uri
+                )
+            if extra_params:
+                args.update(extra_params)
+            signature = _oauth10a_signature(consumer_token, "GET", url, args)
+        else:
+            signature = _oauth_signature(consumer_token, "GET", url, args)
+
+        args["oauth_signature"] = signature
+        return url + "?" + urllib.parse.urlencode(args)
+
+    def _on_request_token(
+        self,
+        authorize_url: str,
+        callback_uri: Optional[str],
+        response: httpclient.HTTPResponse,
+    ) -> None:
+        handler = cast(RequestHandler, self)
+        request_token = _oauth_parse_response(response.body)
+        data = (
+            base64.b64encode(escape.utf8(request_token["key"]))
+            + b"|"
+            + base64.b64encode(escape.utf8(request_token["secret"]))
+        )
+        handler.set_cookie("_oauth_request_token", data)
+        args = dict(oauth_token=request_token["key"])
+        if callback_uri == "oob":
+            handler.finish(authorize_url + "?" + urllib.parse.urlencode(args))
+            return
+        elif callback_uri:
+            args["oauth_callback"] = urllib.parse.urljoin(
+                handler.request.full_url(), callback_uri
+            )
+        handler.redirect(authorize_url + "?" + urllib.parse.urlencode(args))
+
+    def _oauth_access_token_url(self, request_token: Dict[str, Any]) -> str:
+        consumer_token = self._oauth_consumer_token()
+        url = self._OAUTH_ACCESS_TOKEN_URL  # type: ignore
+        args = dict(
+            oauth_consumer_key=escape.to_basestring(consumer_token["key"]),
+            oauth_token=escape.to_basestring(request_token["key"]),
+            oauth_signature_method="HMAC-SHA1",
+            oauth_timestamp=str(int(time.time())),
+            oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)),
+            oauth_version="1.0",
+        )
+        if "verifier" in request_token:
+            args["oauth_verifier"] = request_token["verifier"]
+
+        if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
+            signature = _oauth10a_signature(
+                consumer_token, "GET", url, args, request_token
+            )
+        else:
+            signature = _oauth_signature(
+                consumer_token, "GET", url, args, request_token
+            )
+
+        args["oauth_signature"] = signature
+        return url + "?" + urllib.parse.urlencode(args)
+
+    def _oauth_consumer_token(self) -> Dict[str, Any]:
         """Subclasses must override this to return their OAuth consumer keys.

         The return value should be a `dict` with keys ``key`` and ``secret``.
         """
-        pass
+        raise NotImplementedError()

-    async def _oauth_get_user_future(self, access_token: Dict[str, Any]
-        ) ->Dict[str, Any]:
+    async def _oauth_get_user_future(
+        self, access_token: Dict[str, Any]
+    ) -> Dict[str, Any]:
         """Subclasses must override this to get basic information about the
         user.

@@ -228,25 +509,50 @@ class OAuthMixin(object):

            A synchronous fallback to ``_oauth_get_user`` was removed.
         """
-        pass
-
-    def _oauth_request_parameters(self, url: str, access_token: Dict[str,
-        Any], parameters: Dict[str, Any]={}, method: str='GET') ->Dict[str, Any
-        ]:
+        raise NotImplementedError()
+
+    def _oauth_request_parameters(
+        self,
+        url: str,
+        access_token: Dict[str, Any],
+        parameters: Dict[str, Any] = {},
+        method: str = "GET",
+    ) -> Dict[str, Any]:
         """Returns the OAuth parameters as a dict for the given request.

         parameters should include all POST arguments and query string arguments
         that will be sent with the request.
         """
-        pass
-
-    def get_auth_http_client(self) ->httpclient.AsyncHTTPClient:
+        consumer_token = self._oauth_consumer_token()
+        base_args = dict(
+            oauth_consumer_key=escape.to_basestring(consumer_token["key"]),
+            oauth_token=escape.to_basestring(access_token["key"]),
+            oauth_signature_method="HMAC-SHA1",
+            oauth_timestamp=str(int(time.time())),
+            oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)),
+            oauth_version="1.0",
+        )
+        args = {}
+        args.update(base_args)
+        args.update(parameters)
+        if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
+            signature = _oauth10a_signature(
+                consumer_token, method, url, args, access_token
+            )
+        else:
+            signature = _oauth_signature(
+                consumer_token, method, url, args, access_token
+            )
+        base_args["oauth_signature"] = escape.to_basestring(signature)
+        return base_args
+
+    def get_auth_http_client(self) -> httpclient.AsyncHTTPClient:
         """Returns the `.AsyncHTTPClient` instance to be used for auth requests.

         May be overridden by subclasses to use an HTTP client other than
         the default.
         """
-        pass
+        return httpclient.AsyncHTTPClient()
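
The two ``NotImplementedError`` hooks above, plus the class-level URL
constants, are the whole subclassing contract. A minimal sketch of a
provider-specific mixin, assuming a hypothetical service (the
"example.com" URLs and ``example_consumer_*`` settings keys are
placeholders, not a real provider):

    from tornado.auth import OAuthMixin

    class ExampleOAuthMixin(OAuthMixin):
        _OAUTH_REQUEST_TOKEN_URL = "https://example.com/oauth/request_token"
        _OAUTH_ACCESS_TOKEN_URL = "https://example.com/oauth/access_token"
        _OAUTH_AUTHORIZE_URL = "https://example.com/oauth/authorize"
        _OAUTH_VERSION = "1.0a"

        def _oauth_consumer_token(self):
            # Mixed in alongside RequestHandler, so self.settings exists.
            return dict(
                key=self.settings["example_consumer_key"],
                secret=self.settings["example_consumer_secret"],
            )

        async def _oauth_get_user_future(self, access_token):
            # Provider-specific profile lookup; placeholder shape only.
            return {"username": "example"}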


 class OAuth2Mixin(object):
@@ -261,10 +567,15 @@ class OAuth2Mixin(object):
     * ``_OAUTH_ACCESS_TOKEN_URL``:  The service's access token url.
     """

-    def authorize_redirect(self, redirect_uri: Optional[str]=None,
-        client_id: Optional[str]=None, client_secret: Optional[str]=None,
-        extra_params: Optional[Dict[str, Any]]=None, scope: Optional[List[
-        str]]=None, response_type: str='code') ->None:
+    def authorize_redirect(
+        self,
+        redirect_uri: Optional[str] = None,
+        client_id: Optional[str] = None,
+        client_secret: Optional[str] = None,
+        extra_params: Optional[Dict[str, Any]] = None,
+        scope: Optional[List[str]] = None,
+        response_type: str = "code",
+    ) -> None:
         """Redirects the user to obtain OAuth authorization for this service.

         Some providers require that you register a redirect URL with
@@ -282,10 +593,50 @@ class OAuth2Mixin(object):
            The ``client_secret`` argument (which has never had any effect)
            is deprecated and will be removed in Tornado 7.0.
         """
-        pass
-
-    async def oauth2_request(self, url: str, access_token: Optional[str]=
-        None, post_args: Optional[Dict[str, Any]]=None, **args: Any) ->Any:
+        if client_secret is not None:
+            warnings.warn("client_secret argument is deprecated", DeprecationWarning)
+        handler = cast(RequestHandler, self)
+        args = {"response_type": response_type}
+        if redirect_uri is not None:
+            args["redirect_uri"] = redirect_uri
+        if client_id is not None:
+            args["client_id"] = client_id
+        if extra_params:
+            args.update(extra_params)
+        if scope:
+            args["scope"] = " ".join(scope)
+        url = self._OAUTH_AUTHORIZE_URL  # type: ignore
+        handler.redirect(url_concat(url, args))
+
+    def _oauth_request_token_url(
+        self,
+        redirect_uri: Optional[str] = None,
+        client_id: Optional[str] = None,
+        client_secret: Optional[str] = None,
+        code: Optional[str] = None,
+        extra_params: Optional[Dict[str, Any]] = None,
+    ) -> str:
+        url = self._OAUTH_ACCESS_TOKEN_URL  # type: ignore
+        args = {}  # type: Dict[str, str]
+        if redirect_uri is not None:
+            args["redirect_uri"] = redirect_uri
+        if code is not None:
+            args["code"] = code
+        if client_id is not None:
+            args["client_id"] = client_id
+        if client_secret is not None:
+            args["client_secret"] = client_secret
+        if extra_params:
+            args.update(extra_params)
+        return url_concat(url, args)
+
+    async def oauth2_request(
+        self,
+        url: str,
+        access_token: Optional[str] = None,
+        post_args: Optional[Dict[str, Any]] = None,
+        **args: Any
+    ) -> Any:
         """Fetches the given URL auth an OAuth2 access token.

         If the request is a POST, ``post_args`` should be provided. Query
@@ -319,9 +670,23 @@ class OAuth2Mixin(object):

            The ``callback`` argument was removed. Use the returned awaitable object instead.
         """
-        pass
-
-    def get_auth_http_client(self) ->httpclient.AsyncHTTPClient:
+        all_args = {}
+        if access_token:
+            all_args["access_token"] = access_token
+            all_args.update(args)
+
+        if all_args:
+            url += "?" + urllib.parse.urlencode(all_args)
+        http = self.get_auth_http_client()
+        if post_args is not None:
+            response = await http.fetch(
+                url, method="POST", body=urllib.parse.urlencode(post_args)
+            )
+        else:
+            response = await http.fetch(url)
+        return escape.json_decode(response.body)
+
+    def get_auth_http_client(self) -> httpclient.AsyncHTTPClient:
         """Returns the `.AsyncHTTPClient` instance to be used for auth requests.

         May be overridden by subclasses to use an HTTP client other than
@@ -329,7 +694,7 @@ class OAuth2Mixin(object):

         .. versionadded:: 4.3
         """
-        pass
+        return httpclient.AsyncHTTPClient()
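
For OAuth 2 the mixin is thinner: ``authorize_redirect`` sends the user
out, and the code-for-token exchange plus subsequent API calls go through
``_oauth_request_token_url`` and ``oauth2_request`` (the same path
``FacebookGraphMixin`` takes below). A hedged sketch against a
hypothetical provider; every URL and ``my_oauth_*`` settings key here is a
placeholder:

    from tornado import escape
    from tornado.auth import OAuth2Mixin
    from tornado.web import RequestHandler

    class ExampleOAuth2LoginHandler(RequestHandler, OAuth2Mixin):
        _OAUTH_AUTHORIZE_URL = "https://provider.example/oauth2/authorize"
        _OAUTH_ACCESS_TOKEN_URL = "https://provider.example/oauth2/token"

        async def get(self):
            code = self.get_argument("code", None)
            if code is None:
                self.authorize_redirect(
                    redirect_uri="https://my.site/auth",
                    client_id=self.settings["my_oauth_client_id"],
                    scope=["profile"],
                    response_type="code",
                )
                return
            # Exchange the code for a token (response format varies by
            # provider; JSON assumed here), then call the API with it.
            http = self.get_auth_http_client()
            response = await http.fetch(
                self._oauth_request_token_url(
                    redirect_uri="https://my.site/auth",
                    client_id=self.settings["my_oauth_client_id"],
                    client_secret=self.settings["my_oauth_client_secret"],
                    code=code,
                )
            )
            access = escape.json_decode(response.body)
            user = await self.oauth2_request(
                "https://provider.example/api/me",
                access_token=access["access_token"],
            )
            self.finish(user)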


 class TwitterMixin(OAuthMixin):
@@ -370,15 +735,15 @@ class TwitterMixin(OAuthMixin):
        API, this class will no longer be updated and will be removed in the
        future.
     """
-    _OAUTH_REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
-    _OAUTH_ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
-    _OAUTH_AUTHORIZE_URL = 'https://api.twitter.com/oauth/authorize'
-    _OAUTH_AUTHENTICATE_URL = 'https://api.twitter.com/oauth/authenticate'
+
+    _OAUTH_REQUEST_TOKEN_URL = "https://api.twitter.com/oauth/request_token"
+    _OAUTH_ACCESS_TOKEN_URL = "https://api.twitter.com/oauth/access_token"
+    _OAUTH_AUTHORIZE_URL = "https://api.twitter.com/oauth/authorize"
+    _OAUTH_AUTHENTICATE_URL = "https://api.twitter.com/oauth/authenticate"
     _OAUTH_NO_CALLBACKS = False
-    _TWITTER_BASE_URL = 'https://api.twitter.com/1.1'
+    _TWITTER_BASE_URL = "https://api.twitter.com/1.1"

-    async def authenticate_redirect(self, callback_uri: Optional[str]=None
-        ) ->None:
+    async def authenticate_redirect(self, callback_uri: Optional[str] = None) -> None:
         """Just like `~OAuthMixin.authorize_redirect`, but
         auto-redirects if authorized.

@@ -394,10 +759,19 @@ class TwitterMixin(OAuthMixin):
            The ``callback`` argument was removed. Use the returned
            awaitable object instead.
         """
-        pass
-
-    async def twitter_request(self, path: str, access_token: Dict[str, Any],
-        post_args: Optional[Dict[str, Any]]=None, **args: Any) ->Any:
+        http = self.get_auth_http_client()
+        response = await http.fetch(
+            self._oauth_request_token_url(callback_uri=callback_uri)
+        )
+        self._on_request_token(self._OAUTH_AUTHENTICATE_URL, None, response)
+
+    async def twitter_request(
+        self,
+        path: str,
+        access_token: Dict[str, Any],
+        post_args: Optional[Dict[str, Any]] = None,
+        **args: Any
+    ) -> Any:
         """Fetches the given API path, e.g., ``statuses/user_timeline/btaylor``

         The path should not include the format or API version number.
@@ -439,7 +813,51 @@ class TwitterMixin(OAuthMixin):
            The ``callback`` argument was removed. Use the returned
            awaitable object instead.
         """
-        pass
+        if path.startswith("http:") or path.startswith("https:"):
+            # Raw urls are useful for e.g. search which doesn't follow the
+            # usual pattern: http://search.twitter.com/search.json
+            url = path
+        else:
+            url = self._TWITTER_BASE_URL + path + ".json"
+        # Add the OAuth resource request signature if we have credentials
+        if access_token:
+            all_args = {}
+            all_args.update(args)
+            all_args.update(post_args or {})
+            method = "POST" if post_args is not None else "GET"
+            oauth = self._oauth_request_parameters(
+                url, access_token, all_args, method=method
+            )
+            args.update(oauth)
+        if args:
+            url += "?" + urllib.parse.urlencode(args)
+        http = self.get_auth_http_client()
+        if post_args is not None:
+            response = await http.fetch(
+                url, method="POST", body=urllib.parse.urlencode(post_args)
+            )
+        else:
+            response = await http.fetch(url)
+        return escape.json_decode(response.body)
+
+    def _oauth_consumer_token(self) -> Dict[str, Any]:
+        handler = cast(RequestHandler, self)
+        handler.require_setting("twitter_consumer_key", "Twitter OAuth")
+        handler.require_setting("twitter_consumer_secret", "Twitter OAuth")
+        return dict(
+            key=handler.settings["twitter_consumer_key"],
+            secret=handler.settings["twitter_consumer_secret"],
+        )
+
+    async def _oauth_get_user_future(
+        self, access_token: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        user = await self.twitter_request(
+            "/account/verify_credentials", access_token=access_token
+        )
+        if user:
+            user["username"] = user["screen_name"]
+        return user
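
With the consumer-token and user hooks filled in, a Twitter login handler
reduces to a few lines; a sketch along the lines of the class docstring,
assuming ``twitter_consumer_key``/``twitter_consumer_secret`` are present
in the application settings (cookie handling elided):

    from tornado import auth, web

    class TwitterLoginHandler(web.RequestHandler, auth.TwitterMixin):
        async def get(self):
            if self.get_argument("oauth_token", None):
                user = await self.get_authenticated_user()
                # user["username"] and user["access_token"] are available;
                # persist them (e.g. set_signed_cookie) as appropriate.
                self.finish(user["username"])
            else:
                await self.authorize_redirect()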


 class GoogleOAuth2Mixin(OAuth2Mixin):
@@ -465,13 +883,14 @@ class GoogleOAuth2Mixin(OAuth2Mixin):

     .. versionadded:: 3.2
     """
-    _OAUTH_AUTHORIZE_URL = 'https://accounts.google.com/o/oauth2/v2/auth'
-    _OAUTH_ACCESS_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'
-    _OAUTH_USERINFO_URL = 'https://www.googleapis.com/oauth2/v1/userinfo'
+
+    _OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/v2/auth"
+    _OAUTH_ACCESS_TOKEN_URL = "https://www.googleapis.com/oauth2/v4/token"
+    _OAUTH_USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo"
     _OAUTH_NO_CALLBACKS = False
-    _OAUTH_SETTINGS_KEY = 'google_oauth'
+    _OAUTH_SETTINGS_KEY = "google_oauth"

-    def get_google_oauth_settings(self) ->Dict[str, str]:
+    def get_google_oauth_settings(self) -> Dict[str, str]:
         """Return the Google OAuth 2.0 credentials that you created with
         [Google Cloud
         Platform](https://console.cloud.google.com/apis/credentials). The dict
@@ -484,11 +903,16 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
         If your credentials are stored differently (e.g. in a db) you can
         override this method for custom provision.
         """
-        pass
-
-    async def get_authenticated_user(self, redirect_uri: str, code: str,
-        client_id: Optional[str]=None, client_secret: Optional[str]=None
-        ) ->Dict[str, Any]:
+        handler = cast(RequestHandler, self)
+        return handler.settings[self._OAUTH_SETTINGS_KEY]
+
+    async def get_authenticated_user(
+        self,
+        redirect_uri: str,
+        code: str,
+        client_id: Optional[str] = None,
+        client_secret: Optional[str] = None,
+    ) -> Dict[str, Any]:
         """Handles the login for the Google user, returning an access token.

         The result is a dictionary containing an ``access_token`` field
@@ -541,20 +965,50 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
         .. versionchanged:: 6.0

            The ``callback`` argument was removed. Use the returned awaitable object instead.
-        """
-        pass
+        """  # noqa: E501
+
+        if client_id is None or client_secret is None:
+            settings = self.get_google_oauth_settings()
+            if client_id is None:
+                client_id = settings["key"]
+            if client_secret is None:
+                client_secret = settings["secret"]
+        http = self.get_auth_http_client()
+        body = urllib.parse.urlencode(
+            {
+                "redirect_uri": redirect_uri,
+                "code": code,
+                "client_id": client_id,
+                "client_secret": client_secret,
+                "grant_type": "authorization_code",
+            }
+        )
+
+        response = await http.fetch(
+            self._OAUTH_ACCESS_TOKEN_URL,
+            method="POST",
+            headers={"Content-Type": "application/x-www-form-urlencoded"},
+            body=body,
+        )
+        return escape.json_decode(response.body)
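
Putting it together, a login handler mirrors the (elided) example in the
class docstring; the redirect URI below is a placeholder and must match
the one registered with Google:

    from tornado import auth, web

    class GoogleOAuth2LoginHandler(web.RequestHandler, auth.GoogleOAuth2Mixin):
        async def get(self):
            redirect_uri = "https://my.site/auth/google"  # placeholder
            if self.get_argument("code", None):
                access = await self.get_authenticated_user(
                    redirect_uri=redirect_uri, code=self.get_argument("code")
                )
                user = await self.oauth2_request(
                    self._OAUTH_USERINFO_URL,
                    access_token=access["access_token"],
                )
                self.finish(user)  # persist instead in real code
            else:
                self.authorize_redirect(
                    redirect_uri=redirect_uri,
                    client_id=self.get_google_oauth_settings()["key"],
                    scope=["profile", "email"],
                    response_type="code",
                    extra_params={"approval_prompt": "auto"},
                )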


 class FacebookGraphMixin(OAuth2Mixin):
     """Facebook authentication using the new Graph API and OAuth2."""
-    _OAUTH_ACCESS_TOKEN_URL = 'https://graph.facebook.com/oauth/access_token?'
-    _OAUTH_AUTHORIZE_URL = 'https://www.facebook.com/dialog/oauth?'
-    _OAUTH_NO_CALLBACKS = False
-    _FACEBOOK_BASE_URL = 'https://graph.facebook.com'

-    async def get_authenticated_user(self, redirect_uri: str, client_id:
-        str, client_secret: str, code: str, extra_fields: Optional[Dict[str,
-        Any]]=None) ->Optional[Dict[str, Any]]:
+    _OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?"
+    _OAUTH_AUTHORIZE_URL = "https://www.facebook.com/dialog/oauth?"
+    _OAUTH_NO_CALLBACKS = False
+    _FACEBOOK_BASE_URL = "https://graph.facebook.com"
+
+    async def get_authenticated_user(
+        self,
+        redirect_uri: str,
+        client_id: str,
+        client_secret: str,
+        code: str,
+        extra_fields: Optional[Dict[str, Any]] = None,
+    ) -> Optional[Dict[str, Any]]:
         """Handles the login for the Facebook user, returning a user object.

         Example usage:
@@ -603,10 +1057,67 @@ class FacebookGraphMixin(OAuth2Mixin):

            The ``callback`` argument was removed. Use the returned awaitable object instead.
         """
-        pass
-
-    async def facebook_request(self, path: str, access_token: Optional[str]
-        =None, post_args: Optional[Dict[str, Any]]=None, **args: Any) ->Any:
+        http = self.get_auth_http_client()
+        args = {
+            "redirect_uri": redirect_uri,
+            "code": code,
+            "client_id": client_id,
+            "client_secret": client_secret,
+        }
+
+        fields = set(
+            ["id", "name", "first_name", "last_name", "locale", "picture", "link"]
+        )
+        if extra_fields:
+            fields.update(extra_fields)
+
+        response = await http.fetch(
+            self._oauth_request_token_url(**args)  # type: ignore
+        )
+        args = escape.json_decode(response.body)
+        session = {
+            "access_token": args.get("access_token"),
+            "expires_in": args.get("expires_in"),
+        }
+        assert session["access_token"] is not None
+
+        user = await self.facebook_request(
+            path="/me",
+            access_token=session["access_token"],
+            appsecret_proof=hmac.new(
+                key=client_secret.encode("utf8"),
+                msg=session["access_token"].encode("utf8"),
+                digestmod=hashlib.sha256,
+            ).hexdigest(),
+            fields=",".join(fields),
+        )
+
+        if user is None:
+            return None
+
+        fieldmap = {}
+        for field in fields:
+            fieldmap[field] = user.get(field)
+
+        # session_expires is converted to str for compatibility with
+        # older versions in which the server used url-encoding and
+        # this code simply returned the string verbatim.
+        # This should change in Tornado 5.0.
+        fieldmap.update(
+            {
+                "access_token": session["access_token"],
+                "session_expires": str(session.get("expires_in")),
+            }
+        )
+        return fieldmap
+
+    async def facebook_request(
+        self,
+        path: str,
+        access_token: Optional[str] = None,
+        post_args: Optional[Dict[str, Any]] = None,
+        **args: Any
+    ) -> Any:
         """Fetches the given relative API path, e.g., "/btaylor/picture"

         If the request is a POST, ``post_args`` should be provided. Query
@@ -657,24 +1168,95 @@ class FacebookGraphMixin(OAuth2Mixin):

            The ``callback`` argument was removed. Use the returned awaitable object instead.
         """
-        pass
-
-
-def _oauth_signature(consumer_token: Dict[str, Any], method: str, url: str,
-    parameters: Dict[str, Any]={}, token: Optional[Dict[str, Any]]=None
-    ) ->bytes:
+        url = self._FACEBOOK_BASE_URL + path
+        return await self.oauth2_request(
+            url, access_token=access_token, post_args=post_args, **args
+        )
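
A corresponding handler sketch, along the lines of the elided docstring
example; the ``facebook_api_key``/``facebook_secret`` settings keys follow
the docs' convention and the redirect URI is a placeholder:

    from tornado import auth, web

    class FacebookGraphLoginHandler(web.RequestHandler, auth.FacebookGraphMixin):
        async def get(self):
            redirect_uri = "https://my.site/auth/facebook"  # placeholder
            if self.get_argument("code", None):
                user = await self.get_authenticated_user(
                    redirect_uri=redirect_uri,
                    client_id=self.settings["facebook_api_key"],
                    client_secret=self.settings["facebook_secret"],
                    code=self.get_argument("code"),
                )
                # ``user`` holds the requested fields plus access_token;
                # further Graph calls go through self.facebook_request().
                self.finish(user)
            else:
                self.authorize_redirect(
                    redirect_uri=redirect_uri,
                    client_id=self.settings["facebook_api_key"],
                    extra_params={"scope": "user_posts"},
                )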
+
+
+def _oauth_signature(
+    consumer_token: Dict[str, Any],
+    method: str,
+    url: str,
+    parameters: Dict[str, Any] = {},
+    token: Optional[Dict[str, Any]] = None,
+) -> bytes:
     """Calculates the HMAC-SHA1 OAuth signature for the given request.

     See http://oauth.net/core/1.0/#signing_process
     """
-    pass
-
-
-def _oauth10a_signature(consumer_token: Dict[str, Any], method: str, url:
-    str, parameters: Dict[str, Any]={}, token: Optional[Dict[str, Any]]=None
-    ) ->bytes:
+    parts = urllib.parse.urlparse(url)
+    scheme, netloc, path = parts[:3]
+    normalized_url = scheme.lower() + "://" + netloc.lower() + path
+
+    base_elems = []
+    base_elems.append(method.upper())
+    base_elems.append(normalized_url)
+    base_elems.append(
+        "&".join(
+            "%s=%s" % (k, _oauth_escape(str(v))) for k, v in sorted(parameters.items())
+        )
+    )
+    base_string = "&".join(_oauth_escape(e) for e in base_elems)
+
+    key_elems = [escape.utf8(consumer_token["secret"])]
+    key_elems.append(escape.utf8(token["secret"] if token else ""))
+    key = b"&".join(key_elems)
+
+    hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
+    return binascii.b2a_base64(hash.digest())[:-1]
+
+
+def _oauth10a_signature(
+    consumer_token: Dict[str, Any],
+    method: str,
+    url: str,
+    parameters: Dict[str, Any] = {},
+    token: Optional[Dict[str, Any]] = None,
+) -> bytes:
     """Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request.

     See http://oauth.net/core/1.0a/#signing_process
     """
-    pass
+    parts = urllib.parse.urlparse(url)
+    scheme, netloc, path = parts[:3]
+    normalized_url = scheme.lower() + "://" + netloc.lower() + path
+
+    base_elems = []
+    base_elems.append(method.upper())
+    base_elems.append(normalized_url)
+    base_elems.append(
+        "&".join(
+            "%s=%s" % (k, _oauth_escape(str(v))) for k, v in sorted(parameters.items())
+        )
+    )
+
+    base_string = "&".join(_oauth_escape(e) for e in base_elems)
+    key_elems = [escape.utf8(urllib.parse.quote(consumer_token["secret"], safe="~"))]
+    key_elems.append(
+        escape.utf8(urllib.parse.quote(token["secret"], safe="~") if token else "")
+    )
+    key = b"&".join(key_elems)
+
+    hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
+    return binascii.b2a_base64(hash.digest())[:-1]
+
+
+def _oauth_escape(val: Union[str, bytes]) -> str:
+    if isinstance(val, unicode_type):
+        val = val.encode("utf-8")
+    return urllib.parse.quote(val, safe="~")
+
+
+def _oauth_parse_response(body: bytes) -> Dict[str, Any]:
+    # I can't find an officially-defined encoding for oauth responses and
+    # have never seen anyone use non-ascii.  Leave the response in a byte
+    # string for python 2, and use utf8 on python 3.
+    body_str = escape.native_str(body)
+    p = urllib.parse.parse_qs(body_str, keep_blank_values=False)
+    token = dict(key=p["oauth_token"][0], secret=p["oauth_token_secret"][0])
+
+    # Add the extra parameters the Provider included to the token
+    special = ("oauth_token", "oauth_token_secret")
+    token.update((k, p[k][0]) for k in p if k not in special)
+    return token
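
To make the signing process concrete, a small sketch exercising the
private helpers above with made-up credentials (real code reaches them
through ``_oauth_request_parameters``); all values are illustrative only:

    consumer = dict(key="my_key", secret="my_secret")
    token = dict(key="req_key", secret="req_secret")
    params = {
        "oauth_consumer_key": consumer["key"],
        "oauth_token": token["key"],
        "oauth_signature_method": "HMAC-SHA1",
        "oauth_timestamp": "1700000000",
        "oauth_nonce": "abc123",
        "oauth_version": "1.0",
    }
    # Base string = METHOD & escaped-url & escaped, sorted k=v pairs;
    # the HMAC key is "consumer_secret&token_secret" (quoted for 1.0a).
    sig = _oauth10a_signature(
        consumer, "GET", "https://example.com/resource", params, token
    )
    params["oauth_signature"] = sig  # base64-encoded HMAC-SHA1, as bytes

    # And the inverse direction, parsing a token response:
    body = b"oauth_token=abc&oauth_token_secret=def&screen_name=btaylor"
    assert _oauth_parse_response(body) == {
        "key": "abc", "secret": "def", "screen_name": "btaylor"
    }
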
diff --git a/tornado/autoreload.py b/tornado/autoreload.py
index 05cda05c..c6a6e82d 100644
--- a/tornado/autoreload.py
+++ b/tornado/autoreload.py
@@ -1,3 +1,18 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """Automatically restart the server when a source file is modified.

 Most applications should not access this module directly.  Instead,
@@ -27,11 +42,38 @@ Additionally, modifying these variables will cause reloading to behave
 incorrectly.

 """
+
 import os
 import sys
-if __name__ == '__main__':
+
+# sys.path handling
+# -----------------
+#
+# If a module is run with "python -m", the current directory (i.e. "")
+# is automatically prepended to sys.path, but not if it is run as
+# "path/to/file.py".  The processing for "-m" rewrites the former to
+# the latter, so subsequent executions won't have the same path as the
+# original.
+#
+# Conversely, when run as path/to/file.py, the directory containing
+# file.py gets added to the path, which can cause confusion as imports
+# may become relative in spite of the future import.
+#
+# We address the former problem by reconstructing the original command
+# line before re-execution so the new process will
+# see the correct path.  We attempt to address the latter problem when
+# tornado.autoreload is run as __main__.
+
+if __name__ == "__main__":
+    # This sys.path manipulation must come before our imports (as much
+    # as possible - if we introduced a tornado.sys or tornado.os
+    # module we'd be in trouble), or else our imports would become
+    # relative again despite the future import.
+    #
+    # There is a separate __main__ block at the end of the file to call main().
     if sys.path[0] == os.path.dirname(__file__):
         del sys.path[0]
+
 import functools
 import importlib.abc
 import os
@@ -41,60 +83,158 @@ import traceback
 import types
 import subprocess
 import weakref
+
 from tornado import ioloop
 from tornado.log import gen_log
 from tornado import process
+
 try:
     import signal
 except ImportError:
-    signal = None
+    signal = None  # type: ignore
+
 from typing import Callable, Dict, Optional, List, Union
-_has_execv = sys.platform != 'win32'
+
+# os.execv is broken on Windows and can't properly parse command line
+# arguments and executable name if they contain whitespaces. subprocess
+# fixes that behavior.
+_has_execv = sys.platform != "win32"
+
 _watched_files = set()
 _reload_hooks = []
 _reload_attempted = False
-_io_loops: 'weakref.WeakKeyDictionary[ioloop.IOLoop, bool]' = (weakref.
-    WeakKeyDictionary())
+_io_loops: "weakref.WeakKeyDictionary[ioloop.IOLoop, bool]" = (
+    weakref.WeakKeyDictionary()
+)
 _autoreload_is_main = False
 _original_argv: Optional[List[str]] = None
 _original_spec = None


-def start(check_time: int=500) ->None:
+def start(check_time: int = 500) -> None:
     """Begins watching source files for changes.

     .. versionchanged:: 5.0
        The ``io_loop`` argument (deprecated since version 4.1) has been removed.
     """
-    pass
+    io_loop = ioloop.IOLoop.current()
+    if io_loop in _io_loops:
+        return
+    _io_loops[io_loop] = True
+    if len(_io_loops) > 1:
+        gen_log.warning("tornado.autoreload started more than once in the same process")
+    modify_times: Dict[str, float] = {}
+    callback = functools.partial(_reload_on_update, modify_times)
+    scheduler = ioloop.PeriodicCallback(callback, check_time)
+    scheduler.start()


-def wait() ->None:
+def wait() -> None:
     """Wait for a watched file to change, then restart the process.

     Intended to be used at the end of scripts like unit test runners,
     to run the tests again after any source file changes (but see also
     the command-line interface in `main`)
     """
-    pass
+    io_loop = ioloop.IOLoop()
+    io_loop.add_callback(start)
+    io_loop.start()


-def watch(filename: str) ->None:
+def watch(filename: str) -> None:
     """Add a file to the watch list.

     All imported modules are watched by default.
     """
-    pass
+    _watched_files.add(filename)


-def add_reload_hook(fn: Callable[[], None]) ->None:
+def add_reload_hook(fn: Callable[[], None]) -> None:
     """Add a function to be called before reloading the process.

     Note that for open file and socket handles it is generally
     preferable to set the ``FD_CLOEXEC`` flag (using `fcntl` or
     `os.set_inheritable`) instead of using a reload hook to close them.
     """
-    pass
+    _reload_hooks.append(fn)
+
+
+def _reload_on_update(modify_times: Dict[str, float]) -> None:
+    if _reload_attempted:
+        # We already tried to reload and it didn't work, so don't try again.
+        return
+    if process.task_id() is not None:
+        # We're in a child process created by fork_processes.  If child
+        # processes restarted themselves, they'd all restart and then
+        # all call fork_processes again.
+        return
+    for module in list(sys.modules.values()):
+        # Some modules play games with sys.modules (e.g. email/__init__.py
+        # in the standard library), and occasionally this can cause strange
+        # failures in getattr.  Just ignore anything that's not an ordinary
+        # module.
+        if not isinstance(module, types.ModuleType):
+            continue
+        path = getattr(module, "__file__", None)
+        if not path:
+            continue
+        if path.endswith(".pyc") or path.endswith(".pyo"):
+            path = path[:-1]
+        _check_file(modify_times, path)
+    for path in _watched_files:
+        _check_file(modify_times, path)
+
+
+def _check_file(modify_times: Dict[str, float], path: str) -> None:
+    try:
+        modified = os.stat(path).st_mtime
+    except Exception:
+        return
+    if path not in modify_times:
+        modify_times[path] = modified
+        return
+    if modify_times[path] != modified:
+        gen_log.info("%s modified; restarting server", path)
+        _reload()
+
+
+def _reload() -> None:
+    global _reload_attempted
+    _reload_attempted = True
+    for fn in _reload_hooks:
+        fn()
+    if sys.platform != "win32":
+        # Clear the alarm signal set by
+        # ioloop.set_blocking_log_threshold so it doesn't fire
+        # after the exec.
+        signal.setitimer(signal.ITIMER_REAL, 0, 0)
+    # sys.path fixes: see comments at top of file.  If __main__.__spec__
+    # exists, we were invoked with -m and the effective path is about to
+    # change on re-exec.  Reconstruct the original command line to
+    # ensure that the new process sees the same path we did.
+    if _autoreload_is_main:
+        assert _original_argv is not None
+        spec = _original_spec
+        argv = _original_argv
+    else:
+        spec = getattr(sys.modules["__main__"], "__spec__", None)
+        argv = sys.argv
+    if spec and spec.name != "__main__":
+        # __spec__ is set in two cases: when running a module, and when running a directory. (when
+        # running a file, there is no spec). In the former case, we must pass -m to maintain the
+        # module-style behavior (setting sys.path), even though python stripped -m from its argv at
+        # startup. If spec.name is exactly "__main__", we're running a directory and should fall
+        # through to the non-module behavior.
+        #
+        # Some of this, including the use of exactly __main__ as a spec for directory mode,
+        # is documented at https://docs.python.org/3/library/runpy.html#runpy.run_path
+        argv = ["-m", spec.name] + argv[1:]
+
+    if not _has_execv:
+        subprocess.Popen([sys.executable] + argv)
+        os._exit(0)
+    else:
+        os.execv(sys.executable, [sys.executable] + argv)


 _USAGE = """
@@ -103,7 +243,7 @@ _USAGE = """
 """


-def main() ->None:
+def main() -> None:
     """Command-line wrapper to re-run a script whenever its source changes.

     Scripts may be specified by filename or module name::
@@ -116,8 +256,95 @@ def main() ->None:
     can catch import-time problems like syntax errors that would otherwise
     prevent the script from reaching its call to `wait`.
     """
-    pass
+    # Remember that we were launched with autoreload as main.
+    # The main module can be tricky; set the variables both in our globals
+    # (which may be __main__) and the real importable version.
+    #
+    # We use optparse instead of the newer argparse because we want to
+    # mimic the python command-line interface which requires stopping
+    # parsing at the first positional argument. optparse supports
+    # this but as far as I can tell argparse does not.
+    import optparse
+    import tornado.autoreload
+
+    global _autoreload_is_main
+    global _original_argv, _original_spec
+    tornado.autoreload._autoreload_is_main = _autoreload_is_main = True
+    original_argv = sys.argv
+    tornado.autoreload._original_argv = _original_argv = original_argv
+    original_spec = getattr(sys.modules["__main__"], "__spec__", None)
+    tornado.autoreload._original_spec = _original_spec = original_spec
+
+    parser = optparse.OptionParser(
+        prog="python -m tornado.autoreload",
+        usage=_USAGE,
+        epilog="Either -m or a path must be specified, but not both",
+    )
+    parser.disable_interspersed_args()
+    parser.add_option("-m", dest="module", metavar="module", help="module to run")
+    parser.add_option(
+        "--until-success",
+        action="store_true",
+        help="stop reloading after the program exist successfully (status code 0)",
+    )
+    opts, rest = parser.parse_args()
+    if opts.module is None:
+        if not rest:
+            print("Either -m or a path must be specified", file=sys.stderr)
+            sys.exit(1)
+        path = rest[0]
+        sys.argv = rest[:]
+    else:
+        path = None
+        sys.argv = [sys.argv[0]] + rest
+
+    # SystemExit.code is typed funny: https://github.com/python/typeshed/issues/8513
+    # All we care about is truthiness
+    exit_status: Union[int, str, None] = 1
+    try:
+        import runpy
+
+        if opts.module is not None:
+            runpy.run_module(opts.module, run_name="__main__", alter_sys=True)
+        else:
+            assert path is not None
+            runpy.run_path(path, run_name="__main__")
+    except SystemExit as e:
+        exit_status = e.code
+        gen_log.info("Script exited with status %s", e.code)
+    except Exception as e:
+        gen_log.warning("Script exited with uncaught exception", exc_info=True)
+        # If an exception occurred at import time, the file with the error
+        # never made it into sys.modules and so we won't know to watch it.
+        # Just to make sure we've covered everything, walk the stack trace
+        # from the exception and watch every file.
+        for filename, lineno, name, line in traceback.extract_tb(sys.exc_info()[2]):
+            watch(filename)
+        if isinstance(e, SyntaxError):
+            # SyntaxErrors are special:  their innermost stack frame is fake
+            # so extract_tb won't see it and we have to get the filename
+            # from the exception object.
+            if e.filename is not None:
+                watch(e.filename)
+    else:
+        exit_status = 0
+        gen_log.info("Script exited normally")
+    # restore sys.argv so subsequent executions will include autoreload
+    sys.argv = original_argv
+
+    if opts.module is not None:
+        assert opts.module is not None
+        # runpy did a fake import of the module as __main__, but now it's
+        # no longer in sys.modules.  Figure out where it is and watch it.
+        loader = pkgutil.get_loader(opts.module)
+        if loader is not None and isinstance(loader, importlib.abc.FileLoader):
+            watch(loader.get_filename())
+    if opts.until_success and not exit_status:
+        return
+    wait()


-if __name__ == '__main__':
+if __name__ == "__main__":
+    # See also the other __main__ block at the top of the file, which modifies
+    # sys.path before our imports
     main()
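
In application code this module is usually enabled implicitly via
``Application(debug=True)``, which calls ``start()``; it can also be
driven explicitly. A minimal sketch (the watched path is hypothetical):

    import tornado.autoreload
    import tornado.ioloop
    import tornado.web

    app = tornado.web.Application([])  # add handlers as appropriate
    app.listen(8888)
    tornado.autoreload.watch("config/settings.yaml")  # extra non-module file
    tornado.autoreload.add_reload_hook(lambda: print("about to reload"))
    tornado.autoreload.start()
    tornado.ioloop.IOLoop.current().start()
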
diff --git a/tornado/concurrent.py b/tornado/concurrent.py
index 16b042a9..5047c538 100644
--- a/tornado/concurrent.py
+++ b/tornado/concurrent.py
@@ -1,3 +1,17 @@
+#
+# Copyright 2012 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
 """Utilities for working with ``Future`` objects.

 Tornado previously provided its own ``Future`` class, but now uses
@@ -10,33 +24,61 @@ implementation, applications rarely need to interact with it
 directly.

 """
+
 import asyncio
 from concurrent import futures
 import functools
 import sys
 import types
+
 from tornado.log import app_log
+
 import typing
 from typing import Any, Callable, Optional, Tuple, Union
-_T = typing.TypeVar('_T')
+
+_T = typing.TypeVar("_T")


 class ReturnValueIgnoredError(Exception):
+    # No longer used; was previously used by @return_future
     pass


 Future = asyncio.Future
-FUTURES = futures.Future, Future
+
+FUTURES = (futures.Future, Future)
+
+
+def is_future(x: Any) -> bool:
+    return isinstance(x, FUTURES)


 class DummyExecutor(futures.Executor):
+    def submit(  # type: ignore[override]
+        self, fn: Callable[..., _T], *args: Any, **kwargs: Any
+    ) -> "futures.Future[_T]":
+        future = futures.Future()  # type: futures.Future[_T]
+        try:
+            future_set_result_unless_cancelled(future, fn(*args, **kwargs))
+        except Exception:
+            future_set_exc_info(future, sys.exc_info())
+        return future
+
     if sys.version_info >= (3, 9):

+        def shutdown(self, wait: bool = True, cancel_futures: bool = False) -> None:
+            pass
+
+    else:
+
+        def shutdown(self, wait: bool = True) -> None:
+            pass
+

 dummy_executor = DummyExecutor()


-def run_on_executor(*args: Any, **kwargs: Any) ->Callable:
+def run_on_executor(*args: Any, **kwargs: Any) -> Callable:
     """Decorator to run a synchronous method asynchronously on an executor.

     Returns a future.
@@ -76,13 +118,34 @@ def run_on_executor(*args: Any, **kwargs: Any) ->Callable:

        The ``callback`` argument was removed.
     """
-    pass
+
+    # Fully type-checking decorators is tricky, and this one is
+    # discouraged anyway so it doesn't have all the generic magic.
+    def run_on_executor_decorator(fn: Callable) -> Callable[..., Future]:
+        executor = kwargs.get("executor", "executor")
+
+        @functools.wraps(fn)
+        def wrapper(self: Any, *args: Any, **kwargs: Any) -> Future:
+            async_future = Future()  # type: Future
+            conc_future = getattr(self, executor).submit(fn, self, *args, **kwargs)
+            chain_future(conc_future, async_future)
+            return async_future
+
+        return wrapper
+
+    if args and kwargs:
+        raise ValueError("cannot combine positional and keyword args")
+    if len(args) == 1:
+        return run_on_executor_decorator(args[0])
+    elif len(args) != 0:
+        raise ValueError("expected 1 argument, got %d", len(args))
+    return run_on_executor_decorator
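
Both decorator forms in one hedged sketch; ``blocking_io`` stands in for a
real blocking call:

    from concurrent.futures import ThreadPoolExecutor
    from tornado.concurrent import run_on_executor

    class Worker:
        def __init__(self):
            self.executor = ThreadPoolExecutor(max_workers=4)  # default attr
            self._pool = ThreadPoolExecutor(max_workers=1)

        @run_on_executor
        def blocking_io(self, n):
            return n * 2  # imagine a blocking call here

        @run_on_executor(executor="_pool")
        def other_blocking_io(self, n):
            return n + 1

    # In a coroutine: result = await Worker().blocking_io(21)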


 _NO_RESULT = object()


-def chain_future(a: 'Future[_T]', b: 'Future[_T]') ->None:
+def chain_future(a: "Future[_T]", b: "Future[_T]") -> None:
     """Chain two futures together so that when one completes, so does the other.

     The result (success or failure) of ``a`` will be copied to ``b``, unless
@@ -94,11 +157,31 @@ def chain_future(a: 'Future[_T]', b: 'Future[_T]') ->None:
        `concurrent.futures.Future`.

     """
-    pass

-
-def future_set_result_unless_cancelled(future:
-    'Union[futures.Future[_T], Future[_T]]', value: _T) ->None:
+    def copy(a: "Future[_T]") -> None:
+        if b.done():
+            return
+        if hasattr(a, "exc_info") and a.exc_info() is not None:  # type: ignore
+            future_set_exc_info(b, a.exc_info())  # type: ignore
+        else:
+            a_exc = a.exception()
+            if a_exc is not None:
+                b.set_exception(a_exc)
+            else:
+                b.set_result(a.result())
+
+    if isinstance(a, Future):
+        future_add_done_callback(a, copy)
+    else:
+        # concurrent.futures.Future
+        from tornado.ioloop import IOLoop
+
+        IOLoop.current().add_future(a, copy)
+
+
+def future_set_result_unless_cancelled(
+    future: "Union[futures.Future[_T], Future[_T]]", value: _T
+) -> None:
     """Set the given ``value`` as the `Future`'s result, if not cancelled.

     Avoids ``asyncio.InvalidStateError`` when calling ``set_result()`` on
@@ -106,11 +189,13 @@ def future_set_result_unless_cancelled(future:

     .. versionadded:: 5.0
     """
-    pass
+    if not future.cancelled():
+        future.set_result(value)


-def future_set_exception_unless_cancelled(future:
-    'Union[futures.Future[_T], Future[_T]]', exc: BaseException) ->None:
+def future_set_exception_unless_cancelled(
+    future: "Union[futures.Future[_T], Future[_T]]", exc: BaseException
+) -> None:
     """Set the given ``exc`` as the `Future`'s exception.

     If the Future is already canceled, logs the exception instead. If
@@ -124,12 +209,18 @@ def future_set_exception_unless_cancelled(future:
     .. versionadded:: 6.0

     """
-    pass
-
-
-def future_set_exc_info(future: 'Union[futures.Future[_T], Future[_T]]',
-    exc_info: Tuple[Optional[type], Optional[BaseException], Optional[types
-    .TracebackType]]) ->None:
+    if not future.cancelled():
+        future.set_exception(exc)
+    else:
+        app_log.error("Exception after Future was cancelled", exc_info=exc)
+
+
+def future_set_exc_info(
+    future: "Union[futures.Future[_T], Future[_T]]",
+    exc_info: Tuple[
+        Optional[type], Optional[BaseException], Optional[types.TracebackType]
+    ],
+) -> None:
     """Set the given ``exc_info`` as the `Future`'s exception.

     Understands both `asyncio.Future` and the extensions in older
@@ -143,12 +234,28 @@ def future_set_exc_info(future: 'Union[futures.Future[_T], Future[_T]]',
        (previously ``asyncio.InvalidStateError`` would be raised)

     """
+    if exc_info[1] is None:
+        raise Exception("future_set_exc_info called with no exception")
+    future_set_exception_unless_cancelled(future, exc_info[1])
+
+
+@typing.overload
+def future_add_done_callback(
+    future: "futures.Future[_T]", callback: Callable[["futures.Future[_T]"], None]
+) -> None:
     pass


-def future_add_done_callback(future:
-    'Union[futures.Future[_T], Future[_T]]', callback: Callable[..., None]
-    ) ->None:
+@typing.overload  # noqa: F811
+def future_add_done_callback(
+    future: "Future[_T]", callback: Callable[["Future[_T]"], None]
+) -> None:
+    pass
+
+
+def future_add_done_callback(  # noqa: F811
+    future: "Union[futures.Future[_T], Future[_T]]", callback: Callable[..., None]
+) -> None:
     """Arrange to call ``callback`` when ``future`` is complete.

     ``callback`` is invoked with one argument, the ``future``.
@@ -159,4 +266,7 @@ def future_add_done_callback(future:

     .. versionadded:: 5.0
     """
-    pass
+    if future.done():
+        callback(future)
+    else:
+        future.add_done_callback(callback)
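
The helpers compose into a two-line bridge from ``concurrent.futures`` to
the asyncio world; a small sketch:

    import asyncio
    from concurrent.futures import ThreadPoolExecutor
    from tornado.concurrent import chain_future

    async def main():
        pool = ThreadPoolExecutor(max_workers=1)
        conc = pool.submit(lambda: 42)  # concurrent.futures.Future
        async_fut = asyncio.get_running_loop().create_future()
        chain_future(conc, async_fut)   # result/exception copied across
        print(await async_fut)          # -> 42

    asyncio.run(main())
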
diff --git a/tornado/curl_httpclient.py b/tornado/curl_httpclient.py
index 548b9c4d..397c3a97 100644
--- a/tornado/curl_httpclient.py
+++ b/tornado/curl_httpclient.py
@@ -1,4 +1,20 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """Non-blocking HTTP client implementation using pycurl."""
+
 import collections
 import functools
 import logging
@@ -7,62 +23,566 @@ import re
 import threading
 import time
 from io import BytesIO
+
 from tornado import httputil
 from tornado import ioloop
+
 from tornado.escape import utf8, native_str
-from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError, AsyncHTTPClient, main
+from tornado.httpclient import (
+    HTTPRequest,
+    HTTPResponse,
+    HTTPError,
+    AsyncHTTPClient,
+    main,
+)
 from tornado.log import app_log
+
 from typing import Dict, Any, Callable, Union, Optional
 import typing
+
 if typing.TYPE_CHECKING:
-    from typing import Deque, Tuple
-curl_log = logging.getLogger('tornado.curl_httpclient')
-CR_OR_LF_RE = re.compile(b'\r|\n')
+    from typing import Deque, Tuple  # noqa: F401
+
+curl_log = logging.getLogger("tornado.curl_httpclient")
+
+CR_OR_LF_RE = re.compile(b"\r|\n")
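
Nothing in this module is normally instantiated directly; the class is
selected process-wide through ``AsyncHTTPClient.configure``. A minimal
sketch (assumes the ``pycurl`` package is installed):

    from tornado.httpclient import AsyncHTTPClient

    AsyncHTTPClient.configure(
        "tornado.curl_httpclient.CurlAsyncHTTPClient", max_clients=20
    )

    async def fetch_example():
        client = AsyncHTTPClient()  # now a CurlAsyncHTTPClient
        response = await client.fetch("https://www.example.com/")
        return response.body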


 class CurlAsyncHTTPClient(AsyncHTTPClient):
+    def initialize(  # type: ignore
+        self, max_clients: int = 10, defaults: Optional[Dict[str, Any]] = None
+    ) -> None:
+        super().initialize(defaults=defaults)
+        # Typeshed is incomplete for CurlMulti, so just use Any for now.
+        self._multi = pycurl.CurlMulti()  # type: Any
+        self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
+        self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
+        self._curls = [self._curl_create() for i in range(max_clients)]
+        self._free_list = self._curls[:]
+        self._requests = (
+            collections.deque()
+        )  # type: Deque[Tuple[HTTPRequest, Callable[[HTTPResponse], None], float]]
+        self._fds = {}  # type: Dict[int, int]
+        self._timeout = None  # type: Optional[object]
+
+        # libcurl has bugs that sometimes cause it to not report all
+        # relevant file descriptors and timeouts to TIMERFUNCTION/
+        # SOCKETFUNCTION.  Mitigate the effects of such bugs by
+        # forcing a periodic scan of all active requests.
+        self._force_timeout_callback = ioloop.PeriodicCallback(
+            self._handle_force_timeout, 1000
+        )
+        self._force_timeout_callback.start()
+
+        # Work around a bug in libcurl 7.29.0: Some fields in the curl
+        # multi object are initialized lazily, and its destructor will
+        # segfault if it is destroyed without having been used.  Add
+        # and remove a dummy handle to make sure everything is
+        # initialized.
+        dummy_curl_handle = pycurl.Curl()
+        self._multi.add_handle(dummy_curl_handle)
+        self._multi.remove_handle(dummy_curl_handle)
+
+    def close(self) -> None:
+        self._force_timeout_callback.stop()
+        if self._timeout is not None:
+            self.io_loop.remove_timeout(self._timeout)
+        for curl in self._curls:
+            curl.close()
+        self._multi.close()
+        super().close()

-    def _handle_socket(self, event: int, fd: int, multi: Any, data: bytes
-        ) ->None:
+        # Set the properties below to None to reduce the reference count of
+        # the current instance: they hold bound methods of this instance,
+        # which would otherwise cause circular references.
+        self._force_timeout_callback = None  # type: ignore
+        self._multi = None
+
+    def fetch_impl(
+        self, request: HTTPRequest, callback: Callable[[HTTPResponse], None]
+    ) -> None:
+        self._requests.append((request, callback, self.io_loop.time()))
+        self._process_queue()
+        self._set_timeout(0)
+
+    def _handle_socket(self, event: int, fd: int, multi: Any, data: bytes) -> None:
         """Called by libcurl when it wants to change the file descriptors
         it cares about.
         """
-        pass
+        event_map = {
+            pycurl.POLL_NONE: ioloop.IOLoop.NONE,
+            pycurl.POLL_IN: ioloop.IOLoop.READ,
+            pycurl.POLL_OUT: ioloop.IOLoop.WRITE,
+            pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE,
+        }
+        if event == pycurl.POLL_REMOVE:
+            if fd in self._fds:
+                self.io_loop.remove_handler(fd)
+                del self._fds[fd]
+        else:
+            ioloop_event = event_map[event]
+            # libcurl sometimes closes a socket and then opens a new
+            # one using the same FD without giving us a POLL_NONE in
+            # between.  This is a problem with the epoll IOLoop,
+            # because the kernel can tell when a socket is closed and
+            # removes it from the epoll automatically, causing future
+            # update_handler calls to fail.  Since we can't tell when
+            # this has happened, always use remove and re-add
+            # instead of update.
+            if fd in self._fds:
+                self.io_loop.remove_handler(fd)
+            self.io_loop.add_handler(fd, self._handle_events, ioloop_event)
+            self._fds[fd] = ioloop_event

-    def _set_timeout(self, msecs: int) ->None:
+    def _set_timeout(self, msecs: int) -> None:
         """Called by libcurl to schedule a timeout."""
-        pass
+        if self._timeout is not None:
+            self.io_loop.remove_timeout(self._timeout)
+        self._timeout = self.io_loop.add_timeout(
+            self.io_loop.time() + msecs / 1000.0, self._handle_timeout
+        )

-    def _handle_events(self, fd: int, events: int) ->None:
+    def _handle_events(self, fd: int, events: int) -> None:
         """Called by IOLoop when there is activity on one of our
         file descriptors.
         """
-        pass
+        action = 0
+        if events & ioloop.IOLoop.READ:
+            action |= pycurl.CSELECT_IN
+        if events & ioloop.IOLoop.WRITE:
+            action |= pycurl.CSELECT_OUT
+        while True:
+            try:
+                ret, num_handles = self._multi.socket_action(fd, action)
+            except pycurl.error as e:
+                ret = e.args[0]
+            if ret != pycurl.E_CALL_MULTI_PERFORM:
+                break
+        self._finish_pending_requests()

-    def _handle_timeout(self) ->None:
+    def _handle_timeout(self) -> None:
         """Called by IOLoop when the requested timeout has passed."""
-        pass
+        self._timeout = None
+        while True:
+            try:
+                ret, num_handles = self._multi.socket_action(pycurl.SOCKET_TIMEOUT, 0)
+            except pycurl.error as e:
+                ret = e.args[0]
+            if ret != pycurl.E_CALL_MULTI_PERFORM:
+                break
+        self._finish_pending_requests()
+
+        # In theory, we shouldn't have to do this because curl will
+        # call _set_timeout whenever the timeout changes.  However,
+        # sometimes after _handle_timeout we will need to reschedule
+        # immediately even though nothing has changed from curl's
+        # perspective.  This is because when socket_action is
+        # called with SOCKET_TIMEOUT, libcurl decides internally which
+        # timeouts need to be processed by using a monotonic clock
+        # (where available) while tornado uses python's time.time()
+        # to decide when timeouts have occurred.  When those clocks
+        # disagree on elapsed time (as they will whenever there is an
+        # NTP adjustment), tornado might call _handle_timeout before
+        # libcurl is ready.  After each timeout, resync the scheduled
+        # timeout with libcurl's current state.
+        new_timeout = self._multi.timeout()
+        if new_timeout >= 0:
+            self._set_timeout(new_timeout)

-    def _handle_force_timeout(self) ->None:
+    def _handle_force_timeout(self) -> None:
         """Called by IOLoop periodically to ask libcurl to process any
         events it may have forgotten about.
         """
-        pass
+        while True:
+            try:
+                ret, num_handles = self._multi.socket_all()
+            except pycurl.error as e:
+                ret = e.args[0]
+            if ret != pycurl.E_CALL_MULTI_PERFORM:
+                break
+        self._finish_pending_requests()

-    def _finish_pending_requests(self) ->None:
+    def _finish_pending_requests(self) -> None:
         """Process any requests that were completed by the last
         call to multi.socket_action.
         """
-        pass
+        while True:
+            num_q, ok_list, err_list = self._multi.info_read()
+            for curl in ok_list:
+                self._finish(curl)
+            for curl, errnum, errmsg in err_list:
+                self._finish(curl, errnum, errmsg)
+            if num_q == 0:
+                break
+        self._process_queue()

+    def _process_queue(self) -> None:
+        while True:
+            started = 0
+            while self._free_list and self._requests:
+                started += 1
+                curl = self._free_list.pop()
+                (request, callback, queue_start_time) = self._requests.popleft()
+                # TODO: Don't smuggle extra data on an attribute of the Curl object.
+                curl.info = {  # type: ignore
+                    "headers": httputil.HTTPHeaders(),
+                    "buffer": BytesIO(),
+                    "request": request,
+                    "callback": callback,
+                    "queue_start_time": queue_start_time,
+                    "curl_start_time": time.time(),
+                    "curl_start_ioloop_time": self.io_loop.current().time(),  # type: ignore
+                }
+                try:
+                    self._curl_setup_request(
+                        curl,
+                        request,
+                        curl.info["buffer"],  # type: ignore
+                        curl.info["headers"],  # type: ignore
+                    )
+                except Exception as e:
+                    # If there was an error in setup, pass it on
+                    # to the callback. Note that allowing the
+                    # error to escape here will appear to work
+                    # most of the time since we are still in the
+                    # caller's original stack frame, but when
+                    # _process_queue() is called from
+                    # _finish_pending_requests the exceptions have
+                    # nowhere to go.
+                    self._free_list.append(curl)
+                    callback(HTTPResponse(request=request, code=599, error=e))
+                else:
+                    self._multi.add_handle(curl)

-class CurlError(HTTPError):
+            if not started:
+                break
+
+    def _finish(
+        self,
+        curl: pycurl.Curl,
+        curl_error: Optional[int] = None,
+        curl_message: Optional[str] = None,
+    ) -> None:
+        info = curl.info  # type: ignore
+        curl.info = None  # type: ignore
+        self._multi.remove_handle(curl)
+        self._free_list.append(curl)
+        buffer = info["buffer"]
+        if curl_error:
+            assert curl_message is not None
+            error = CurlError(curl_error, curl_message)  # type: Optional[CurlError]
+            assert error is not None
+            code = error.code
+            effective_url = None
+            buffer.close()
+            buffer = None
+        else:
+            error = None
+            code = curl.getinfo(pycurl.HTTP_CODE)
+            effective_url = curl.getinfo(pycurl.EFFECTIVE_URL)
+            buffer.seek(0)
+        # the various curl timings are documented at
+        # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html
+        time_info = dict(
+            queue=info["curl_start_ioloop_time"] - info["queue_start_time"],
+            namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME),
+            connect=curl.getinfo(pycurl.CONNECT_TIME),
+            appconnect=curl.getinfo(pycurl.APPCONNECT_TIME),
+            pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME),
+            starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME),
+            total=curl.getinfo(pycurl.TOTAL_TIME),
+            redirect=curl.getinfo(pycurl.REDIRECT_TIME),
+        )
+        try:
+            info["callback"](
+                HTTPResponse(
+                    request=info["request"],
+                    code=code,
+                    headers=info["headers"],
+                    buffer=buffer,
+                    effective_url=effective_url,
+                    error=error,
+                    reason=info["headers"].get("X-Http-Reason", None),
+                    request_time=self.io_loop.time() - info["curl_start_ioloop_time"],
+                    start_time=info["curl_start_time"],
+                    time_info=time_info,
+                )
+            )
+        except Exception:
+            self.handle_callback_exception(info["callback"])
+
+    def handle_callback_exception(self, callback: Any) -> None:
+        app_log.error("Exception in callback %r", callback, exc_info=True)
+
+    def _curl_create(self) -> pycurl.Curl:
+        curl = pycurl.Curl()
+        if curl_log.isEnabledFor(logging.DEBUG):
+            curl.setopt(pycurl.VERBOSE, 1)
+            curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug)
+        if hasattr(
+            pycurl, "PROTOCOLS"
+        ):  # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12)
+            curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
+            curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
+        return curl
+
+    def _curl_setup_request(
+        self,
+        curl: pycurl.Curl,
+        request: HTTPRequest,
+        buffer: BytesIO,
+        headers: httputil.HTTPHeaders,
+    ) -> None:
+        curl.setopt(pycurl.URL, native_str(request.url))
+
+        # libcurl's magic "Expect: 100-continue" behavior causes delays
+        # with servers that don't support it (which include, among others,
+        # Google's OpenID endpoint).  Additionally, this behavior has
+        # a bug in conjunction with the curl_multi_socket_action API
+        # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
+        # which increases the delays.  It's more trouble than it's worth,
+        # so just turn off the feature (yes, setting Expect: to an empty
+        # value is the official way to disable this)
+        if "Expect" not in request.headers:
+            request.headers["Expect"] = ""
+
+        # libcurl adds Pragma: no-cache by default; disable that too
+        if "Pragma" not in request.headers:
+            request.headers["Pragma"] = ""

-    def __init__(self, errno: int, message: str) ->None:
+        encoded_headers = [
+            b"%s: %s"
+            % (native_str(k).encode("ASCII"), native_str(v).encode("ISO8859-1"))
+            for k, v in request.headers.get_all()
+        ]
+        for line in encoded_headers:
+            if CR_OR_LF_RE.search(line):
+                raise ValueError("Illegal characters in header (CR or LF): %r" % line)
+        curl.setopt(pycurl.HTTPHEADER, encoded_headers)
+
+        curl.setopt(
+            pycurl.HEADERFUNCTION,
+            functools.partial(
+                self._curl_header_callback, headers, request.header_callback
+            ),
+        )
+        if request.streaming_callback:
+
+            def write_function(b: Union[bytes, bytearray]) -> int:
+                assert request.streaming_callback is not None
+                self.io_loop.add_callback(request.streaming_callback, b)
+                return len(b)
+
+        else:
+            write_function = buffer.write  # type: ignore
+        curl.setopt(pycurl.WRITEFUNCTION, write_function)
+        curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
+        curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
+        assert request.connect_timeout is not None
+        curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
+        assert request.request_timeout is not None
+        curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
+        if request.user_agent:
+            curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
+        else:
+            curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
+        if request.network_interface:
+            curl.setopt(pycurl.INTERFACE, request.network_interface)
+        if request.decompress_response:
+            curl.setopt(pycurl.ENCODING, "gzip,deflate")
+        else:
+            curl.setopt(pycurl.ENCODING, None)
+        if request.proxy_host and request.proxy_port:
+            curl.setopt(pycurl.PROXY, request.proxy_host)
+            curl.setopt(pycurl.PROXYPORT, request.proxy_port)
+            if request.proxy_username:
+                assert request.proxy_password is not None
+                credentials = httputil.encode_username_password(
+                    request.proxy_username, request.proxy_password
+                )
+                curl.setopt(pycurl.PROXYUSERPWD, credentials)
+
+            if request.proxy_auth_mode is None or request.proxy_auth_mode == "basic":
+                curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC)
+            elif request.proxy_auth_mode == "digest":
+                curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST)
+            else:
+                raise ValueError(
+                    "Unsupported proxy_auth_mode %s" % request.proxy_auth_mode
+                )
+        else:
+            try:
+                curl.unsetopt(pycurl.PROXY)
+            except TypeError:  # not supported, disable proxy
+                curl.setopt(pycurl.PROXY, "")
+            curl.unsetopt(pycurl.PROXYUSERPWD)
+        if request.validate_cert:
+            curl.setopt(pycurl.SSL_VERIFYPEER, 1)
+            curl.setopt(pycurl.SSL_VERIFYHOST, 2)
+        else:
+            curl.setopt(pycurl.SSL_VERIFYPEER, 0)
+            curl.setopt(pycurl.SSL_VERIFYHOST, 0)
+        if request.ca_certs is not None:
+            curl.setopt(pycurl.CAINFO, request.ca_certs)
+        else:
+            # There is no way to restore pycurl.CAINFO to its default value
+            # (Using unsetopt makes it reject all certificates).
+            # I don't see any way to read the default value from python so it
+            # can be restored later.  We'll have to just leave CAINFO untouched
+            # if no ca_certs file was specified, and require that if any
+            # request uses a custom ca_certs file, they all must.
+            pass
+
+        if request.allow_ipv6 is False:
+            # Curl behaves reasonably when DNS resolution gives an ipv6 address
+            # that we can't reach, so allow ipv6 unless the user asks to disable.
+            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
+        else:
+            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
+
+        # Set the request method through curl's irritating interface which makes
+        # up names for almost every single method
+        curl_options = {
+            "GET": pycurl.HTTPGET,
+            "POST": pycurl.POST,
+            "PUT": pycurl.UPLOAD,
+            "HEAD": pycurl.NOBODY,
+        }
+        custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
+        for o in curl_options.values():
+            curl.setopt(o, False)
+        if request.method in curl_options:
+            curl.unsetopt(pycurl.CUSTOMREQUEST)
+            curl.setopt(curl_options[request.method], True)
+        elif request.allow_nonstandard_methods or request.method in custom_methods:
+            curl.setopt(pycurl.CUSTOMREQUEST, request.method)
+        else:
+            raise KeyError("unknown method " + request.method)
+
+        body_expected = request.method in ("POST", "PATCH", "PUT")
+        body_present = request.body is not None
+        if not request.allow_nonstandard_methods:
+            # Some HTTP methods nearly always have bodies while others
+            # almost never do. Fail in this case unless the user has
+            # opted out of sanity checks with allow_nonstandard_methods.
+            if (body_expected and not body_present) or (
+                body_present and not body_expected
+            ):
+                raise ValueError(
+                    "Body must %sbe None for method %s (unless "
+                    "allow_nonstandard_methods is true)"
+                    % ("not " if body_expected else "", request.method)
+                )
+
+        if body_expected or body_present:
+            if request.method == "GET":
+                # Even with `allow_nonstandard_methods` we disallow
+                # GET with a body (because libcurl doesn't allow it
+                # unless we use CUSTOMREQUEST). While the spec doesn't
+                # forbid clients from sending a body, it arguably
+                # disallows the server from doing anything with them.
+                raise ValueError("Body must be None for GET request")
+            request_buffer = BytesIO(utf8(request.body or ""))
+
+            def ioctl(cmd: int) -> None:
+                if cmd == curl.IOCMD_RESTARTREAD:  # type: ignore
+                    request_buffer.seek(0)
+
+            curl.setopt(pycurl.READFUNCTION, request_buffer.read)
+            curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
+            if request.method == "POST":
+                curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or ""))
+            else:
+                curl.setopt(pycurl.UPLOAD, True)
+                curl.setopt(pycurl.INFILESIZE, len(request.body or ""))
+
+        if request.auth_username is not None:
+            assert request.auth_password is not None
+            if request.auth_mode is None or request.auth_mode == "basic":
+                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
+            elif request.auth_mode == "digest":
+                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
+            else:
+                raise ValueError("Unsupported auth_mode %s" % request.auth_mode)
+
+            userpwd = httputil.encode_username_password(
+                request.auth_username, request.auth_password
+            )
+            curl.setopt(pycurl.USERPWD, userpwd)
+            curl_log.debug(
+                "%s %s (username: %r)",
+                request.method,
+                request.url,
+                request.auth_username,
+            )
+        else:
+            curl.unsetopt(pycurl.USERPWD)
+            curl_log.debug("%s %s", request.method, request.url)
+
+        if request.client_cert is not None:
+            curl.setopt(pycurl.SSLCERT, request.client_cert)
+
+        if request.client_key is not None:
+            curl.setopt(pycurl.SSLKEY, request.client_key)
+
+        if request.ssl_options is not None:
+            raise ValueError("ssl_options not supported in curl_httpclient")
+
+        if threading.active_count() > 1:
+            # libcurl/pycurl is not thread-safe by default.  When multiple threads
+            # are used, signals should be disabled.  This has the side effect
+            # of disabling DNS timeouts in some environments (when libcurl is
+            # not linked against ares), so we don't do it when there is only one
+            # thread.  Applications that use many short-lived threads may need
+            # to set NOSIGNAL manually in a prepare_curl_callback since
+            # there may not be any other threads running at the time we call
+            # threading.active_count.
+            curl.setopt(pycurl.NOSIGNAL, 1)
+        if request.prepare_curl_callback is not None:
+            request.prepare_curl_callback(curl)
+
+    def _curl_header_callback(
+        self,
+        headers: httputil.HTTPHeaders,
+        header_callback: Callable[[str], None],
+        header_line_bytes: bytes,
+    ) -> None:
+        header_line = native_str(header_line_bytes.decode("latin1"))
+        if header_callback is not None:
+            self.io_loop.add_callback(header_callback, header_line)
+        # header_line as returned by curl includes the end-of-line characters.
+        # whitespace at the start should be preserved to allow multi-line headers
+        header_line = header_line.rstrip()
+        if header_line.startswith("HTTP/"):
+            headers.clear()
+            try:
+                (__, __, reason) = httputil.parse_response_start_line(header_line)
+                header_line = "X-Http-Reason: %s" % reason
+            except httputil.HTTPInputError:
+                return
+        if not header_line:
+            return
+        headers.parse_line(header_line)
+
+    def _curl_debug(self, debug_type: int, debug_msg: str) -> None:
+        debug_types = ("I", "<", ">", "<", ">")
+        if debug_type == 0:
+            debug_msg = native_str(debug_msg)
+            curl_log.debug("%s", debug_msg.strip())
+        elif debug_type in (1, 2):
+            debug_msg = native_str(debug_msg)
+            for line in debug_msg.splitlines():
+                curl_log.debug("%s %s", debug_types[debug_type], line)
+        elif debug_type == 4:
+            curl_log.debug("%s %r", debug_types[debug_type], debug_msg)
+
+
+class CurlError(HTTPError):
+    def __init__(self, errno: int, message: str) -> None:
         HTTPError.__init__(self, 599, message)
         self.errno = errno


-if __name__ == '__main__':
+if __name__ == "__main__":
     AsyncHTTPClient.configure(CurlAsyncHTTPClient)
     main()
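
A minimal sketch of how an application selects this client at startup, mirroring the __main__ block above (assumes pycurl is installed; fetch_once and the URL are illustrative):

    from tornado.httpclient import AsyncHTTPClient

    # Route every AsyncHTTPClient() instance through the libcurl implementation.
    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")

    async def fetch_once():
        client = AsyncHTTPClient()  # now backed by CurlAsyncHTTPClient
        response = await client.fetch("http://example.com/")
        return response.code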
diff --git a/tornado/escape.py b/tornado/escape.py
index 15f61dbe..84abfca6 100644
--- a/tornado/escape.py
+++ b/tornado/escape.py
@@ -1,3 +1,18 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """Escaping/unescaping methods for HTML, JSON, URLs, and others.

 Also includes a few other miscellaneous string manipulation functions that
@@ -9,16 +24,19 @@ and were more relevant in Python 2). In new code, the standard library
 functions are encouraged instead of this module where applicable. See the
 docstrings on each function for details.
 """
+
 import html
 import json
 import re
 import urllib.parse
+
 from tornado.util import unicode_type
+
 import typing
 from typing import Union, Any, Optional, Dict, List, Callable


-def xhtml_escape(value: Union[str, bytes]) ->str:
+def xhtml_escape(value: Union[str, bytes]) -> str:
     """Escapes a string so it is valid within HTML or XML.

     Escapes the characters ``<``, ``>``, ``"``, ``'``, and ``&``.
@@ -38,10 +56,10 @@ def xhtml_escape(value: Union[str, bytes]) ->str:
        except that single quotes are now escaped as ``&#x27;`` instead of
        ``&#39;`` and performance may be different.
     """
-    pass
+    return html.escape(to_unicode(value))


-def xhtml_unescape(value: Union[str, bytes]) ->str:
+def xhtml_unescape(value: Union[str, bytes]) -> str:
     """Un-escapes an XML-escaped string.

     Equivalent to `html.unescape` except that this function always returns
@@ -56,33 +74,42 @@ def xhtml_unescape(value: Union[str, bytes]) ->str:
        Some invalid inputs such as surrogates now raise an error, and numeric
        references to certain ISO-8859-1 characters are now handled correctly.
     """
-    pass
+    return html.unescape(to_unicode(value))


-def json_encode(value: Any) ->str:
+# The fact that json_encode wraps json.dumps is an implementation detail.
+# Please see https://github.com/tornadoweb/tornado/pull/706
+# before sending a pull request that adds **kwargs to this function.
+def json_encode(value: Any) -> str:
     """JSON-encodes the given Python object.

     Equivalent to `json.dumps` with the additional guarantee that the output
     will never contain the character sequence ``</`` which can be problematic
     when JSON is embedded in an HTML ``<script>`` tag.
     """
-    pass
+    # JSON permits but does not require forward slashes to be escaped.
+    # This is useful when json data is emitted in a <script> tag
+    # in HTML, as it prevents </script> tags from prematurely terminating
+    # the JavaScript.  Some json libraries do this escaping by default,
+    # although python's standard library does not, so we do it here.
+    # http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped
+    return json.dumps(value).replace("</", "<\\/")
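
A quick sketch of the "</" escaping guarantee described above (the commented value follows from the replace call):

    from tornado.escape import json_encode

    # The "</" sequence is escaped, so the output is safe inside a <script> tag:
    json_encode({"html": "</script>"})  # '{"html": "<\\/script>"}'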


-def json_decode(value: Union[str, bytes]) ->Any:
+def json_decode(value: Union[str, bytes]) -> Any:
     """Returns Python objects for the given JSON string.

     Supports both `str` and `bytes` inputs. Equivalent to `json.loads`.
     """
-    pass
+    return json.loads(value)


-def squeeze(value: str) ->str:
+def squeeze(value: str) -> str:
     """Replace all sequences of whitespace chars with a single space."""
-    pass
+    return re.sub(r"[\x00-\x20]+", " ", value).strip()


-def url_escape(value: Union[str, bytes], plus: bool=True) ->str:
+def url_escape(value: Union[str, bytes], plus: bool = True) -> str:
     """Returns a URL-encoded version of the given value.

     Equivalent to either `urllib.parse.quote_plus` or `urllib.parse.quote` depending on the ``plus``
@@ -97,11 +124,25 @@ def url_escape(value: Union[str, bytes], plus: bool=True) ->str:
     .. versionadded:: 3.1
         The ``plus`` argument
     """
+    quote = urllib.parse.quote_plus if plus else urllib.parse.quote
+    return quote(value)
+
+
+@typing.overload
+def url_unescape(value: Union[str, bytes], encoding: None, plus: bool = True) -> bytes:
     pass


-def url_unescape(value: Union[str, bytes], encoding: Optional[str]='utf-8',
-    plus: bool=True) ->Union[str, bytes]:
+@typing.overload
+def url_unescape(
+    value: Union[str, bytes], encoding: str = "utf-8", plus: bool = True
+) -> str:
+    pass
+
+
+def url_unescape(
+    value: Union[str, bytes], encoding: Optional[str] = "utf-8", plus: bool = True
+) -> Union[str, bytes]:
     """Decodes the given value from a URL.

     The argument may be either a byte or unicode string.
@@ -119,11 +160,19 @@ def url_unescape(value: Union[str, bytes], encoding: Optional[str]='utf-8',
     .. versionadded:: 3.1
        The ``plus`` argument
     """
-    pass
-
-
-def parse_qs_bytes(qs: Union[str, bytes], keep_blank_values: bool=False,
-    strict_parsing: bool=False) ->Dict[str, List[bytes]]:
+    if encoding is None:
+        if plus:
+            # unquote_to_bytes doesn't have a _plus variant
+            value = to_basestring(value).replace("+", " ")
+        return urllib.parse.unquote_to_bytes(value)
+    else:
+        unquote = urllib.parse.unquote_plus if plus else urllib.parse.unquote
+        return unquote(to_basestring(value), encoding=encoding)
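
A short sketch of how the encoding and plus parameters interact (expected values shown in comments):

    from tornado.escape import url_unescape

    url_unescape("caf%C3%A9")                 # 'café' (str, decoded as utf-8)
    url_unescape("caf%C3%A9", encoding=None)  # b'caf\xc3\xa9' (raw bytes)
    url_unescape("a+b", plus=False)           # 'a+b' ('+' only decoded when plus=True)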
+
+
+def parse_qs_bytes(
+    qs: Union[str, bytes], keep_blank_values: bool = False, strict_parsing: bool = False
+) -> Dict[str, List[bytes]]:
     """Parses a query string like urlparse.parse_qs,
     but takes bytes and returns the values as byte strings.

@@ -131,54 +180,131 @@ def parse_qs_bytes(qs: Union[str, bytes], keep_blank_values: bool=False,
     because it's too painful to keep them as byte strings in
     python3 and in practice they're nearly always ascii anyway.
     """
+    # This is gross, but python3 doesn't give us another way.
+    # Latin1 is the universal donor of character encodings.
+    if isinstance(qs, bytes):
+        qs = qs.decode("latin1")
+    result = urllib.parse.parse_qs(
+        qs, keep_blank_values, strict_parsing, encoding="latin1", errors="strict"
+    )
+    encoded = {}
+    for k, v in result.items():
+        encoded[k] = [i.encode("latin1") for i in v]
+    return encoded
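
A small sketch of the latin1 round trip described above; arbitrary percent-escaped bytes survive in the values while keys become native strings:

    from tornado.escape import parse_qs_bytes

    parse_qs_bytes("a=%E9&b=1")  # {'a': [b'\xe9'], 'b': [b'1']}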
+
+
+_UTF8_TYPES = (bytes, type(None))
+
+
+@typing.overload
+def utf8(value: bytes) -> bytes:
     pass


-_UTF8_TYPES = bytes, type(None)
+@typing.overload
+def utf8(value: str) -> bytes:
+    pass
+

+@typing.overload
+def utf8(value: None) -> None:
+    pass

-def utf8(value: Union[None, str, bytes]) ->Optional[bytes]:
+
+def utf8(value: Union[None, str, bytes]) -> Optional[bytes]:
     """Converts a string argument to a byte string.

     If the argument is already a byte string or None, it is returned unchanged.
     Otherwise it must be a unicode string and is encoded as utf8.
     """
+    if isinstance(value, _UTF8_TYPES):
+        return value
+    if not isinstance(value, unicode_type):
+        raise TypeError("Expected bytes, unicode, or None; got %r" % type(value))
+    return value.encode("utf-8")
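
A sketch of the conversion contract shared by utf8 and its inverse to_unicode (defined below):

    from tornado.escape import utf8, to_unicode

    utf8("café")                # b'caf\xc3\xa9'
    utf8(b"raw")                # b'raw' (bytes pass through unchanged)
    to_unicode(b"caf\xc3\xa9")  # 'café'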
+
+
+_TO_UNICODE_TYPES = (unicode_type, type(None))
+
+
+@typing.overload
+def to_unicode(value: str) -> str:
+    pass
+
+
+@typing.overload
+def to_unicode(value: bytes) -> str:
     pass


-_TO_UNICODE_TYPES = unicode_type, type(None)
+@typing.overload
+def to_unicode(value: None) -> None:
+    pass


-def to_unicode(value: Union[None, str, bytes]) ->Optional[str]:
+def to_unicode(value: Union[None, str, bytes]) -> Optional[str]:
     """Converts a string argument to a unicode string.

     If the argument is already a unicode string or None, it is returned
     unchanged.  Otherwise it must be a byte string and is decoded as utf8.
     """
-    pass
+    if isinstance(value, _TO_UNICODE_TYPES):
+        return value
+    if not isinstance(value, bytes):
+        raise TypeError("Expected bytes, unicode, or None; got %r" % type(value))
+    return value.decode("utf-8")


+# to_unicode was previously named _unicode not because it was private,
+# but to avoid conflicts with the built-in unicode() function/type
 _unicode = to_unicode
+
+# When dealing with the standard library across python 2 and 3 it is
+# sometimes useful to have a direct conversion to the native string type
 native_str = to_unicode
 to_basestring = to_unicode


-def recursive_unicode(obj: Any) ->Any:
+def recursive_unicode(obj: Any) -> Any:
     """Walks a simple data structure, converting byte strings to unicode.

     Supports lists, tuples, and dictionaries.
     """
-    pass
-
-
-_URL_RE = re.compile(to_unicode(
-    '\\b((?:([\\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\\s&()]|&amp;|&quot;)*(?:[^!"#$%&\'()*+,.:;<=>?@\\[\\]^`{|}~\\s]))|(?:\\((?:[^\\s&()]|&amp;|&quot;)*\\)))+)'
-    ))
-
-
-def linkify(text: Union[str, bytes], shorten: bool=False, extra_params:
-    Union[str, Callable[[str], str]]='', require_protocol: bool=False,
-    permitted_protocols: List[str]=['http', 'https']) ->str:
+    if isinstance(obj, dict):
+        return dict(
+            (recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.items()
+        )
+    elif isinstance(obj, list):
+        return list(recursive_unicode(i) for i in obj)
+    elif isinstance(obj, tuple):
+        return tuple(recursive_unicode(i) for i in obj)
+    elif isinstance(obj, bytes):
+        return to_unicode(obj)
+    else:
+        return obj
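
A brief sketch of recursive_unicode on a nested structure:

    from tornado.escape import recursive_unicode

    recursive_unicode({b"key": [b"value", (b"nested",)]})
    # -> {'key': ['value', ('nested',)]}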
+
+
+# I originally used the regex from
+# http://daringfireball.net/2010/07/improved_regex_for_matching_urls
+# but it gets all exponential on certain patterns (such as too many trailing
+# dots), causing the regex matcher to never return.
+# This regex should avoid those problems.
+# Use to_unicode instead of tornado.util.u - we don't want backslashes getting
+# processed as escapes.
+_URL_RE = re.compile(
+    to_unicode(
+        r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)"""  # noqa: E501
+    )
+)
+
+
+def linkify(
+    text: Union[str, bytes],
+    shorten: bool = False,
+    extra_params: Union[str, Callable[[str], str]] = "",
+    require_protocol: bool = False,
+    permitted_protocols: List[str] = ["http", "https"],
+) -> str:
     """Converts plain text into HTML with links.

     For example: ``linkify("Hello http://tornadoweb.org!")`` would return
@@ -208,4 +334,70 @@ def linkify(text: Union[str, bytes], shorten: bool=False, extra_params:
       "mailto"])``. It is very unsafe to include protocols such as
       ``javascript``.
     """
-    pass
+    if extra_params and not callable(extra_params):
+        extra_params = " " + extra_params.strip()
+
+    def make_link(m: typing.Match) -> str:
+        url = m.group(1)
+        proto = m.group(2)
+        if require_protocol and not proto:
+            return url  # no protocol, no linkify
+
+        if proto and proto not in permitted_protocols:
+            return url  # bad protocol, no linkify
+
+        href = m.group(1)
+        if not proto:
+            href = "http://" + href  # no proto specified, use http
+
+        if callable(extra_params):
+            params = " " + extra_params(href).strip()
+        else:
+            params = extra_params
+
+        # clip long urls. max_len is just an approximation
+        max_len = 30
+        if shorten and len(url) > max_len:
+            before_clip = url
+            if proto:
+                proto_len = len(proto) + 1 + len(m.group(3) or "")  # +1 for :
+            else:
+                proto_len = 0
+
+            parts = url[proto_len:].split("/")
+            if len(parts) > 1:
+                # Grab the whole host part plus the first bit of the path
+                # The path is usually not that interesting once shortened
+                # (no more slug, etc), so it really just provides a little
+                # extra indication of shortening.
+                url = (
+                    url[:proto_len]
+                    + parts[0]
+                    + "/"
+                    + parts[1][:8].split("?")[0].split(".")[0]
+                )
+
+            if len(url) > max_len * 1.5:  # still too long
+                url = url[:max_len]
+
+            if url != before_clip:
+                amp = url.rfind("&")
+                # avoid splitting html char entities
+                if amp > max_len - 5:
+                    url = url[:amp]
+                url += "..."
+
+                if len(url) >= len(before_clip):
+                    url = before_clip
+                else:
+                    # full url is visible on mouse-over (for those who don't
+                    # have a status bar, such as Safari by default)
+                    params += ' title="%s"' % href
+
+        return '<a href="%s"%s>%s</a>' % (href, params, url)
+
+    # First HTML-escape so that our strings are all safe.
+    # The regex is modified to avoid character entities other than &amp; so
+    # that we won't pick up &quot;, etc.
+    text = _unicode(xhtml_escape(text))
+    return _URL_RE.sub(make_link, text)
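
A usage sketch for linkify under these rules (expected output in comments; the extra_params value is illustrative):

    from tornado.escape import linkify

    linkify("Hello http://tornadoweb.org!")
    # -> 'Hello <a href="http://tornadoweb.org">http://tornadoweb.org</a>!'

    linkify("see www.example.com", extra_params='rel="nofollow"')
    # -> 'see <a href="http://www.example.com" rel="nofollow">www.example.com</a>'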
diff --git a/tornado/gen.py b/tornado/gen.py
index 19f311cf..0e3c7a6f 100644
--- a/tornado/gen.py
+++ b/tornado/gen.py
@@ -66,6 +66,7 @@ function to extend this mechanism.
    via ``singledispatch``.

 """
+
 import asyncio
 import builtins
 import collections
@@ -77,21 +78,35 @@ from functools import singledispatch
 from inspect import isawaitable
 import sys
 import types
-from tornado.concurrent import Future, is_future, chain_future, future_set_exc_info, future_add_done_callback, future_set_result_unless_cancelled
+
+from tornado.concurrent import (
+    Future,
+    is_future,
+    chain_future,
+    future_set_exc_info,
+    future_add_done_callback,
+    future_set_result_unless_cancelled,
+)
 from tornado.ioloop import IOLoop
 from tornado.log import app_log
 from tornado.util import TimeoutError
+
 try:
     import contextvars
 except ImportError:
-    contextvars = None
+    contextvars = None  # type: ignore
+
 import typing
 from typing import Union, Any, Callable, List, Type, Tuple, Awaitable, Dict, overload
+
 if typing.TYPE_CHECKING:
-    from typing import Sequence, Deque, Optional, Set, Iterable
-_T = typing.TypeVar('_T')
-_Yieldable = Union[None, Awaitable, List[Awaitable], Dict[Any, Awaitable],
-    concurrent.futures.Future]
+    from typing import Sequence, Deque, Optional, Set, Iterable  # noqa: F401
+
+_T = typing.TypeVar("_T")
+
+_Yieldable = Union[
+    None, Awaitable, List[Awaitable], Dict[Any, Awaitable], concurrent.futures.Future
+]


 class KeyReuseError(Exception):
@@ -114,8 +129,53 @@ class ReturnValueIgnoredError(Exception):
     pass


-def coroutine(func: Union[Callable[..., 'Generator[Any, Any, _T]'],
-    Callable[..., _T]]) ->Callable[..., 'Future[_T]']:
+def _value_from_stopiteration(e: Union[StopIteration, "Return"]) -> Any:
+    try:
+        # StopIteration has a value attribute beginning in py33.
+        # So does our Return class.
+        return e.value
+    except AttributeError:
+        pass
+    try:
+        # Cython backports coroutine functionality by putting the value in
+        # e.args[0].
+        return e.args[0]
+    except (AttributeError, IndexError):
+        return None
+
+
+def _create_future() -> Future:
+    future = Future()  # type: Future
+    # Fixup asyncio debug info by removing extraneous stack entries
+    source_traceback = getattr(future, "_source_traceback", ())
+    while source_traceback:
+        # Each traceback entry is equivalent to a
+        # (filename, self.lineno, self.name, self.line) tuple
+        filename = source_traceback[-1][0]
+        if filename == __file__:
+            del source_traceback[-1]
+        else:
+            break
+    return future
+
+
+def _fake_ctx_run(f: Callable[..., _T], *args: Any, **kw: Any) -> _T:
+    return f(*args, **kw)
+
+
+@overload
+def coroutine(
+    func: Callable[..., "Generator[Any, Any, _T]"]
+) -> Callable[..., "Future[_T]"]: ...
+
+
+@overload
+def coroutine(func: Callable[..., _T]) -> Callable[..., "Future[_T]"]: ...
+
+
+def coroutine(
+    func: Union[Callable[..., "Generator[Any, Any, _T]"], Callable[..., _T]]
+) -> Callable[..., "Future[_T]"]:
     """Decorator for asynchronous generators.

     For compatibility with older versions of Python, coroutines may
@@ -140,16 +200,82 @@ def coroutine(func: Union[Callable[..., 'Generator[Any, Any, _T]'],
        awaitable object instead.

     """
-    pass
-

-def is_coroutine_function(func: Any) ->bool:
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        # type: (*Any, **Any) -> Future[_T]
+        # This function is type-annotated with a comment to work around
+        # https://bitbucket.org/pypy/pypy/issues/2868/segfault-with-args-type-annotation-in
+        future = _create_future()
+        if contextvars is not None:
+            ctx_run = contextvars.copy_context().run  # type: Callable
+        else:
+            ctx_run = _fake_ctx_run
+        try:
+            result = ctx_run(func, *args, **kwargs)
+        except (Return, StopIteration) as e:
+            result = _value_from_stopiteration(e)
+        except Exception:
+            future_set_exc_info(future, sys.exc_info())
+            try:
+                return future
+            finally:
+                # Avoid circular references
+                future = None  # type: ignore
+        else:
+            if isinstance(result, Generator):
+                # Inline the first iteration of Runner.run.  This lets us
+                # avoid the cost of creating a Runner when the coroutine
+                # never actually yields, which in turn allows us to
+                # use "optional" coroutines in critical path code without
+                # performance penalty for the synchronous case.
+                try:
+                    yielded = ctx_run(next, result)
+                except (StopIteration, Return) as e:
+                    future_set_result_unless_cancelled(
+                        future, _value_from_stopiteration(e)
+                    )
+                except Exception:
+                    future_set_exc_info(future, sys.exc_info())
+                else:
+                    # Provide strong references to Runner objects as long
+                    # as their result future objects also have strong
+                    # references (typically from the parent coroutine's
+                    # Runner). This keeps the coroutine's Runner alive.
+                    # We do this by exploiting the public API
+                    # add_done_callback() instead of putting a private
+                    # attribute on the Future.
+                    # (GitHub issues #1769, #2229).
+                    runner = Runner(ctx_run, result, future, yielded)
+                    future.add_done_callback(lambda _: runner)
+                yielded = None
+                try:
+                    return future
+                finally:
+                    # Subtle memory optimization: if next() raised an exception,
+                    # the future's exc_info contains a traceback which
+                    # includes this stack frame.  This creates a cycle,
+                    # which will be collected at the next full GC but has
+                    # been shown to greatly increase memory usage of
+                    # benchmarks (relative to the refcount-based scheme
+                    # used in the absence of cycles).  We can avoid the
+                    # cycle by clearing the local variable after we return it.
+                    future = None  # type: ignore
+        future_set_result_unless_cancelled(future, result)
+        return future
+
+    wrapper.__wrapped__ = func  # type: ignore
+    wrapper.__tornado_coroutine__ = True  # type: ignore
+    return wrapper
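
A minimal sketch of the decorator in use (add_one and add_two are illustrative names). A function with no yield takes the inlined fast path above and never allocates a Runner; a generator is driven by a Runner until it returns:

    from tornado import gen

    @gen.coroutine
    def add_one(x):
        # No yield: resolved immediately via the fast path in the wrapper.
        return x + 1

    @gen.coroutine
    def add_two(x):
        # Yields a Future; a Runner drives the generator to completion.
        y = yield add_one(x)
        return y + 1

Calling add_two(1) returns a Future whose result is 3.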
+
+
+def is_coroutine_function(func: Any) -> bool:
     """Return whether *func* is a coroutine function, i.e. a function
     wrapped with `~.gen.coroutine`.

     .. versionadded:: 4.5
     """
-    pass
+    return getattr(func, "__tornado_coroutine__", False)


 class Return(Exception):
@@ -173,10 +299,11 @@ class Return(Exception):
     statement can be used with no arguments instead.
     """

-    def __init__(self, value: Any=None) ->None:
+    def __init__(self, value: Any = None) -> None:
         super().__init__()
         self.value = value
-        self.args = value,
+        # Cython recognizes subclasses of StopIteration with a .args tuple.
+        self.args = (value,)


 class WaitIterator(object):
@@ -233,54 +360,84 @@ class WaitIterator(object):
        Added ``async for`` support in Python 3.5.

     """
-    _unfinished = {}

-    def __init__(self, *args: Future, **kwargs: Future) ->None:
+    _unfinished = {}  # type: Dict[Future, Union[int, str]]
+
+    def __init__(self, *args: Future, **kwargs: Future) -> None:
         if args and kwargs:
-            raise ValueError('You must provide args or kwargs, not both')
+            raise ValueError("You must provide args or kwargs, not both")
+
         if kwargs:
-            self._unfinished = dict((f, k) for k, f in kwargs.items())
-            futures = list(kwargs.values())
+            self._unfinished = dict((f, k) for (k, f) in kwargs.items())
+            futures = list(kwargs.values())  # type: Sequence[Future]
         else:
-            self._unfinished = dict((f, i) for i, f in enumerate(args))
+            self._unfinished = dict((f, i) for (i, f) in enumerate(args))
             futures = args
-        self._finished = collections.deque()
-        self.current_index = None
-        self.current_future = None
-        self._running_future = None
+
+        self._finished = collections.deque()  # type: Deque[Future]
+        self.current_index = None  # type: Optional[Union[str, int]]
+        self.current_future = None  # type: Optional[Future]
+        self._running_future = None  # type: Optional[Future]
+
         for future in futures:
             future_add_done_callback(future, self._done_callback)

-    def done(self) ->bool:
+    def done(self) -> bool:
         """Returns True if this iterator has no more results."""
-        pass
+        if self._finished or self._unfinished:
+            return False
+        # Clear the 'current' values when iteration is done.
+        self.current_index = self.current_future = None
+        return True

-    def next(self) ->Future:
+    def next(self) -> Future:
         """Returns a `.Future` that will yield the next available result.

         Note that this `.Future` will not be the same object as any of
         the inputs.
         """
-        pass
+        self._running_future = Future()

-    def _return_result(self, done: Future) ->Future:
+        if self._finished:
+            return self._return_result(self._finished.popleft())
+
+        return self._running_future
+
+    def _done_callback(self, done: Future) -> None:
+        if self._running_future and not self._running_future.done():
+            self._return_result(done)
+        else:
+            self._finished.append(done)
+
+    def _return_result(self, done: Future) -> Future:
         """Called set the returned future's state that of the future
         we yielded, and set the current future for the iterator.
         """
-        pass
+        if self._running_future is None:
+            raise Exception("no future is running")
+        chain_future(done, self._running_future)
+
+        res = self._running_future
+        self._running_future = None
+        self.current_future = done
+        self.current_index = self._unfinished.pop(done)

-    def __aiter__(self) ->typing.AsyncIterator:
+        return res
+
+    def __aiter__(self) -> typing.AsyncIterator:
         return self

-    def __anext__(self) ->Future:
+    def __anext__(self) -> Future:
         if self.done():
-            raise getattr(builtins, 'StopAsyncIteration')()
+            # Lookup by name to silence pyflakes on older versions.
+            raise getattr(builtins, "StopAsyncIteration")()
         return self.next()
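
A consumption sketch for WaitIterator using the async-for support noted above (process_all is an illustrative name):

    from tornado import gen

    async def process_all(*futures):
        wait_iterator = gen.WaitIterator(*futures)
        async for result in wait_iterator:
            # Results arrive in completion order; current_index identifies
            # which input future produced this result.
            print(wait_iterator.current_index, result)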


-def multi(children: Union[List[_Yieldable], Dict[Any, _Yieldable]],
-    quiet_exceptions: 'Union[Type[Exception], Tuple[Type[Exception], ...]]'=()
-    ) ->'Union[Future[List], Future[Dict]]':
+def multi(
+    children: Union[List[_Yieldable], Dict[Any, _Yieldable]],
+    quiet_exceptions: "Union[Type[Exception], Tuple[Type[Exception], ...]]" = (),
+) -> "Union[Future[List], Future[Dict]]":
     """Runs multiple asynchronous operations in parallel.

     ``children`` may either be a list or a dict whose values are
@@ -325,15 +482,16 @@ def multi(children: Union[List[_Yieldable], Dict[Any, _Yieldable]],
        other than ``YieldPoint`` and `.Future`.

     """
-    pass
+    return multi_future(children, quiet_exceptions=quiet_exceptions)


 Multi = multi


-def multi_future(children: Union[List[_Yieldable], Dict[Any, _Yieldable]],
-    quiet_exceptions: 'Union[Type[Exception], Tuple[Type[Exception], ...]]'=()
-    ) ->'Union[Future[List], Future[Dict]]':
+def multi_future(
+    children: Union[List[_Yieldable], Dict[Any, _Yieldable]],
+    quiet_exceptions: "Union[Type[Exception], Tuple[Type[Exception], ...]]" = (),
+) -> "Union[Future[List], Future[Dict]]":
     """Wait for multiple asynchronous futures in parallel.

     Since Tornado 6.0, this function is exactly the same as `multi`.
@@ -348,10 +506,52 @@ def multi_future(children: Union[List[_Yieldable], Dict[Any, _Yieldable]],
     .. deprecated:: 4.3
        Use `multi` instead.
     """
-    pass
-
-
-def maybe_future(x: Any) ->Future:
+    if isinstance(children, dict):
+        keys = list(children.keys())  # type: Optional[List]
+        children_seq = children.values()  # type: Iterable
+    else:
+        keys = None
+        children_seq = children
+    children_futs = list(map(convert_yielded, children_seq))
+    assert all(is_future(i) or isinstance(i, _NullFuture) for i in children_futs)
+    unfinished_children = set(children_futs)
+
+    future = _create_future()
+    if not children_futs:
+        future_set_result_unless_cancelled(future, {} if keys is not None else [])
+
+    def callback(fut: Future) -> None:
+        unfinished_children.remove(fut)
+        if not unfinished_children:
+            result_list = []
+            for f in children_futs:
+                try:
+                    result_list.append(f.result())
+                except Exception as e:
+                    if future.done():
+                        if not isinstance(e, quiet_exceptions):
+                            app_log.error(
+                                "Multiple exceptions in yield list", exc_info=True
+                            )
+                    else:
+                        future_set_exc_info(future, sys.exc_info())
+            if not future.done():
+                if keys is not None:
+                    future_set_result_unless_cancelled(
+                        future, dict(zip(keys, result_list))
+                    )
+                else:
+                    future_set_result_unless_cancelled(future, result_list)
+
+    listening = set()  # type: Set[Future]
+    for f in children_futs:
+        if f not in listening:
+            listening.add(f)
+            future_add_done_callback(f, callback)
+    return future
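
A usage sketch for multi/multi_future (parallel_fetch is an illustrative name); the result preserves the shape of the input list or dict:

    from tornado import gen

    @gen.coroutine
    def parallel_fetch(client, url1, url2):
        resp1, resp2 = yield gen.multi([client.fetch(url1), client.fetch(url2)])
        raise gen.Return((resp1.body, resp2.body))

    # Dict form keys the results:  results = yield gen.multi({"a": fut_a, "b": fut_b})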
+
+
+def maybe_future(x: Any) -> Future:
     """Converts ``x`` into a `.Future`.

     If ``x`` is already a `.Future`, it is simply returned; otherwise
@@ -364,12 +564,19 @@ def maybe_future(x: Any) ->Future:
        Instead of `maybe_future`, check for the non-future result types
        you expect (often just ``None``), and ``yield`` anything unknown.
     """
-    pass
-
-
-def with_timeout(timeout: Union[float, datetime.timedelta], future:
-    _Yieldable, quiet_exceptions:
-    'Union[Type[Exception], Tuple[Type[Exception], ...]]'=()) ->Future:
+    if is_future(x):
+        return x
+    else:
+        fut = _create_future()
+        fut.set_result(x)
+        return fut
+
+
+def with_timeout(
+    timeout: Union[float, datetime.timedelta],
+    future: _Yieldable,
+    quiet_exceptions: "Union[Type[Exception], Tuple[Type[Exception], ...]]" = (),
+) -> Future:
     """Wraps a `.Future` (or other yieldable object) in a timeout.

     Raises `tornado.util.TimeoutError` if the input future does not
@@ -402,10 +609,51 @@ def with_timeout(timeout: Union[float, datetime.timedelta], future:
        ``tornado.util.TimeoutError`` is now an alias to ``asyncio.TimeoutError``.

     """
-    pass
-
-
-def sleep(duration: float) ->'Future[None]':
+    # It's tempting to optimize this by cancelling the input future on timeout
+    # instead of creating a new one, but A) we can't know if we are the only
+    # one waiting on the input future, so cancelling it might disrupt other
+    # callers and B) concurrent futures can only be cancelled while they are
+    # in the queue, so cancellation cannot reliably bound our waiting time.
+    future_converted = convert_yielded(future)
+    result = _create_future()
+    chain_future(future_converted, result)
+    io_loop = IOLoop.current()
+
+    def error_callback(future: Future) -> None:
+        try:
+            future.result()
+        except asyncio.CancelledError:
+            pass
+        except Exception as e:
+            if not isinstance(e, quiet_exceptions):
+                app_log.error(
+                    "Exception in Future %r after timeout", future, exc_info=True
+                )
+
+    def timeout_callback() -> None:
+        if not result.done():
+            result.set_exception(TimeoutError("Timeout"))
+        # In case the wrapped future goes on to fail, log it.
+        future_add_done_callback(future_converted, error_callback)
+
+    timeout_handle = io_loop.add_timeout(timeout, timeout_callback)
+    if isinstance(future_converted, Future):
+        # We know this future will resolve on the IOLoop, so we don't
+        # need the extra thread-safety of IOLoop.add_future (and we also
+        # don't care about StackContext here).
+        future_add_done_callback(
+            future_converted, lambda future: io_loop.remove_timeout(timeout_handle)
+        )
+    else:
+        # concurrent.futures.Futures may resolve on any thread, so we
+        # need to route them back to the IOLoop.
+        io_loop.add_future(
+            future_converted, lambda future: io_loop.remove_timeout(timeout_handle)
+        )
+    return result
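
A usage sketch for with_timeout (fetch_with_deadline is an illustrative name); note, per the comment above, that a timeout abandons the wrapped future rather than cancelling it:

    import datetime

    from tornado import gen
    from tornado.util import TimeoutError

    async def fetch_with_deadline(client, url):
        try:
            return await gen.with_timeout(
                datetime.timedelta(seconds=5), client.fetch(url)
            )
        except TimeoutError:
            return None  # the fetch may still be running; only our wait timed out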
+
+
+def sleep(duration: float) -> "Future[None]":
     """Return a `.Future` that resolves after the given number of seconds.

     When used with ``yield`` in a coroutine, this is a non-blocking
@@ -419,7 +667,11 @@ def sleep(duration: float) ->'Future[None]':

     .. versionadded:: 4.1
     """
-    pass
+    f = _create_future()
+    IOLoop.current().call_later(
+        duration, lambda: future_set_result_unless_cancelled(f, None)
+    )
+    return f


 class _NullFuture(object):
@@ -435,8 +687,18 @@ class _NullFuture(object):
     with it.
     """

+    def result(self) -> None:
+        return None
+
+    def done(self) -> bool:
+        return True
+

+# _null_future is used as a dummy value in the coroutine runner. It differs
+# from moment in that moment always adds a delay of one IOLoop iteration
+# while _null_future is processed as soon as possible.
 _null_future = typing.cast(Future, _NullFuture())
+
 moment = typing.cast(Future, _NullFuture())
 moment.__doc__ = """A special object which may be yielded to allow the IOLoop to run for
 one iteration.
@@ -466,28 +728,131 @@ class Runner(object):
     `.Future`)
     """

-    def __init__(self, ctx_run: Callable, gen:
-        'Generator[_Yieldable, Any, _T]', result_future: 'Future[_T]',
-        first_yielded: _Yieldable) ->None:
+    def __init__(
+        self,
+        ctx_run: Callable,
+        gen: "Generator[_Yieldable, Any, _T]",
+        result_future: "Future[_T]",
+        first_yielded: _Yieldable,
+    ) -> None:
         self.ctx_run = ctx_run
         self.gen = gen
         self.result_future = result_future
-        self.future = _null_future
+        self.future = _null_future  # type: Union[None, Future]
         self.running = False
         self.finished = False
         self.io_loop = IOLoop.current()
         if self.ctx_run(self.handle_yield, first_yielded):
-            gen = result_future = first_yielded = None
+            gen = result_future = first_yielded = None  # type: ignore
             self.ctx_run(self.run)

-    def run(self) ->None:
+    def run(self) -> None:
         """Starts or resumes the generator, running until it reaches a
         yield point that is not ready.
         """
-        pass
+        if self.running or self.finished:
+            return
+        try:
+            self.running = True
+            while True:
+                future = self.future
+                if future is None:
+                    raise Exception("No pending future")
+                if not future.done():
+                    return
+                self.future = None
+                try:
+                    try:
+                        value = future.result()
+                    except Exception as e:
+                        # Save the exception for later. It's important that
+                        # gen.throw() not be called inside this try/except block
+                        # because that makes sys.exc_info behave unexpectedly.
+                        exc: Optional[Exception] = e
+                    else:
+                        exc = None
+                    finally:
+                        future = None
+
+                    if exc is not None:
+                        try:
+                            yielded = self.gen.throw(exc)
+                        finally:
+                            # Break up a circular reference for faster GC on
+                            # CPython.
+                            del exc
+                    else:
+                        yielded = self.gen.send(value)
+
+                except (StopIteration, Return) as e:
+                    self.finished = True
+                    self.future = _null_future
+                    future_set_result_unless_cancelled(
+                        self.result_future, _value_from_stopiteration(e)
+                    )
+                    self.result_future = None  # type: ignore
+                    return
+                except Exception:
+                    self.finished = True
+                    self.future = _null_future
+                    future_set_exc_info(self.result_future, sys.exc_info())
+                    self.result_future = None  # type: ignore
+                    return
+                if not self.handle_yield(yielded):
+                    return
+                yielded = None
+        finally:
+            self.running = False
+
+    def handle_yield(self, yielded: _Yieldable) -> bool:
+        try:
+            self.future = convert_yielded(yielded)
+        except BadYieldError:
+            self.future = Future()
+            future_set_exc_info(self.future, sys.exc_info())
+
+        if self.future is moment:
+            self.io_loop.add_callback(self.ctx_run, self.run)
+            return False
+        elif self.future is None:
+            raise Exception("no pending future")
+        elif not self.future.done():
+
+            def inner(f: Any) -> None:
+                # Break a reference cycle to speed GC.
+                f = None  # noqa: F841
+                self.ctx_run(self.run)
+
+            self.io_loop.add_future(self.future, inner)
+            return False
+        return True
+
+    def handle_exception(
+        self, typ: Type[Exception], value: Exception, tb: types.TracebackType
+    ) -> bool:
+        if not self.running and not self.finished:
+            self.future = Future()
+            future_set_exc_info(self.future, (typ, value, tb))
+            self.ctx_run(self.run)
+            return True
+        else:
+            return False


-def convert_yielded(yielded: _Yieldable) ->Future:
+def _wrap_awaitable(awaitable: Awaitable) -> Future:
+    # Convert Awaitables into Futures.
+    # Note that we use ensure_future, which handles both awaitables
+    # and coroutines, rather than create_task, which only accepts
+    # coroutines. (ensure_future calls create_task if given a coroutine)
+    fut = asyncio.ensure_future(awaitable)
+    # See comments on IOLoop._pending_tasks.
+    loop = IOLoop.current()
+    loop._register_task(fut)
+    fut.add_done_callback(lambda f: loop._unregister_task(f))
+    return fut
+
+
+def convert_yielded(yielded: _Yieldable) -> Future:
     """Convert a yielded object into a `.Future`.

     The default implementation accepts lists, dictionaries, and
@@ -504,7 +869,18 @@ def convert_yielded(yielded: _Yieldable) ->Future:
     .. versionadded:: 4.1

     """
-    pass
+    if yielded is None or yielded is moment:
+        return moment
+    elif yielded is _null_future:
+        return _null_future
+    elif isinstance(yielded, (list, dict)):
+        return multi(yielded)  # type: ignore
+    elif is_future(yielded):
+        return typing.cast(Future, yielded)
+    elif isawaitable(yielded):
+        return _wrap_awaitable(yielded)  # type: ignore
+    else:
+        raise BadYieldError("yielded unknown object %r" % (yielded,))


 convert_yielded = singledispatch(convert_yielded)
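
A sketch of the singledispatch extension point that convert_yielded's docstring describes (LazyValue is a hypothetical type used only for illustration):

    from tornado import gen

    class LazyValue:
        def __init__(self, value):
            self.value = value

    @gen.convert_yielded.register(LazyValue)
    def _(lazy):
        # Teach the coroutine runner to treat LazyValue as a resolved Future.
        return gen.maybe_future(lazy.value)

    # Afterwards a coroutine may simply `yield LazyValue(42)` and receive 42.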
diff --git a/tornado/http1connection.py b/tornado/http1connection.py
index aa35011f..1a23f5c7 100644
--- a/tornado/http1connection.py
+++ b/tornado/http1connection.py
@@ -1,25 +1,48 @@
+#
+# Copyright 2014 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """Client and server implementations of HTTP/1.x.

 .. versionadded:: 4.0
 """
+
 import asyncio
 import logging
 import re
 import types
-from tornado.concurrent import Future, future_add_done_callback, future_set_result_unless_cancelled
+
+from tornado.concurrent import (
+    Future,
+    future_add_done_callback,
+    future_set_result_unless_cancelled,
+)
 from tornado.escape import native_str, utf8
 from tornado import gen
 from tornado import httputil
 from tornado import iostream
 from tornado.log import gen_log, app_log
 from tornado.util import GzipDecompressor
+
+
 from typing import cast, Optional, Type, Awaitable, Callable, Union, Tuple
-CR_OR_LF_RE = re.compile(b'\r|\n')

+CR_OR_LF_RE = re.compile(b"\r|\n")

-class _QuietException(Exception):

-    def __init__(self) ->None:
+class _QuietException(Exception):
+    def __init__(self) -> None:
         pass


@@ -29,27 +52,37 @@ class _ExceptionLoggingContext(object):
     converted to _QuietException
     """

-    def __init__(self, logger: logging.Logger) ->None:
+    def __init__(self, logger: logging.Logger) -> None:
         self.logger = logger

-    def __enter__(self) ->None:
+    def __enter__(self) -> None:
         pass

-    def __exit__(self, typ: 'Optional[Type[BaseException]]', value:
-        Optional[BaseException], tb: types.TracebackType) ->None:
+    def __exit__(
+        self,
+        typ: "Optional[Type[BaseException]]",
+        value: Optional[BaseException],
+        tb: types.TracebackType,
+    ) -> None:
         if value is not None:
             assert typ is not None
-            self.logger.error('Uncaught exception', exc_info=(typ, value, tb))
+            self.logger.error("Uncaught exception", exc_info=(typ, value, tb))
             raise _QuietException


 class HTTP1ConnectionParameters(object):
     """Parameters for `.HTTP1Connection` and `.HTTP1ServerConnection`."""

-    def __init__(self, no_keep_alive: bool=False, chunk_size: Optional[int]
-        =None, max_header_size: Optional[int]=None, header_timeout:
-        Optional[float]=None, max_body_size: Optional[int]=None,
-        body_timeout: Optional[float]=None, decompress: bool=False) ->None:
+    def __init__(
+        self,
+        no_keep_alive: bool = False,
+        chunk_size: Optional[int] = None,
+        max_header_size: Optional[int] = None,
+        header_timeout: Optional[float] = None,
+        max_body_size: Optional[int] = None,
+        body_timeout: Optional[float] = None,
+        decompress: bool = False,
+    ) -> None:
         """
         :arg bool no_keep_alive: If true, always close the connection after
             one request.
@@ -77,9 +110,13 @@ class HTTP1Connection(httputil.HTTPConnection):
     for servers.
     """

-    def __init__(self, stream: iostream.IOStream, is_client: bool, params:
-        Optional[HTTP1ConnectionParameters]=None, context: Optional[object]
-        =None) ->None:
+    def __init__(
+        self,
+        stream: iostream.IOStream,
+        is_client: bool,
+        params: Optional[HTTP1ConnectionParameters] = None,
+        context: Optional[object] = None,
+    ) -> None:
         """
         :arg stream: an `.IOStream`
         :arg bool is_client: client or server
@@ -94,23 +131,43 @@ class HTTP1Connection(httputil.HTTPConnection):
         self.params = params
         self.context = context
         self.no_keep_alive = params.no_keep_alive
-        self._max_body_size = (self.params.max_body_size if self.params.
-            max_body_size is not None else self.stream.max_buffer_size)
+        # The body limits can be altered by the delegate, so save them
+        # here instead of just referencing self.params later.
+        self._max_body_size = (
+            self.params.max_body_size
+            if self.params.max_body_size is not None
+            else self.stream.max_buffer_size
+        )
         self._body_timeout = self.params.body_timeout
+        # _write_finished is set to True when finish() has been called,
+        # i.e. there will be no more data sent.  Data may still be in the
+        # stream's write buffer.
         self._write_finished = False
+        # True when we have read the entire incoming body.
         self._read_finished = False
-        self._finish_future = Future()
+        # _finish_future resolves when all data has been written and flushed
+        # to the IOStream.
+        self._finish_future = Future()  # type: Future[None]
+        # If true, the connection should be closed after this request
+        # (after the response has been written in the server side,
+        # and after it has been read in the client)
         self._disconnect_on_finish = False
         self._clear_callbacks()
-        self._request_start_line = None
-        self._response_start_line = None
-        self._request_headers = None
+        # Save the start lines after we read or write them; they
+        # affect later processing (e.g. 304 responses and HEAD methods
+        # have content-length but no bodies)
+        self._request_start_line = None  # type: Optional[httputil.RequestStartLine]
+        self._response_start_line = None  # type: Optional[httputil.ResponseStartLine]
+        self._request_headers = None  # type: Optional[httputil.HTTPHeaders]
+        # True if we are writing output with chunked encoding.
         self._chunking_output = False
-        self._expected_content_remaining = None
-        self._pending_write = None
+        # While reading a body with a content-length, this is the
+        # amount left to read.
+        self._expected_content_remaining = None  # type: Optional[int]
+        # A Future for our outgoing writes, returned by IOStream.write.
+        self._pending_write = None  # type: Optional[Future[None]]

-    def read_response(self, delegate: httputil.HTTPMessageDelegate
-        ) ->Awaitable[bool]:
+    def read_response(self, delegate: httputil.HTTPMessageDelegate) -> Awaitable[bool]:
         """Read a single HTTP response.

         Typical client-mode usage is to write a request using `write_headers`,
@@ -121,18 +178,145 @@ class HTTP1Connection(httputil.HTTPConnection):
         Returns a `.Future` that resolves to a bool after the full response has
         been read. The result is true if the stream is still open.
         """
-        pass
-
-    def _clear_callbacks(self) ->None:
+        if self.params.decompress:
+            delegate = _GzipMessageDelegate(delegate, self.params.chunk_size)
+        return self._read_message(delegate)
+
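Putting the client path together: a caller writes the request with
``write_headers``/``finish`` and then consumes the reply through a
delegate. A minimal sketch, with host, port, and the delegate purely
illustrative and error handling omitted::

    from tornado import httputil, ioloop
    from tornado.http1connection import HTTP1Connection
    from tornado.tcpclient import TCPClient

    class PrintDelegate(httputil.HTTPMessageDelegate):
        def headers_received(self, start_line, headers):
            print(start_line.code, start_line.reason)

        def data_received(self, chunk):
            print("got %d body bytes" % len(chunk))

        def finish(self):
            print("response complete")

    async def fetch_once():
        stream = await TCPClient().connect("example.com", 80)
        conn = HTTP1Connection(stream, is_client=True)
        await conn.write_headers(
            httputil.RequestStartLine("GET", "/", "HTTP/1.1"),
            httputil.HTTPHeaders({"Host": "example.com"}),
        )
        conn.finish()  # no request body; just flush and wait
        await conn.read_response(PrintDelegate())

    ioloop.IOLoop.current().run_sync(fetch_once)
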
+    async def _read_message(self, delegate: httputil.HTTPMessageDelegate) -> bool:
+        need_delegate_close = False
+        try:
+            header_future = self.stream.read_until_regex(
+                b"\r?\n\r?\n", max_bytes=self.params.max_header_size
+            )
+            if self.params.header_timeout is None:
+                header_data = await header_future
+            else:
+                try:
+                    header_data = await gen.with_timeout(
+                        self.stream.io_loop.time() + self.params.header_timeout,
+                        header_future,
+                        quiet_exceptions=iostream.StreamClosedError,
+                    )
+                except gen.TimeoutError:
+                    self.close()
+                    return False
+            start_line_str, headers = self._parse_headers(header_data)
+            if self.is_client:
+                resp_start_line = httputil.parse_response_start_line(start_line_str)
+                self._response_start_line = resp_start_line
+                start_line = (
+                    resp_start_line
+                )  # type: Union[httputil.RequestStartLine, httputil.ResponseStartLine]
+                # TODO: this will need to change to support client-side keepalive
+                self._disconnect_on_finish = False
+            else:
+                req_start_line = httputil.parse_request_start_line(start_line_str)
+                self._request_start_line = req_start_line
+                self._request_headers = headers
+                start_line = req_start_line
+                self._disconnect_on_finish = not self._can_keep_alive(
+                    req_start_line, headers
+                )
+            need_delegate_close = True
+            with _ExceptionLoggingContext(app_log):
+                header_recv_future = delegate.headers_received(start_line, headers)
+                if header_recv_future is not None:
+                    await header_recv_future
+            if self.stream is None:
+                # We've been detached.
+                need_delegate_close = False
+                return False
+            skip_body = False
+            if self.is_client:
+                assert isinstance(start_line, httputil.ResponseStartLine)
+                if (
+                    self._request_start_line is not None
+                    and self._request_start_line.method == "HEAD"
+                ):
+                    skip_body = True
+                code = start_line.code
+                if code == 304:
+                    # 304 responses may include the content-length header
+                    # but do not actually have a body.
+                    # http://tools.ietf.org/html/rfc7230#section-3.3
+                    skip_body = True
+                if 100 <= code < 200:
+                    # 1xx responses should never indicate the presence of
+                    # a body.
+                    if "Content-Length" in headers or "Transfer-Encoding" in headers:
+                        raise httputil.HTTPInputError(
+                            "Response code %d cannot have body" % code
+                        )
+                    # TODO: client delegates will get headers_received twice
+                    # in the case of a 100-continue.  Document or change?
+                    await self._read_message(delegate)
+            else:
+                if headers.get("Expect") == "100-continue" and not self._write_finished:
+                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
+            if not skip_body:
+                body_future = self._read_body(
+                    resp_start_line.code if self.is_client else 0, headers, delegate
+                )
+                if body_future is not None:
+                    if self._body_timeout is None:
+                        await body_future
+                    else:
+                        try:
+                            await gen.with_timeout(
+                                self.stream.io_loop.time() + self._body_timeout,
+                                body_future,
+                                quiet_exceptions=iostream.StreamClosedError,
+                            )
+                        except gen.TimeoutError:
+                            gen_log.info("Timeout reading body from %s", self.context)
+                            self.stream.close()
+                            return False
+            self._read_finished = True
+            if not self._write_finished or self.is_client:
+                need_delegate_close = False
+                with _ExceptionLoggingContext(app_log):
+                    delegate.finish()
+            # If we're waiting for the application to produce an asynchronous
+            # response, and we're not detached, register a close callback
+            # on the stream (we didn't need one while we were reading)
+            if (
+                not self._finish_future.done()
+                and self.stream is not None
+                and not self.stream.closed()
+            ):
+                self.stream.set_close_callback(self._on_connection_close)
+                await self._finish_future
+            if self.is_client and self._disconnect_on_finish:
+                self.close()
+            if self.stream is None:
+                return False
+        except httputil.HTTPInputError as e:
+            gen_log.info("Malformed HTTP message from %s: %s", self.context, e)
+            if not self.is_client:
+                await self.stream.write(b"HTTP/1.1 400 Bad Request\r\n\r\n")
+            self.close()
+            return False
+        finally:
+            if need_delegate_close:
+                with _ExceptionLoggingContext(app_log):
+                    delegate.on_connection_close()
+            header_future = None  # type: ignore
+            self._clear_callbacks()
+        return True
+
+    def _clear_callbacks(self) -> None:
         """Clears the callback attributes.

         This allows the request handler to be garbage collected more
         quickly in CPython by breaking up reference cycles.
         """
-        pass
+        self._write_callback = None
+        self._write_future = None  # type: Optional[Future[None]]
+        self._close_callback = None  # type: Optional[Callable[[], None]]
+        if self.stream is not None:
+            self.stream.set_close_callback(None)

-    def set_close_callback(self, callback: Optional[Callable[[], None]]
-        ) ->None:
+    def set_close_callback(self, callback: Optional[Callable[[], None]]) -> None:
         """Sets a callback that will be run when the connection is closed.

         Note that this callback is slightly different from
@@ -144,9 +328,28 @@ class HTTP1Connection(httputil.HTTPConnection):
         after sending its request but before receiving all the
         response.
         """
-        pass
+        self._close_callback = callback
+
+    def _on_connection_close(self) -> None:
+        # Note that this callback is only registered on the IOStream
+        # when we have finished reading the request and are waiting for
+        # the application to produce its response.
+        if self._close_callback is not None:
+            callback = self._close_callback
+            self._close_callback = None
+            callback()
+        if not self._finish_future.done():
+            future_set_result_unless_cancelled(self._finish_future, None)
+        self._clear_callbacks()

-    def detach(self) ->iostream.IOStream:
+    def close(self) -> None:
+        if self.stream is not None:
+            self.stream.close()
+        self._clear_callbacks()
+        if not self._finish_future.done():
+            future_set_result_unless_cancelled(self._finish_future, None)
+
+    def detach(self) -> iostream.IOStream:
         """Take control of the underlying stream.

         Returns the underlying `.IOStream` object and stops all further
@@ -154,58 +357,419 @@ class HTTP1Connection(httputil.HTTPConnection):
         `.HTTPMessageDelegate.headers_received`.  Intended for implementing
         protocols like websockets that tunnel over an HTTP handshake.
         """
-        pass
+        self._clear_callbacks()
+        stream = self.stream
+        self.stream = None  # type: ignore
+        if not self._finish_future.done():
+            future_set_result_unless_cancelled(self._finish_future, None)
+        return stream

-    def set_body_timeout(self, timeout: float) ->None:
+    def set_body_timeout(self, timeout: float) -> None:
         """Sets the body timeout for a single request.

         Overrides the value from `.HTTP1ConnectionParameters`.
         """
-        pass
+        self._body_timeout = timeout

-    def set_max_body_size(self, max_body_size: int) ->None:
+    def set_max_body_size(self, max_body_size: int) -> None:
         """Sets the body size limit for a single request.

         Overrides the value from `.HTTP1ConnectionParameters`.
         """
-        pass
-
-    def write_headers(self, start_line: Union[httputil.RequestStartLine,
-        httputil.ResponseStartLine], headers: httputil.HTTPHeaders, chunk:
-        Optional[bytes]=None) ->'Future[None]':
+        self._max_body_size = max_body_size
+
+    def write_headers(
+        self,
+        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
+        headers: httputil.HTTPHeaders,
+        chunk: Optional[bytes] = None,
+    ) -> "Future[None]":
         """Implements `.HTTPConnection.write_headers`."""
-        pass
-
-    def write(self, chunk: bytes) ->'Future[None]':
+        lines = []
+        if self.is_client:
+            assert isinstance(start_line, httputil.RequestStartLine)
+            self._request_start_line = start_line
+            lines.append(utf8("%s %s HTTP/1.1" % (start_line[0], start_line[1])))
+            # Client requests with a non-empty body must have either a
+            # Content-Length or a Transfer-Encoding. If Content-Length is not
+            # present we'll add our Transfer-Encoding below.
+            self._chunking_output = (
+                start_line.method in ("POST", "PUT", "PATCH")
+                and "Content-Length" not in headers
+            )
+        else:
+            assert isinstance(start_line, httputil.ResponseStartLine)
+            assert self._request_start_line is not None
+            assert self._request_headers is not None
+            self._response_start_line = start_line
+            lines.append(utf8("HTTP/1.1 %d %s" % (start_line[1], start_line[2])))
+            self._chunking_output = (
+                # TODO: should this use
+                # self._request_start_line.version or
+                # start_line.version?
+                self._request_start_line.version == "HTTP/1.1"
+                # Omit payload header field for HEAD request.
+                and self._request_start_line.method != "HEAD"
+                # 1xx, 204 and 304 responses have no body (not even a zero-length
+                # body), and so should not have either Content-Length or
+                # Transfer-Encoding headers.
+                and start_line.code not in (204, 304)
+                and (start_line.code < 100 or start_line.code >= 200)
+                # No need to chunk the output if a Content-Length is specified.
+                and "Content-Length" not in headers
+            )
+            # If connection to a 1.1 client will be closed, inform client
+            if (
+                self._request_start_line.version == "HTTP/1.1"
+                and self._disconnect_on_finish
+            ):
+                headers["Connection"] = "close"
+            # If a 1.0 client asked for keep-alive, add the header.
+            if (
+                self._request_start_line.version == "HTTP/1.0"
+                and self._request_headers.get("Connection", "").lower() == "keep-alive"
+            ):
+                headers["Connection"] = "Keep-Alive"
+        if self._chunking_output:
+            headers["Transfer-Encoding"] = "chunked"
+        if not self.is_client and (
+            self._request_start_line.method == "HEAD"
+            or cast(httputil.ResponseStartLine, start_line).code == 304
+        ):
+            self._expected_content_remaining = 0
+        elif "Content-Length" in headers:
+            self._expected_content_remaining = parse_int(headers["Content-Length"])
+        else:
+            self._expected_content_remaining = None
+        # TODO: headers are supposed to be of type str, but we still have some
+        # cases that let bytes slip through. Remove these native_str calls when those
+        # are fixed.
+        header_lines = (
+            native_str(n) + ": " + native_str(v) for n, v in headers.get_all()
+        )
+        lines.extend(line.encode("latin1") for line in header_lines)
+        for line in lines:
+            if CR_OR_LF_RE.search(line):
+                raise ValueError("Illegal characters (CR or LF) in header: %r" % line)
+        future = None
+        if self.stream.closed():
+            future = self._write_future = Future()
+            future.set_exception(iostream.StreamClosedError())
+            future.exception()
+        else:
+            future = self._write_future = Future()
+            data = b"\r\n".join(lines) + b"\r\n\r\n"
+            if chunk:
+                data += self._format_chunk(chunk)
+            self._pending_write = self.stream.write(data)
+            future_add_done_callback(self._pending_write, self._on_write_complete)
+        return future
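
On the server side the usual sequence over an already-established
connection is headers, body chunks, then ``finish()``. A sketch,
assuming ``conn`` is a server-mode ``HTTP1Connection`` that has just
read a request::

    conn.write_headers(
        httputil.ResponseStartLine("HTTP/1.1", 200, "OK"),
        httputil.HTTPHeaders({"Content-Type": "text/plain",
                              "Content-Length": "5"}),
    )
    conn.write(b"hello")  # counted against _expected_content_remaining
    conn.finish()         # verifies exactly Content-Length bytes were sent
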
+
+    def _format_chunk(self, chunk: bytes) -> bytes:
+        if self._expected_content_remaining is not None:
+            self._expected_content_remaining -= len(chunk)
+            if self._expected_content_remaining < 0:
+                # Close the stream now to stop further framing errors.
+                self.stream.close()
+                raise httputil.HTTPOutputError(
+                    "Tried to write more data than Content-Length"
+                )
+        if self._chunking_output and chunk:
+            # Don't write out empty chunks because that means END-OF-STREAM
+            # with chunked encoding
+            return utf8("%x" % len(chunk)) + b"\r\n" + chunk + b"\r\n"
+        else:
+            return chunk
+
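Concretely, the framing ``_format_chunk`` emits for chunked output is
the standard hex-length prefix with CRLF delimiters; ``finish()`` later
appends the zero-length terminating chunk. An illustrative check::

    payload = b"hello"
    framed = (b"%x" % len(payload)) + b"\r\n" + payload + b"\r\n"
    assert framed == b"5\r\nhello\r\n"
    terminator = b"0\r\n\r\n"  # written by finish() when chunking output
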
+    def write(self, chunk: bytes) -> "Future[None]":
         """Implements `.HTTPConnection.write`.

         For backwards compatibility it is allowed but deprecated to
         skip `write_headers` and instead call `write()` with a
         pre-encoded header block.
         """
-        pass
-
-    def finish(self) ->None:
+        future = None
+        if self.stream.closed():
+            future = self._write_future = Future()
+            self._write_future.set_exception(iostream.StreamClosedError())
+            self._write_future.exception()
+        else:
+            future = self._write_future = Future()
+            self._pending_write = self.stream.write(self._format_chunk(chunk))
+            future_add_done_callback(self._pending_write, self._on_write_complete)
+        return future
+
+    def finish(self) -> None:
         """Implements `.HTTPConnection.finish`."""
-        pass
+        if (
+            self._expected_content_remaining is not None
+            and self._expected_content_remaining != 0
+            and not self.stream.closed()
+        ):
+            self.stream.close()
+            raise httputil.HTTPOutputError(
+                "Tried to write %d bytes less than Content-Length"
+                % self._expected_content_remaining
+            )
+        if self._chunking_output:
+            if not self.stream.closed():
+                self._pending_write = self.stream.write(b"0\r\n\r\n")
+                self._pending_write.add_done_callback(self._on_write_complete)
+        self._write_finished = True
+        # If the app finished the request while we're still reading,
+        # divert any remaining data away from the delegate and
+        # close the connection when we're done sending our response.
+        # Closing the connection is the only way to avoid reading the
+        # whole input body.
+        if not self._read_finished:
+            self._disconnect_on_finish = True
+        # No more data is coming, so instruct TCP to send any remaining
+        # data immediately instead of waiting for a full packet or ack.
+        self.stream.set_nodelay(True)
+        if self._pending_write is None:
+            self._finish_request(None)
+        else:
+            future_add_done_callback(self._pending_write, self._finish_request)
+
+    def _on_write_complete(self, future: "Future[None]") -> None:
+        exc = future.exception()
+        if exc is not None and not isinstance(exc, iostream.StreamClosedError):
+            future.result()
+        if self._write_callback is not None:
+            callback = self._write_callback
+            self._write_callback = None
+            self.stream.io_loop.add_callback(callback)
+        if self._write_future is not None:
+            future = self._write_future
+            self._write_future = None
+            future_set_result_unless_cancelled(future, None)
+
+    def _can_keep_alive(
+        self, start_line: httputil.RequestStartLine, headers: httputil.HTTPHeaders
+    ) -> bool:
+        if self.params.no_keep_alive:
+            return False
+        connection_header = headers.get("Connection")
+        if connection_header is not None:
+            connection_header = connection_header.lower()
+        if start_line.version == "HTTP/1.1":
+            return connection_header != "close"
+        elif (
+            "Content-Length" in headers
+            or is_transfer_encoding_chunked(headers)
+            or getattr(start_line, "method", None) in ("HEAD", "GET")
+        ):
+            # start_line may be a request or response start line; only
+            # the former has a method attribute.
+            return connection_header == "keep-alive"
+        return False
+
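The keep-alive rules above can be summarized with two hypothetical
calls (``conn`` is assumed to be an ``HTTP1Connection`` whose params
leave ``no_keep_alive`` false; ``_can_keep_alive`` is private and used
here only for illustration)::

    # HTTP/1.1 defaults to keep-alive unless the client opts out.
    conn._can_keep_alive(
        httputil.RequestStartLine("GET", "/", "HTTP/1.1"),
        httputil.HTTPHeaders(),
    )  # -> True

    # HTTP/1.0 needs an explicit opt-in *and* a self-delimiting message;
    # a POST with no Content-Length fails the second condition.
    conn._can_keep_alive(
        httputil.RequestStartLine("POST", "/", "HTTP/1.0"),
        httputil.HTTPHeaders({"Connection": "keep-alive"}),
    )  # -> False
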
+    def _finish_request(self, future: "Optional[Future[None]]") -> None:
+        self._clear_callbacks()
+        if not self.is_client and self._disconnect_on_finish:
+            self.close()
+            return
+        # Turn Nagle's algorithm back on, leaving the stream in its
+        # default state for the next request.
+        self.stream.set_nodelay(False)
+        if not self._finish_future.done():
+            future_set_result_unless_cancelled(self._finish_future, None)
+
+    def _parse_headers(self, data: bytes) -> Tuple[str, httputil.HTTPHeaders]:
+        # The lstrip removes newlines that some implementations sometimes
+        # insert between messages of a reused connection.  Per RFC 7230,
+        # we SHOULD ignore at least one empty line before the request.
+        # http://tools.ietf.org/html/rfc7230#section-3.5
+        data_str = native_str(data.decode("latin1")).lstrip("\r\n")
+        # RFC 7230 allows both CRLF and a bare LF as the line terminator.
+        eol = data_str.find("\n")
+        start_line = data_str[:eol].rstrip("\r")
+        headers = httputil.HTTPHeaders.parse(data_str[eol:])
+        return start_line, headers
+
+    def _read_body(
+        self,
+        code: int,
+        headers: httputil.HTTPHeaders,
+        delegate: httputil.HTTPMessageDelegate,
+    ) -> Optional[Awaitable[None]]:
+        if "Content-Length" in headers:
+            if "," in headers["Content-Length"]:
+                # Proxies sometimes cause Content-Length headers to get
+                # duplicated.  If all the values are identical then we can
+                # use them but if they differ it's an error.
+                pieces = re.split(r",\s*", headers["Content-Length"])
+                if any(i != pieces[0] for i in pieces):
+                    raise httputil.HTTPInputError(
+                        "Multiple unequal Content-Lengths: %r"
+                        % headers["Content-Length"]
+                    )
+                headers["Content-Length"] = pieces[0]
+
+            try:
+                content_length: Optional[int] = parse_int(headers["Content-Length"])
+            except ValueError:
+                # Handles non-integer Content-Length value.
+                raise httputil.HTTPInputError(
+                    "Only integer Content-Length is allowed: %s"
+                    % headers["Content-Length"]
+                )
+
+            if cast(int, content_length) > self._max_body_size:
+                raise httputil.HTTPInputError("Content-Length too long")
+        else:
+            content_length = None
+
+        is_chunked = is_transfer_encoding_chunked(headers)
+
+        if code == 204:
+            # This response code is not allowed to have a non-empty body,
+            # and has an implicit length of zero instead of read-until-close.
+            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
+            if is_chunked or content_length not in (None, 0):
+                raise httputil.HTTPInputError(
+                    "Response with code %d should not have body" % code
+                )
+            content_length = 0
+
+        if is_chunked:
+            return self._read_chunked_body(delegate)
+        if content_length is not None:
+            return self._read_fixed_body(content_length, delegate)
+        if self.is_client:
+            return self._read_body_until_close(delegate)
+        return None
+
+    async def _read_fixed_body(
+        self, content_length: int, delegate: httputil.HTTPMessageDelegate
+    ) -> None:
+        while content_length > 0:
+            body = await self.stream.read_bytes(
+                min(self.params.chunk_size, content_length), partial=True
+            )
+            content_length -= len(body)
+            if not self._write_finished or self.is_client:
+                with _ExceptionLoggingContext(app_log):
+                    ret = delegate.data_received(body)
+                    if ret is not None:
+                        await ret
+
+    async def _read_chunked_body(self, delegate: httputil.HTTPMessageDelegate) -> None:
+        # TODO: "chunk extensions" http://tools.ietf.org/html/rfc2616#section-3.6.1
+        total_size = 0
+        while True:
+            chunk_len_str = await self.stream.read_until(b"\r\n", max_bytes=64)
+            try:
+                chunk_len = parse_hex_int(native_str(chunk_len_str[:-2]))
+            except ValueError:
+                raise httputil.HTTPInputError("invalid chunk size")
+            if chunk_len == 0:
+                crlf = await self.stream.read_bytes(2)
+                if crlf != b"\r\n":
+                    raise httputil.HTTPInputError(
+                        "improperly terminated chunked request"
+                    )
+                return
+            total_size += chunk_len
+            if total_size > self._max_body_size:
+                raise httputil.HTTPInputError("chunked body too large")
+            bytes_to_read = chunk_len
+            while bytes_to_read:
+                chunk = await self.stream.read_bytes(
+                    min(bytes_to_read, self.params.chunk_size), partial=True
+                )
+                bytes_to_read -= len(chunk)
+                if not self._write_finished or self.is_client:
+                    with _ExceptionLoggingContext(app_log):
+                        ret = delegate.data_received(chunk)
+                        if ret is not None:
+                            await ret
+            # chunk ends with \r\n
+            crlf = await self.stream.read_bytes(2)
+            assert crlf == b"\r\n"
+
+    async def _read_body_until_close(
+        self, delegate: httputil.HTTPMessageDelegate
+    ) -> None:
+        body = await self.stream.read_until_close()
+        if not self._write_finished or self.is_client:
+            with _ExceptionLoggingContext(app_log):
+                ret = delegate.data_received(body)
+                if ret is not None:
+                    await ret


 class _GzipMessageDelegate(httputil.HTTPMessageDelegate):
     """Wraps an `HTTPMessageDelegate` to decode ``Content-Encoding: gzip``."""

-    def __init__(self, delegate: httputil.HTTPMessageDelegate, chunk_size: int
-        ) ->None:
+    def __init__(self, delegate: httputil.HTTPMessageDelegate, chunk_size: int) -> None:
         self._delegate = delegate
         self._chunk_size = chunk_size
-        self._decompressor = None
+        self._decompressor = None  # type: Optional[GzipDecompressor]
+
+    def headers_received(
+        self,
+        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
+        headers: httputil.HTTPHeaders,
+    ) -> Optional[Awaitable[None]]:
+        if headers.get("Content-Encoding", "").lower() == "gzip":
+            self._decompressor = GzipDecompressor()
+            # Downstream delegates will only see uncompressed data,
+            # so rename the content-encoding header.
+            # (but note that curl_httpclient doesn't do this).
+            headers.add("X-Consumed-Content-Encoding", headers["Content-Encoding"])
+            del headers["Content-Encoding"]
+        return self._delegate.headers_received(start_line, headers)
+
+    async def data_received(self, chunk: bytes) -> None:
+        if self._decompressor:
+            compressed_data = chunk
+            while compressed_data:
+                decompressed = self._decompressor.decompress(
+                    compressed_data, self._chunk_size
+                )
+                if decompressed:
+                    ret = self._delegate.data_received(decompressed)
+                    if ret is not None:
+                        await ret
+                compressed_data = self._decompressor.unconsumed_tail
+                if compressed_data and not decompressed:
+                    raise httputil.HTTPInputError(
+                        "encountered unconsumed gzip data without making progress"
+                    )
+        else:
+            ret = self._delegate.data_received(chunk)
+            if ret is not None:
+                await ret
+
+    def finish(self) -> None:
+        if self._decompressor is not None:
+            tail = self._decompressor.flush()
+            if tail:
+                # The tail should always be empty: decompress returned
+                # all that it can in data_received and the only
+                # purpose of the flush call is to detect errors such
+                # as truncated input. If we did legitimately get a new
+                # chunk at this point we'd need to change the
+                # interface to make finish() a coroutine.
+                raise ValueError(
+                    "decompressor.flush returned data; possible truncated input"
+                )
+        return self._delegate.finish()
+
+    def on_connection_close(self) -> None:
+        return self._delegate.on_connection_close()
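
Applications rarely construct ``_GzipMessageDelegate`` directly; it is
wired in when decompression is requested, either via
``HTTP1ConnectionParameters(decompress=True)`` or the server flag shown
later in this diff. A sketch, assuming ``app`` is an existing
``tornado.web.Application``::

    from tornado.httpserver import HTTPServer

    server = HTTPServer(app, decompress_request=True)
    server.listen(8888)
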


 class HTTP1ServerConnection(object):
     """An HTTP/1.x server."""

-    def __init__(self, stream: iostream.IOStream, params: Optional[
-        HTTP1ConnectionParameters]=None, context: Optional[object]=None
-        ) ->None:
+    def __init__(
+        self,
+        stream: iostream.IOStream,
+        params: Optional[HTTP1ConnectionParameters] = None,
+        context: Optional[object] = None,
+    ) -> None:
         """
         :arg stream: an `.IOStream`
         :arg params: a `.HTTP1ConnectionParameters` or None
@@ -217,41 +781,106 @@ class HTTP1ServerConnection(object):
             params = HTTP1ConnectionParameters()
         self.params = params
         self.context = context
-        self._serving_future = None
+        self._serving_future = None  # type: Optional[Future[None]]

-    async def close(self) ->None:
+    async def close(self) -> None:
         """Closes the connection.

         Returns a `.Future` that resolves after the serving loop has exited.
         """
-        pass
-
-    def start_serving(self, delegate: httputil.HTTPServerConnectionDelegate
-        ) ->None:
+        self.stream.close()
+        # Block until the serving loop is done, but ignore any exceptions
+        # (start_serving is already responsible for logging them).
+        assert self._serving_future is not None
+        try:
+            await self._serving_future
+        except Exception:
+            pass
+
+    def start_serving(self, delegate: httputil.HTTPServerConnectionDelegate) -> None:
         """Starts serving requests on this connection.

         :arg delegate: a `.HTTPServerConnectionDelegate`
         """
-        pass
-
-
-DIGITS = re.compile('[0-9]+')
-HEXDIGITS = re.compile('[0-9a-fA-F]+')
-
-
-def parse_int(s: str) ->int:
+        assert isinstance(delegate, httputil.HTTPServerConnectionDelegate)
+        fut = gen.convert_yielded(self._server_request_loop(delegate))
+        self._serving_future = fut
+        # Register the future on the IOLoop so its errors get logged.
+        self.stream.io_loop.add_future(fut, lambda f: f.result())
+
+    async def _server_request_loop(
+        self, delegate: httputil.HTTPServerConnectionDelegate
+    ) -> None:
+        try:
+            while True:
+                conn = HTTP1Connection(self.stream, False, self.params, self.context)
+                request_delegate = delegate.start_request(self, conn)
+                try:
+                    ret = await conn.read_response(request_delegate)
+                except (
+                    iostream.StreamClosedError,
+                    iostream.UnsatisfiableReadError,
+                    asyncio.CancelledError,
+                ):
+                    return
+                except _QuietException:
+                    # This exception was already logged.
+                    conn.close()
+                    return
+                except Exception:
+                    gen_log.error("Uncaught exception", exc_info=True)
+                    conn.close()
+                    return
+                if not ret:
+                    return
+                await asyncio.sleep(0)
+        finally:
+            delegate.on_close(self)
+
+
+DIGITS = re.compile(r"[0-9]+")
+HEXDIGITS = re.compile(r"[0-9a-fA-F]+")
+
+
+def parse_int(s: str) -> int:
     """Parse a non-negative integer from a string."""
-    pass
+    if DIGITS.fullmatch(s) is None:
+        raise ValueError("not an integer: %r" % s)
+    return int(s)


-def parse_hex_int(s: str) ->int:
+def parse_hex_int(s: str) -> int:
     """Parse a non-negative hexadecimal integer from a string."""
-    pass
+    if HEXDIGITS.fullmatch(s) is None:
+        raise ValueError("not a hexadecimal integer: %r" % s)
+    return int(s, 16)
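
These parsers are deliberately stricter than ``int()``, which quietly
accepts signs, surrounding whitespace, and underscores; anything other
than plain (hex) digits is rejected. Illustrative calls::

    parse_int("123")       # 123
    parse_int("+123")      # ValueError, though int("+123") == 123
    parse_int("1_0")       # ValueError, though int("1_0") == 10
    parse_hex_int("1f")    # 31
    parse_hex_int("0x1f")  # ValueError: "x" is not a hex digit
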


-def is_transfer_encoding_chunked(headers: httputil.HTTPHeaders) ->bool:
+def is_transfer_encoding_chunked(headers: httputil.HTTPHeaders) -> bool:
     """Returns true if the headers specify Transfer-Encoding: chunked.

     Raise httputil.HTTPInputError if any other transfer encoding is used.
     """
-    pass
+    # Note that transfer-encoding is an area in which Postel's law can lead
+    # us astray. If a proxy and a backend server are liberal in what they accept,
+    # but accept slightly different things, this can lead to mismatched framing
+    # and request smuggling issues. Therefore we are as strict as possible here
+    # (even technically going beyond the requirements of the RFCs: a value of
+    # ",chunked" is legal but doesn't appear in practice for legitimate traffic)
+    if "Transfer-Encoding" not in headers:
+        return False
+    if "Content-Length" in headers:
+        # Message cannot contain both Content-Length and
+        # Transfer-Encoding headers.
+        # http://tools.ietf.org/html/rfc7230#section-3.3.3
+        raise httputil.HTTPInputError(
+            "Message with both Transfer-Encoding and Content-Length"
+        )
+    if headers["Transfer-Encoding"].lower() == "chunked":
+        return True
+    # We do not support any transfer-encodings other than chunked, and we do not
+    # expect to add any support because the concept of transfer-encoding has
+    # been removed in HTTP/2.
+    raise httputil.HTTPInputError(
+        "Unsupported Transfer-Encoding %s" % headers["Transfer-Encoding"]
+    )
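
The resulting behavior on a few hypothetical header sets::

    from tornado import httputil

    is_transfer_encoding_chunked(
        httputil.HTTPHeaders({"Transfer-Encoding": "chunked"})
    )  # -> True

    is_transfer_encoding_chunked(
        httputil.HTTPHeaders({"Transfer-Encoding": "gzip, chunked"})
    )  # raises HTTPInputError: only plain "chunked" is supported

    h = httputil.HTTPHeaders({"Transfer-Encoding": "chunked"})
    h.add("Content-Length", "10")
    is_transfer_encoding_chunked(h)  # raises HTTPInputError (ambiguous framing)
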
diff --git a/tornado/httpclient.py b/tornado/httpclient.py
index 6069b0be..3011c371 100644
--- a/tornado/httpclient.py
+++ b/tornado/httpclient.py
@@ -35,17 +35,24 @@ To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup::

     AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
 """
+
 import datetime
 import functools
 from io import BytesIO
 import ssl
 import time
 import weakref
-from tornado.concurrent import Future, future_set_result_unless_cancelled, future_set_exception_unless_cancelled
+
+from tornado.concurrent import (
+    Future,
+    future_set_result_unless_cancelled,
+    future_set_exception_unless_cancelled,
+)
 from tornado.escape import utf8, native_str
 from tornado import gen, httputil
 from tornado.ioloop import IOLoop
 from tornado.util import Configurable
+
 from typing import Type, Any, Union, Dict, Callable, Optional, cast


@@ -79,29 +86,42 @@ class HTTPClient(object):

     """

-    def __init__(self, async_client_class:
-        'Optional[Type[AsyncHTTPClient]]'=None, **kwargs: Any) ->None:
+    def __init__(
+        self,
+        async_client_class: "Optional[Type[AsyncHTTPClient]]" = None,
+        **kwargs: Any
+    ) -> None:
+        # Initialize self._closed at the beginning of the constructor
+        # so that an exception raised here doesn't lead to confusing
+        # failures in __del__.
         self._closed = True
         self._io_loop = IOLoop(make_current=False)
         if async_client_class is None:
             async_client_class = AsyncHTTPClient

-        async def make_client() ->'AsyncHTTPClient':
+        # Create the client while our IOLoop is "current", without
+        # clobbering the thread's real current IOLoop (if any).
+        async def make_client() -> "AsyncHTTPClient":
             await gen.sleep(0)
             assert async_client_class is not None
             return async_client_class(**kwargs)
+
         self._async_client = self._io_loop.run_sync(make_client)
         self._closed = False

-    def __del__(self) ->None:
+    def __del__(self) -> None:
         self.close()

-    def close(self) ->None:
+    def close(self) -> None:
         """Closes the HTTPClient, freeing any resources used."""
-        pass
-
-    def fetch(self, request: Union['HTTPRequest', str], **kwargs: Any
-        ) ->'HTTPResponse':
+        if not self._closed:
+            self._async_client.close()
+            self._io_loop.close()
+            self._closed = True
+
+    def fetch(
+        self, request: Union["HTTPRequest", str], **kwargs: Any
+    ) -> "HTTPResponse":
         """Executes a request, returning an `HTTPResponse`.

         The request may be either a string URL or an `HTTPRequest` object.
@@ -111,7 +131,10 @@ class HTTPClient(object):
         If an error occurs during the fetch, we raise an `HTTPError` unless
         the ``raise_error`` keyword argument is set to False.
         """
-        pass
+        response = self._io_loop.run_sync(
+            functools.partial(self._async_client.fetch, request, **kwargs)
+        )
+        return response
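
Typical blocking usage of the restored ``fetch`` (URL illustrative)::

    from tornado.httpclient import HTTPClient, HTTPError

    client = HTTPClient()
    try:
        response = client.fetch("http://example.com/")
        print(response.code, len(response.body))
    except HTTPError as e:
        print("fetch failed:", e)
    finally:
        client.close()  # shuts down the client's private IOLoop
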


 class AsyncHTTPClient(Configurable):
@@ -152,10 +175,27 @@ class AsyncHTTPClient(Configurable):
        The ``io_loop`` argument (deprecated since version 4.1) has been removed.

     """
-    _instance_cache = None

-    def __new__(cls, force_instance: bool=False, **kwargs: Any
-        ) ->'AsyncHTTPClient':
+    _instance_cache = None  # type: Dict[IOLoop, AsyncHTTPClient]
+
+    @classmethod
+    def configurable_base(cls) -> Type[Configurable]:
+        return AsyncHTTPClient
+
+    @classmethod
+    def configurable_default(cls) -> Type[Configurable]:
+        from tornado.simple_httpclient import SimpleAsyncHTTPClient
+
+        return SimpleAsyncHTTPClient
+
+    @classmethod
+    def _async_clients(cls) -> Dict[IOLoop, "AsyncHTTPClient"]:
+        attr_name = "_async_client_dict_" + cls.__name__
+        if not hasattr(cls, attr_name):
+            setattr(cls, attr_name, weakref.WeakKeyDictionary())
+        return getattr(cls, attr_name)
+
+    def __new__(cls, force_instance: bool = False, **kwargs: Any) -> "AsyncHTTPClient":
         io_loop = IOLoop.current()
         if force_instance:
             instance_cache = None
@@ -163,13 +203,24 @@ class AsyncHTTPClient(Configurable):
             instance_cache = cls._async_clients()
         if instance_cache is not None and io_loop in instance_cache:
             return instance_cache[io_loop]
-        instance = super(AsyncHTTPClient, cls).__new__(cls, **kwargs)
+        instance = super(AsyncHTTPClient, cls).__new__(cls, **kwargs)  # type: ignore
+        # Make sure the instance knows which cache to remove itself from.
+        # It can't simply call _async_clients() because we may be in
+        # __new__(AsyncHTTPClient) but instance.__class__ may be
+        # SimpleAsyncHTTPClient.
         instance._instance_cache = instance_cache
         if instance_cache is not None:
             instance_cache[instance.io_loop] = instance
         return instance

-    def close(self) ->None:
+    def initialize(self, defaults: Optional[Dict[str, Any]] = None) -> None:
+        self.io_loop = IOLoop.current()
+        self.defaults = dict(HTTPRequest._DEFAULTS)
+        if defaults is not None:
+            self.defaults.update(defaults)
+        self._closed = False
+
+    def close(self) -> None:
         """Destroys this HTTP client, freeing any file descriptors used.

         This method is **not needed in normal use** due to the way
@@ -182,10 +233,25 @@ class AsyncHTTPClient(Configurable):
         ``close()``.

         """
-        pass
-
-    def fetch(self, request: Union[str, 'HTTPRequest'], raise_error: bool=
-        True, **kwargs: Any) ->'Future[HTTPResponse]':
+        if self._closed:
+            return
+        self._closed = True
+        if self._instance_cache is not None:
+            cached_val = self._instance_cache.pop(self.io_loop, None)
+            # If there's an object other than self in the instance
+            # cache for our IOLoop, something has gotten mixed up. A
+            # value of None appears to be possible when this is called
+            # from a destructor (HTTPClient.__del__) as the weakref
+            # gets cleared before the destructor runs.
+            if cached_val is not None and cached_val is not self:
+                raise RuntimeError("inconsistent AsyncHTTPClient cache")
+
+    def fetch(
+        self,
+        request: Union[str, "HTTPRequest"],
+        raise_error: bool = True,
+        **kwargs: Any
+    ) -> "Future[HTTPResponse]":
         """Executes a request, asynchronously returning an `HTTPResponse`.

         The request may be either a string URL or an `HTTPRequest` object.
@@ -214,11 +280,41 @@ class AsyncHTTPClient(Configurable):
            `HTTPError` raised when a non-200 response code is used,
            instead of suppressing all errors.
         """
-        pass
+        if self._closed:
+            raise RuntimeError("fetch() called on closed AsyncHTTPClient")
+        if not isinstance(request, HTTPRequest):
+            request = HTTPRequest(url=request, **kwargs)
+        else:
+            if kwargs:
+                raise ValueError(
+                    "kwargs can't be used if request is an HTTPRequest object"
+                )
+        # We may modify this (to add Host, Accept-Encoding, etc),
+        # so make sure we don't modify the caller's object.  This is also
+        # where normal dicts get converted to HTTPHeaders objects.
+        request.headers = httputil.HTTPHeaders(request.headers)
+        request_proxy = _RequestProxy(request, self.defaults)
+        future = Future()  # type: Future[HTTPResponse]
+
+        def handle_response(response: "HTTPResponse") -> None:
+            if response.error:
+                if raise_error or not response._error_is_response_code:
+                    future_set_exception_unless_cancelled(future, response.error)
+                    return
+            future_set_result_unless_cancelled(future, response)
+
+        self.fetch_impl(cast(HTTPRequest, request_proxy), handle_response)
+        return future
+
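The asynchronous counterpart, using ``raise_error=False`` to inspect
error responses instead of catching exceptions (URL illustrative)::

    from tornado.httpclient import AsyncHTTPClient

    async def probe(url: str) -> int:
        client = AsyncHTTPClient()
        response = await client.fetch(url, raise_error=False)
        return response.code
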
+    def fetch_impl(
+        self, request: "HTTPRequest", callback: Callable[["HTTPResponse"], None]
+    ) -> None:
+        raise NotImplementedError()

     @classmethod
-    def configure(cls, impl: 'Union[None, str, Type[Configurable]]', **
-        kwargs: Any) ->None:
+    def configure(
+        cls, impl: "Union[None, str, Type[Configurable]]", **kwargs: Any
+    ) -> None:
         """Configures the `AsyncHTTPClient` subclass to use.

         ``AsyncHTTPClient()`` actually creates an instance of a subclass.
@@ -237,38 +333,67 @@ class AsyncHTTPClient(Configurable):

            AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
         """
-        pass
+        super(AsyncHTTPClient, cls).configure(impl, **kwargs)


 class HTTPRequest(object):
     """HTTP client request object."""
-    _headers = None
-    _DEFAULTS = dict(connect_timeout=20.0, request_timeout=20.0,
-        follow_redirects=True, max_redirects=5, decompress_response=True,
-        proxy_password='', allow_nonstandard_methods=False, validate_cert=True)
-
-    def __init__(self, url: str, method: str='GET', headers: Optional[Union
-        [Dict[str, str], httputil.HTTPHeaders]]=None, body: Optional[Union[
-        bytes, str]]=None, auth_username: Optional[str]=None, auth_password:
-        Optional[str]=None, auth_mode: Optional[str]=None, connect_timeout:
-        Optional[float]=None, request_timeout: Optional[float]=None,
-        if_modified_since: Optional[Union[float, datetime.datetime]]=None,
-        follow_redirects: Optional[bool]=None, max_redirects: Optional[int]
-        =None, user_agent: Optional[str]=None, use_gzip: Optional[bool]=
-        None, network_interface: Optional[str]=None, streaming_callback:
-        Optional[Callable[[bytes], None]]=None, header_callback: Optional[
-        Callable[[str], None]]=None, prepare_curl_callback: Optional[
-        Callable[[Any], None]]=None, proxy_host: Optional[str]=None,
-        proxy_port: Optional[int]=None, proxy_username: Optional[str]=None,
-        proxy_password: Optional[str]=None, proxy_auth_mode: Optional[str]=
-        None, allow_nonstandard_methods: Optional[bool]=None, validate_cert:
-        Optional[bool]=None, ca_certs: Optional[str]=None, allow_ipv6:
-        Optional[bool]=None, client_key: Optional[str]=None, client_cert:
-        Optional[str]=None, body_producer: Optional[Callable[[Callable[[
-        bytes], None]], 'Future[None]']]=None, expect_100_continue: bool=
-        False, decompress_response: Optional[bool]=None, ssl_options:
-        Optional[Union[Dict[str, Any], ssl.SSLContext]]=None) ->None:
-        """All parameters except ``url`` are optional.
+
+    _headers = None  # type: Union[Dict[str, str], httputil.HTTPHeaders]
+
+    # Default values for HTTPRequest parameters.
+    # Merged with the values on the request object by AsyncHTTPClient
+    # implementations.
+    _DEFAULTS = dict(
+        connect_timeout=20.0,
+        request_timeout=20.0,
+        follow_redirects=True,
+        max_redirects=5,
+        decompress_response=True,
+        proxy_password="",
+        allow_nonstandard_methods=False,
+        validate_cert=True,
+    )
+
+    def __init__(
+        self,
+        url: str,
+        method: str = "GET",
+        headers: Optional[Union[Dict[str, str], httputil.HTTPHeaders]] = None,
+        body: Optional[Union[bytes, str]] = None,
+        auth_username: Optional[str] = None,
+        auth_password: Optional[str] = None,
+        auth_mode: Optional[str] = None,
+        connect_timeout: Optional[float] = None,
+        request_timeout: Optional[float] = None,
+        if_modified_since: Optional[Union[float, datetime.datetime]] = None,
+        follow_redirects: Optional[bool] = None,
+        max_redirects: Optional[int] = None,
+        user_agent: Optional[str] = None,
+        use_gzip: Optional[bool] = None,
+        network_interface: Optional[str] = None,
+        streaming_callback: Optional[Callable[[bytes], None]] = None,
+        header_callback: Optional[Callable[[str], None]] = None,
+        prepare_curl_callback: Optional[Callable[[Any], None]] = None,
+        proxy_host: Optional[str] = None,
+        proxy_port: Optional[int] = None,
+        proxy_username: Optional[str] = None,
+        proxy_password: Optional[str] = None,
+        proxy_auth_mode: Optional[str] = None,
+        allow_nonstandard_methods: Optional[bool] = None,
+        validate_cert: Optional[bool] = None,
+        ca_certs: Optional[str] = None,
+        allow_ipv6: Optional[bool] = None,
+        client_key: Optional[str] = None,
+        client_cert: Optional[str] = None,
+        body_producer: Optional[
+            Callable[[Callable[[bytes], None]], "Future[None]"]
+        ] = None,
+        expect_100_continue: bool = False,
+        decompress_response: Optional[bool] = None,
+        ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
+    ) -> None:
+        r"""All parameters except ``url`` are optional.

         :arg str url: URL to fetch
         :arg str method: HTTP method, e.g. "GET" or "POST"
@@ -318,8 +443,8 @@ class HTTPRequest(object):
            the final response.
         :arg collections.abc.Callable header_callback: If set, ``header_callback`` will
            be run with each header line as it is received (including the
-           first line, e.g. ``HTTP/1.0 200 OK\\r\\n``, and a final line
-           containing only ``\\r\\n``.  All lines include the trailing newline
+           first line, e.g. ``HTTP/1.0 200 OK\r\n``, and a final line
+           containing only ``\r\n``.  All lines include the trailing newline
            characters).  ``HTTPResponse.headers`` will be empty in the final
            response.  This is most useful in conjunction with
            ``streaming_callback``, because it's the only way to get access to
@@ -381,10 +506,13 @@ class HTTPRequest(object):
         .. versionadded:: 4.5
            The ``proxy_auth_mode`` argument.
         """
-        self.headers = headers
+        # Note that some of these attributes go through property setters
+        # defined below.
+        self.headers = headers  # type: ignore
         if if_modified_since:
-            self.headers['If-Modified-Since'] = httputil.format_timestamp(
-                if_modified_since)
+            self.headers["If-Modified-Since"] = httputil.format_timestamp(
+                if_modified_since
+            )
         self.proxy_host = proxy_host
         self.proxy_port = proxy_port
         self.proxy_username = proxy_username
@@ -392,7 +520,7 @@ class HTTPRequest(object):
         self.proxy_auth_mode = proxy_auth_mode
         self.url = url
         self.method = method
-        self.body = body
+        self.body = body  # type: ignore
         self.body_producer = body_producer
         self.auth_username = auth_username
         self.auth_password = auth_password
@@ -403,7 +531,7 @@ class HTTPRequest(object):
         self.max_redirects = max_redirects
         self.user_agent = user_agent
         if decompress_response is not None:
-            self.decompress_response = decompress_response
+            self.decompress_response = decompress_response  # type: Optional[bool]
         else:
             self.decompress_response = use_gzip
         self.network_interface = network_interface
@@ -420,6 +548,28 @@ class HTTPRequest(object):
         self.expect_100_continue = expect_100_continue
         self.start_time = time.time()

+    @property
+    def headers(self) -> httputil.HTTPHeaders:
+        # TODO: headers may actually be a plain dict until fairly late in
+        # the process (AsyncHTTPClient.fetch), but practically speaking,
+        # whenever the property is used they're already HTTPHeaders.
+        return self._headers  # type: ignore
+
+    @headers.setter
+    def headers(self, value: Union[Dict[str, str], httputil.HTTPHeaders]) -> None:
+        if value is None:
+            self._headers = httputil.HTTPHeaders()
+        else:
+            self._headers = value  # type: ignore
+
+    @property
+    def body(self) -> bytes:
+        return self._body
+
+    @body.setter
+    def body(self, value: Union[bytes, str]) -> None:
+        self._body = utf8(value)
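
The setter means callers may pass either ``bytes`` or ``str``; text is
utf-8 encoded on assignment (illustrative)::

    req = HTTPRequest("http://example.com/", method="POST", body="café")
    assert req.body == b"caf\xc3\xa9"  # str bodies are utf-8 encoded
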
+

 class HTTPResponse(object):
     """HTTP Response object.
@@ -470,28 +620,37 @@ class HTTPResponse(object):
        is excluded in both implementations. ``request_time`` is now more accurate for
        ``curl_httpclient`` because it uses a monotonic clock when available.
     """
-    error = None
+
+    # I'm not sure why these don't get type-inferred from the references in __init__.
+    error = None  # type: Optional[BaseException]
     _error_is_response_code = False
-    request = None
-
-    def __init__(self, request: HTTPRequest, code: int, headers: Optional[
-        httputil.HTTPHeaders]=None, buffer: Optional[BytesIO]=None,
-        effective_url: Optional[str]=None, error: Optional[BaseException]=
-        None, request_time: Optional[float]=None, time_info: Optional[Dict[
-        str, float]]=None, reason: Optional[str]=None, start_time: Optional
-        [float]=None) ->None:
+    request = None  # type: HTTPRequest
+
+    def __init__(
+        self,
+        request: HTTPRequest,
+        code: int,
+        headers: Optional[httputil.HTTPHeaders] = None,
+        buffer: Optional[BytesIO] = None,
+        effective_url: Optional[str] = None,
+        error: Optional[BaseException] = None,
+        request_time: Optional[float] = None,
+        time_info: Optional[Dict[str, float]] = None,
+        reason: Optional[str] = None,
+        start_time: Optional[float] = None,
+    ) -> None:
         if isinstance(request, _RequestProxy):
             self.request = request.request
         else:
             self.request = request
         self.code = code
-        self.reason = reason or httputil.responses.get(code, 'Unknown')
+        self.reason = reason or httputil.responses.get(code, "Unknown")
         if headers is not None:
             self.headers = headers
         else:
             self.headers = httputil.HTTPHeaders()
         self.buffer = buffer
-        self._body = None
+        self._body = None  # type: Optional[bytes]
         if effective_url is None:
             self.effective_url = request.url
         else:
@@ -500,8 +659,7 @@ class HTTPResponse(object):
         if error is None:
             if self.code < 200 or self.code >= 300:
                 self._error_is_response_code = True
-                self.error = HTTPError(self.code, message=self.reason,
-                    response=self)
+                self.error = HTTPError(self.code, message=self.reason, response=self)
             else:
                 self.error = None
         else:
@@ -510,13 +668,23 @@ class HTTPResponse(object):
         self.request_time = request_time
         self.time_info = time_info or {}

-    def rethrow(self) ->None:
+    @property
+    def body(self) -> bytes:
+        if self.buffer is None:
+            return b""
+        elif self._body is None:
+            self._body = self.buffer.getvalue()
+
+        return self._body
+
+    def rethrow(self) -> None:
         """If there was an error on the request, raise an `HTTPError`."""
-        pass
+        if self.error:
+            raise self.error

-    def __repr__(self) ->str:
-        args = ','.join('%s=%r' % i for i in sorted(self.__dict__.items()))
-        return '%s(%s)' % (self.__class__.__name__, args)
+    def __repr__(self) -> str:
+        args = ",".join("%s=%r" % i for i in sorted(self.__dict__.items()))
+        return "%s(%s)" % (self.__class__.__name__, args)


 class HTTPClientError(Exception):
@@ -540,15 +708,24 @@ class HTTPClientError(Exception):
        as an alias.
     """

-    def __init__(self, code: int, message: Optional[str]=None, response:
-        Optional[HTTPResponse]=None) ->None:
+    def __init__(
+        self,
+        code: int,
+        message: Optional[str] = None,
+        response: Optional[HTTPResponse] = None,
+    ) -> None:
         self.code = code
-        self.message = message or httputil.responses.get(code, 'Unknown')
+        self.message = message or httputil.responses.get(code, "Unknown")
         self.response = response
         super().__init__(code, message, response)

-    def __str__(self) ->str:
-        return 'HTTP %d: %s' % (self.code, self.message)
+    def __str__(self) -> str:
+        return "HTTP %d: %s" % (self.code, self.message)
+
+    # There is a cyclic reference between self and self.response,
+    # which breaks the default __repr__ implementation.
+    # (especially on pypy, which doesn't have the same recursion
+    # detection as cpython).
     __repr__ = __str__


@@ -561,12 +738,13 @@ class _RequestProxy(object):
     Used internally by AsyncHTTPClient implementations.
     """

-    def __init__(self, request: HTTPRequest, defaults: Optional[Dict[str, Any]]
-        ) ->None:
+    def __init__(
+        self, request: HTTPRequest, defaults: Optional[Dict[str, Any]]
+    ) -> None:
         self.request = request
         self.defaults = defaults

-    def __getattr__(self, name: str) ->Any:
+    def __getattr__(self, name: str) -> Any:
         request_attr = getattr(self.request, name)
         if request_attr is not None:
             return request_attr
@@ -576,5 +754,37 @@ class _RequestProxy(object):
             return None
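
The attribute fallback works like this (hypothetical values)::

    req = HTTPRequest("http://example.com/", request_timeout=5.0)
    proxy = _RequestProxy(req, HTTPRequest._DEFAULTS)
    proxy.request_timeout  # 5.0, set on the request itself
    proxy.connect_timeout  # 20.0, filled in from _DEFAULTS
    proxy.user_agent       # None: absent from both request and defaults
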


-if __name__ == '__main__':
+def main() -> None:
+    from tornado.options import define, options, parse_command_line
+
+    define("print_headers", type=bool, default=False)
+    define("print_body", type=bool, default=True)
+    define("follow_redirects", type=bool, default=True)
+    define("validate_cert", type=bool, default=True)
+    define("proxy_host", type=str)
+    define("proxy_port", type=int)
+    args = parse_command_line()
+    client = HTTPClient()
+    for arg in args:
+        try:
+            response = client.fetch(
+                arg,
+                follow_redirects=options.follow_redirects,
+                validate_cert=options.validate_cert,
+                proxy_host=options.proxy_host,
+                proxy_port=options.proxy_port,
+            )
+        except HTTPError as e:
+            if e.response is not None:
+                response = e.response
+            else:
+                raise
+        if options.print_headers:
+            print(response.headers)
+        if options.print_body:
+            print(native_str(response.body))
+    client.close()
+
+
+if __name__ == "__main__":
     main()
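
With the `main()` entry point in place, the module doubles as a tiny command-line HTTP client (the equivalent of `python -m tornado.httpclient --print_headers <url>`). The same thing can be driven from Python, since `parse_command_line` consumes `sys.argv`; a hypothetical invocation:

    import sys
    from tornado.httpclient import main

    # Any URL accepted by HTTPClient.fetch works here.
    sys.argv = ["httpclient", "--print_headers", "http://example.com/"]
    main()
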
diff --git a/tornado/httpserver.py b/tornado/httpserver.py
index 1a203b99..757f711b 100644
--- a/tornado/httpserver.py
+++ b/tornado/httpserver.py
@@ -1,3 +1,18 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """A non-blocking, single-threaded HTTP server.

 Typical applications have little direct interaction with the `HTTPServer`
@@ -9,8 +24,10 @@ class except to start a server at the beginning of the process
    The ``HTTPRequest`` class that used to live in this module has been moved
    to `tornado.httputil.HTTPServerRequest`.  The old name remains as an alias.
 """
+
 import socket
 import ssl
+
 from tornado.escape import native_str
 from tornado.http1connection import HTTP1ServerConnection, HTTP1ConnectionParameters
 from tornado import httputil
@@ -18,15 +35,16 @@ from tornado import iostream
 from tornado import netutil
 from tornado.tcpserver import TCPServer
 from tornado.util import Configurable
+
 import typing
 from typing import Union, Any, Dict, Callable, List, Type, Tuple, Optional, Awaitable
+
 if typing.TYPE_CHECKING:
-    from typing import Set
+    from typing import Set  # noqa: F401


-class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate
-    ):
-    """A non-blocking, single-threaded HTTP server.
+class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate):
+    r"""A non-blocking, single-threaded HTTP server.

     A server is defined by a subclass of `.HTTPServerConnectionDelegate`,
     or, for backwards compatibility, a callback that takes an
@@ -135,10 +153,67 @@ class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate
        The ``io_loop`` argument has been removed.
     """

-    def __init__(self, *args: Any, **kwargs: Any) ->None:
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # Ignore args to __init__; real initialization belongs in
+        # initialize since we're Configurable. (there's something
+        # weird in initialization order between this class,
+        # Configurable, and TCPServer so we can't leave __init__ out
+        # completely)
         pass

-    async def close_all_connections(self) ->None:
+    def initialize(
+        self,
+        request_callback: Union[
+            httputil.HTTPServerConnectionDelegate,
+            Callable[[httputil.HTTPServerRequest], None],
+        ],
+        no_keep_alive: bool = False,
+        xheaders: bool = False,
+        ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
+        protocol: Optional[str] = None,
+        decompress_request: bool = False,
+        chunk_size: Optional[int] = None,
+        max_header_size: Optional[int] = None,
+        idle_connection_timeout: Optional[float] = None,
+        body_timeout: Optional[float] = None,
+        max_body_size: Optional[int] = None,
+        max_buffer_size: Optional[int] = None,
+        trusted_downstream: Optional[List[str]] = None,
+    ) -> None:
+        # This method's signature is not extracted with autodoc
+        # because we want its arguments to appear on the class
+        # constructor. When changing this signature, also update the
+        # copy in httpserver.rst.
+        self.request_callback = request_callback
+        self.xheaders = xheaders
+        self.protocol = protocol
+        self.conn_params = HTTP1ConnectionParameters(
+            decompress=decompress_request,
+            chunk_size=chunk_size,
+            max_header_size=max_header_size,
+            header_timeout=idle_connection_timeout or 3600,
+            max_body_size=max_body_size,
+            body_timeout=body_timeout,
+            no_keep_alive=no_keep_alive,
+        )
+        TCPServer.__init__(
+            self,
+            ssl_options=ssl_options,
+            max_buffer_size=max_buffer_size,
+            read_chunk_size=chunk_size,
+        )
+        self._connections = set()  # type: Set[HTTP1ServerConnection]
+        self.trusted_downstream = trusted_downstream
+
+    @classmethod
+    def configurable_base(cls) -> Type[Configurable]:
+        return HTTPServer
+
+    @classmethod
+    def configurable_default(cls) -> Type[Configurable]:
+        return HTTPServer
+
+    async def close_all_connections(self) -> None:
         """Close all open connections and asynchronously wait for them to finish.

         This method is used in combination with `~.TCPServer.stop` to
@@ -152,72 +227,184 @@ class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate
         Note that this method is a coroutine and must be called with ``await``.

         """
-        pass
+        while self._connections:
+            # Peek at an arbitrary element of the set
+            conn = next(iter(self._connections))
+            await conn.close()
+
+    def handle_stream(self, stream: iostream.IOStream, address: Tuple) -> None:
+        context = _HTTPRequestContext(
+            stream, address, self.protocol, self.trusted_downstream
+        )
+        conn = HTTP1ServerConnection(stream, self.conn_params, context)
+        self._connections.add(conn)
+        conn.start_serving(self)
+
+    def start_request(
+        self, server_conn: object, request_conn: httputil.HTTPConnection
+    ) -> httputil.HTTPMessageDelegate:
+        if isinstance(self.request_callback, httputil.HTTPServerConnectionDelegate):
+            delegate = self.request_callback.start_request(server_conn, request_conn)
+        else:
+            delegate = _CallableAdapter(self.request_callback, request_conn)

+        if self.xheaders:
+            delegate = _ProxyAdapter(delegate, request_conn)

-class _CallableAdapter(httputil.HTTPMessageDelegate):
+        return delegate

-    def __init__(self, request_callback: Callable[[httputil.
-        HTTPServerRequest], None], request_conn: httputil.HTTPConnection
-        ) ->None:
+    def on_close(self, server_conn: object) -> None:
+        self._connections.remove(typing.cast(HTTP1ServerConnection, server_conn))
+
+
+class _CallableAdapter(httputil.HTTPMessageDelegate):
+    def __init__(
+        self,
+        request_callback: Callable[[httputil.HTTPServerRequest], None],
+        request_conn: httputil.HTTPConnection,
+    ) -> None:
         self.connection = request_conn
         self.request_callback = request_callback
-        self.request = None
+        self.request = None  # type: Optional[httputil.HTTPServerRequest]
         self.delegate = None
-        self._chunks = []
+        self._chunks = []  # type: List[bytes]
+
+    def headers_received(
+        self,
+        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
+        headers: httputil.HTTPHeaders,
+    ) -> Optional[Awaitable[None]]:
+        self.request = httputil.HTTPServerRequest(
+            connection=self.connection,
+            start_line=typing.cast(httputil.RequestStartLine, start_line),
+            headers=headers,
+        )
+        return None
+
+    def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
+        self._chunks.append(chunk)
+        return None
+
+    def finish(self) -> None:
+        assert self.request is not None
+        self.request.body = b"".join(self._chunks)
+        self.request._parse_body()
+        self.request_callback(self.request)
+
+    def on_connection_close(self) -> None:
+        del self._chunks


 class _HTTPRequestContext(object):
-
-    def __init__(self, stream: iostream.IOStream, address: Tuple, protocol:
-        Optional[str], trusted_downstream: Optional[List[str]]=None) ->None:
+    def __init__(
+        self,
+        stream: iostream.IOStream,
+        address: Tuple,
+        protocol: Optional[str],
+        trusted_downstream: Optional[List[str]] = None,
+    ) -> None:
         self.address = address
+        # Save the socket's address family now so we know how to
+        # interpret self.address even after the stream is closed
+        # and its socket attribute replaced with None.
         if stream.socket is not None:
             self.address_family = stream.socket.family
         else:
             self.address_family = None
-        if self.address_family in (socket.AF_INET, socket.AF_INET6
-            ) and address is not None:
+        # In HTTPServerRequest we want an IP, not a full socket address.
+        if (
+            self.address_family in (socket.AF_INET, socket.AF_INET6)
+            and address is not None
+        ):
             self.remote_ip = address[0]
         else:
-            self.remote_ip = '0.0.0.0'
+            # Unix (or other) socket; fake the remote address.
+            self.remote_ip = "0.0.0.0"
         if protocol:
             self.protocol = protocol
         elif isinstance(stream, iostream.SSLIOStream):
-            self.protocol = 'https'
+            self.protocol = "https"
         else:
-            self.protocol = 'http'
+            self.protocol = "http"
         self._orig_remote_ip = self.remote_ip
         self._orig_protocol = self.protocol
         self.trusted_downstream = set(trusted_downstream or [])

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         if self.address_family in (socket.AF_INET, socket.AF_INET6):
             return self.remote_ip
         elif isinstance(self.address, bytes):
+            # Python 3 with the -bb option warns about str(bytes),
+            # so convert it explicitly.
+            # Unix socket addresses are str on mac but bytes on linux.
             return native_str(self.address)
         else:
             return str(self.address)

-    def _apply_xheaders(self, headers: httputil.HTTPHeaders) ->None:
+    def _apply_xheaders(self, headers: httputil.HTTPHeaders) -> None:
         """Rewrite the ``remote_ip`` and ``protocol`` fields."""
-        pass
-
-    def _unapply_xheaders(self) ->None:
+        # Squid uses X-Forwarded-For, others use X-Real-Ip
+        ip = headers.get("X-Forwarded-For", self.remote_ip)
+        # Skip trusted downstream hosts in X-Forwarded-For list
+        for ip in (cand.strip() for cand in reversed(ip.split(","))):
+            if ip not in self.trusted_downstream:
+                break
+        ip = headers.get("X-Real-Ip", ip)
+        if netutil.is_valid_ip(ip):
+            self.remote_ip = ip
+        # AWS uses X-Forwarded-Proto
+        proto_header = headers.get(
+            "X-Scheme", headers.get("X-Forwarded-Proto", self.protocol)
+        )
+        if proto_header:
+            # use only the last proto entry if there is more than one
+            # TODO: support trusting multiple layers of proxied protocol
+            proto_header = proto_header.split(",")[-1].strip()
+        if proto_header in ("http", "https"):
+            self.protocol = proto_header
+
+    def _unapply_xheaders(self) -> None:
         """Undo changes from `_apply_xheaders`.

         Xheaders are per-request so they should not leak to the next
         request on the same connection.
         """
-        pass
+        self.remote_ip = self._orig_remote_ip
+        self.protocol = self._orig_protocol


 class _ProxyAdapter(httputil.HTTPMessageDelegate):
-
-    def __init__(self, delegate: httputil.HTTPMessageDelegate, request_conn:
-        httputil.HTTPConnection) ->None:
+    def __init__(
+        self,
+        delegate: httputil.HTTPMessageDelegate,
+        request_conn: httputil.HTTPConnection,
+    ) -> None:
         self.connection = request_conn
         self.delegate = delegate

+    def headers_received(
+        self,
+        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
+        headers: httputil.HTTPHeaders,
+    ) -> Optional[Awaitable[None]]:
+        # TODO: either make context an official part of the
+        # HTTPConnection interface or figure out some other way to do this.
+        self.connection.context._apply_xheaders(headers)  # type: ignore
+        return self.delegate.headers_received(start_line, headers)
+
+    def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
+        return self.delegate.data_received(chunk)
+
+    def finish(self) -> None:
+        self.delegate.finish()
+        self._cleanup()
+
+    def on_connection_close(self) -> None:
+        self.delegate.on_connection_close()
+        self._cleanup()
+
+    def _cleanup(self) -> None:
+        self.connection.context._unapply_xheaders()  # type: ignore
+

 HTTPRequest = httputil.HTTPServerRequest
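
Taken together, `initialize`, `handle_stream`, and `start_request` implement the accept-and-dispatch path, while `close_all_connections` drains it. A serving sketch, assuming the callback style and an arbitrary free port (`listen` and `IOLoop.start` come from `TCPServer` and `IOLoop`, outside this diff):

    from tornado import httputil, ioloop
    from tornado.httpserver import HTTPServer

    def handle_request(request: httputil.HTTPServerRequest) -> None:
        # _CallableAdapter buffers the body, then calls back here.
        assert request.connection is not None
        request.connection.write_headers(
            httputil.ResponseStartLine("HTTP/1.1", 200, "OK"),
            httputil.HTTPHeaders({"Content-Length": "2"}),
            b"ok",
        )
        request.connection.finish()

    server = HTTPServer(handle_request, xheaders=True,
                        trusted_downstream=["10.0.0.1"])
    server.listen(8888)
    ioloop.IOLoop.current().start()
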
diff --git a/tornado/httputil.py b/tornado/httputil.py
index 907aef4e..9ce992d8 100644
--- a/tornado/httputil.py
+++ b/tornado/httputil.py
@@ -1,8 +1,24 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """HTTP utility code shared by clients and servers.

 This module also defines the `HTTPServerRequest` class which is exposed
 via `tornado.web.RequestHandler.request`.
 """
+
 import calendar
 import collections.abc
 import copy
@@ -16,27 +32,48 @@ from ssl import SSLError
 import time
 import unicodedata
 from urllib.parse import urlencode, urlparse, urlunparse, parse_qsl
+
 from tornado.escape import native_str, parse_qs_bytes, utf8
 from tornado.log import gen_log
 from tornado.util import ObjectDict, unicode_type
+
+
+# responses is unused in this file, but we re-export it to other files.
+# Reference it so pyflakes doesn't complain.
 responses
+
 import typing
-from typing import Tuple, Iterable, List, Mapping, Iterator, Dict, Union, Optional, Awaitable, Generator, AnyStr
+from typing import (
+    Tuple,
+    Iterable,
+    List,
+    Mapping,
+    Iterator,
+    Dict,
+    Union,
+    Optional,
+    Awaitable,
+    Generator,
+    AnyStr,
+)
+
 if typing.TYPE_CHECKING:
-    from typing import Deque
-    from asyncio import Future
-    import unittest
-HTTP_WHITESPACE = ' \t'
+    from typing import Deque  # noqa: F401
+    from asyncio import Future  # noqa: F401
+    import unittest  # noqa: F401
+
+# To be used with str.strip() and related methods.
+HTTP_WHITESPACE = " \t"


 @lru_cache(1000)
-def _normalize_header(name: str) ->str:
+def _normalize_header(name: str) -> str:
     """Map a header name to Http-Header-Case.

     >>> _normalize_header("coNtent-TYPE")
     'Content-Type'
     """
-    pass
+    return "-".join([w.capitalize() for w in name.split("-")])


 class HTTPHeaders(collections.abc.MutableMapping):
@@ -69,49 +106,63 @@ class HTTPHeaders(collections.abc.MutableMapping):
     """

     @typing.overload
-    def __init__(self, __arg: Mapping[str, List[str]]) ->None:
+    def __init__(self, __arg: Mapping[str, List[str]]) -> None:
         pass

-    @typing.overload
-    def __init__(self, __arg: Mapping[str, str]) ->None:
+    @typing.overload  # noqa: F811
+    def __init__(self, __arg: Mapping[str, str]) -> None:
         pass

-    @typing.overload
-    def __init__(self, *args: Tuple[str, str]) ->None:
+    @typing.overload  # noqa: F811
+    def __init__(self, *args: Tuple[str, str]) -> None:
         pass

-    @typing.overload
-    def __init__(self, **kwargs: str) ->None:
+    @typing.overload  # noqa: F811
+    def __init__(self, **kwargs: str) -> None:
         pass

-    def __init__(self, *args: typing.Any, **kwargs: str) ->None:
-        self._dict = {}
-        self._as_list = {}
-        self._last_key = None
-        if len(args) == 1 and len(kwargs) == 0 and isinstance(args[0],
-            HTTPHeaders):
+    def __init__(self, *args: typing.Any, **kwargs: str) -> None:  # noqa: F811
+        self._dict = {}  # type: typing.Dict[str, str]
+        self._as_list = {}  # type: typing.Dict[str, typing.List[str]]
+        self._last_key = None  # type: Optional[str]
+        if len(args) == 1 and len(kwargs) == 0 and isinstance(args[0], HTTPHeaders):
+            # Copy constructor
             for k, v in args[0].get_all():
                 self.add(k, v)
         else:
+            # Dict-style initialization
             self.update(*args, **kwargs)

-    def add(self, name: str, value: str) ->None:
+    # new public methods
+
+    def add(self, name: str, value: str) -> None:
         """Adds a new value for the given key."""
-        pass
+        norm_name = _normalize_header(name)
+        self._last_key = norm_name
+        if norm_name in self:
+            self._dict[norm_name] = (
+                native_str(self[norm_name]) + "," + native_str(value)
+            )
+            self._as_list[norm_name].append(value)
+        else:
+            self[norm_name] = value

-    def get_list(self, name: str) ->List[str]:
+    def get_list(self, name: str) -> List[str]:
         """Returns all values for the given header as a list."""
-        pass
+        norm_name = _normalize_header(name)
+        return self._as_list.get(norm_name, [])

-    def get_all(self) ->Iterable[Tuple[str, str]]:
+    def get_all(self) -> Iterable[Tuple[str, str]]:
         """Returns an iterable of all (name, value) pairs.

         If a header has multiple values, multiple pairs will be
         returned with the same name.
         """
-        pass
+        for name, values in self._as_list.items():
+            for value in values:
+                yield (name, value)

-    def parse_line(self, line: str) ->None:
+    def parse_line(self, line: str) -> None:
         """Updates the dictionary with a single header line.

         >>> h = HTTPHeaders()
@@ -119,10 +170,22 @@ class HTTPHeaders(collections.abc.MutableMapping):
         >>> h.get('content-type')
         'text/html'
         """
-        pass
+        if line[0].isspace():
+            # continuation of a multi-line header
+            if self._last_key is None:
+                raise HTTPInputError("first header line cannot start with whitespace")
+            new_part = " " + line.lstrip(HTTP_WHITESPACE)
+            self._as_list[self._last_key][-1] += new_part
+            self._dict[self._last_key] += new_part
+        else:
+            try:
+                name, value = line.split(":", 1)
+            except ValueError:
+                raise HTTPInputError("no colon in header line")
+            self.add(name, value.strip(HTTP_WHITESPACE))

     @classmethod
-    def parse(cls, headers: str) ->'HTTPHeaders':
+    def parse(cls, headers: str) -> "HTTPHeaders":
         """Returns a dictionary from HTTP header text.

         >>> h = HTTPHeaders.parse("Content-Type: text/html\\r\\nContent-Length: 42\\r\\n")
@@ -135,33 +198,52 @@ class HTTPHeaders(collections.abc.MutableMapping):
            mix of `KeyError`, and `ValueError`.

         """
-        pass
-
-    def __setitem__(self, name: str, value: str) ->None:
+        h = cls()
+        # RFC 7230 section 3.5: a recipient MAY recognize a single LF as a line
+        # terminator and ignore any preceding CR.
+        for line in headers.split("\n"):
+            if line.endswith("\r"):
+                line = line[:-1]
+            if line:
+                h.parse_line(line)
+        return h
+
+    # MutableMapping abstract method implementations.
+
+    def __setitem__(self, name: str, value: str) -> None:
         norm_name = _normalize_header(name)
         self._dict[norm_name] = value
         self._as_list[norm_name] = [value]

-    def __getitem__(self, name: str) ->str:
+    def __getitem__(self, name: str) -> str:
         return self._dict[_normalize_header(name)]

-    def __delitem__(self, name: str) ->None:
+    def __delitem__(self, name: str) -> None:
         norm_name = _normalize_header(name)
         del self._dict[norm_name]
         del self._as_list[norm_name]

-    def __len__(self) ->int:
+    def __len__(self) -> int:
         return len(self._dict)

-    def __iter__(self) ->Iterator[typing.Any]:
+    def __iter__(self) -> Iterator[typing.Any]:
         return iter(self._dict)
+
+    def copy(self) -> "HTTPHeaders":
+        # defined in dict but not in MutableMapping.
+        return HTTPHeaders(self)
+
+    # Use our overridden copy method for the copy.copy module.
+    # This makes shallow copies one level deeper, but preserves
+    # the appearance that HTTPHeaders is a single container.
     __copy__ = copy

-    def __str__(self) ->str:
+    def __str__(self) -> str:
         lines = []
         for name, value in self.get_all():
-            lines.append('%s: %s\n' % (name, value))
-        return ''.join(lines)
+            lines.append("%s: %s\n" % (name, value))
+        return "".join(lines)
+
     __unicode__ = __str__


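The dict view and the list view now diverge on purpose: repeated headers are comma-joined in `_dict` but kept separate in `_as_list`, and `parse` tolerates both CRLF and bare LF line endings. A quick behavioral sketch:

    from tornado.httputil import HTTPHeaders

    h = HTTPHeaders.parse("Set-Cookie: a=b\r\nSet-Cookie: c=d\n")
    assert h["set-cookie"] == "a=b,c=d"               # joined, case-insensitive
    assert h.get_list("Set-Cookie") == ["a=b", "c=d"]
    assert list(h.get_all()) == [("Set-Cookie", "a=b"), ("Set-Cookie", "c=d")]
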
@@ -257,54 +339,90 @@ class HTTPServerRequest(object):
     .. versionchanged:: 4.0
        Moved from ``tornado.httpserver.HTTPRequest``.
     """
-    path = None
-    query = None
-    _body_future = None
-
-    def __init__(self, method: Optional[str]=None, uri: Optional[str]=None,
-        version: str='HTTP/1.0', headers: Optional[HTTPHeaders]=None, body:
-        Optional[bytes]=None, host: Optional[str]=None, files: Optional[
-        Dict[str, List['HTTPFile']]]=None, connection: Optional[
-        'HTTPConnection']=None, start_line: Optional['RequestStartLine']=
-        None, server_connection: Optional[object]=None) ->None:
+
+    path = None  # type: str
+    query = None  # type: str
+
+    # HACK: Used for stream_request_body
+    _body_future = None  # type: Future[None]
+
+    def __init__(
+        self,
+        method: Optional[str] = None,
+        uri: Optional[str] = None,
+        version: str = "HTTP/1.0",
+        headers: Optional[HTTPHeaders] = None,
+        body: Optional[bytes] = None,
+        host: Optional[str] = None,
+        files: Optional[Dict[str, List["HTTPFile"]]] = None,
+        connection: Optional["HTTPConnection"] = None,
+        start_line: Optional["RequestStartLine"] = None,
+        server_connection: Optional[object] = None,
+    ) -> None:
         if start_line is not None:
             method, uri, version = start_line
         self.method = method
         self.uri = uri
         self.version = version
         self.headers = headers or HTTPHeaders()
-        self.body = body or b''
-        context = getattr(connection, 'context', None)
-        self.remote_ip = getattr(context, 'remote_ip', None)
-        self.protocol = getattr(context, 'protocol', 'http')
-        self.host = host or self.headers.get('Host') or '127.0.0.1'
+        self.body = body or b""
+
+        # set remote IP and protocol
+        context = getattr(connection, "context", None)
+        self.remote_ip = getattr(context, "remote_ip", None)
+        self.protocol = getattr(context, "protocol", "http")
+
+        self.host = host or self.headers.get("Host") or "127.0.0.1"
         self.host_name = split_host_and_port(self.host.lower())[0]
         self.files = files or {}
         self.connection = connection
         self.server_connection = server_connection
         self._start_time = time.time()
         self._finish_time = None
+
         if uri is not None:
-            self.path, sep, self.query = uri.partition('?')
+            self.path, sep, self.query = uri.partition("?")
         self.arguments = parse_qs_bytes(self.query, keep_blank_values=True)
         self.query_arguments = copy.deepcopy(self.arguments)
-        self.body_arguments = {}
+        self.body_arguments = {}  # type: Dict[str, List[bytes]]

     @property
-    def cookies(self) ->Dict[str, http.cookies.Morsel]:
+    def cookies(self) -> Dict[str, http.cookies.Morsel]:
         """A dictionary of ``http.cookies.Morsel`` objects."""
-        pass
-
-    def full_url(self) ->str:
+        if not hasattr(self, "_cookies"):
+            self._cookies = (
+                http.cookies.SimpleCookie()
+            )  # type: http.cookies.SimpleCookie
+            if "Cookie" in self.headers:
+                try:
+                    parsed = parse_cookie(self.headers["Cookie"])
+                except Exception:
+                    pass
+                else:
+                    for k, v in parsed.items():
+                        try:
+                            self._cookies[k] = v
+                        except Exception:
+                            # SimpleCookie imposes some restrictions on keys;
+                            # parse_cookie does not. Discard any cookies
+                            # with disallowed keys.
+                            pass
+        return self._cookies
+
+    def full_url(self) -> str:
         """Reconstructs the full URL for this request."""
-        pass
+        return self.protocol + "://" + self.host + self.uri  # type: ignore[operator]

-    def request_time(self) ->float:
+    def request_time(self) -> float:
         """Returns the amount of time it took for this request to execute."""
-        pass
+        if self._finish_time is None:
+            return time.time() - self._start_time
+        else:
+            return self._finish_time - self._start_time

-    def get_ssl_certificate(self, binary_form: bool=False) ->Union[None,
-        Dict, bytes]:
+    def get_ssl_certificate(
+        self, binary_form: bool = False
+    ) -> Union[None, Dict, bytes]:
         """Returns the client's SSL certificate, if any.

         To use client certificates, the HTTPServer's
@@ -323,12 +441,32 @@ class HTTPServerRequest(object):
         details.
         http://docs.python.org/library/ssl.html#sslsocket-objects
         """
-        pass
-
-    def __repr__(self) ->str:
-        attrs = 'protocol', 'host', 'method', 'uri', 'version', 'remote_ip'
-        args = ', '.join([('%s=%r' % (n, getattr(self, n))) for n in attrs])
-        return '%s(%s)' % (self.__class__.__name__, args)
+        try:
+            if self.connection is None:
+                return None
+            # TODO: add a method to HTTPConnection for this so it can work with HTTP/2
+            return self.connection.stream.socket.getpeercert(  # type: ignore
+                binary_form=binary_form
+            )
+        except SSLError:
+            return None
+
+    def _parse_body(self) -> None:
+        parse_body_arguments(
+            self.headers.get("Content-Type", ""),
+            self.body,
+            self.body_arguments,
+            self.files,
+            self.headers,
+        )
+
+        for k, v in self.body_arguments.items():
+            self.arguments.setdefault(k, []).extend(v)
+
+    def __repr__(self) -> str:
+        attrs = ("protocol", "host", "method", "uri", "version", "remote_ip")
+        args = ", ".join(["%s=%r" % (n, getattr(self, n)) for n in attrs])
+        return "%s(%s)" % (self.__class__.__name__, args)


 class HTTPInputError(Exception):
@@ -337,6 +475,7 @@ class HTTPInputError(Exception):

     .. versionadded:: 4.0
     """
+
     pass


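`HTTPInputError` is what the parsing helpers later in this file raise for malformed input, e.g. a bad version in the request line:

    from tornado.httputil import HTTPInputError, parse_request_start_line

    try:
        parse_request_start_line("GET /foo HTTP/9.9")
    except HTTPInputError as e:
        print(e)  # Malformed HTTP version in HTTP Request-Line: 'HTTP/9.9'
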
@@ -345,6 +484,7 @@ class HTTPOutputError(Exception):

     .. versionadded:: 4.0
     """
+
     pass


@@ -354,8 +494,9 @@ class HTTPServerConnectionDelegate(object):
     .. versionadded:: 4.0
     """

-    def start_request(self, server_conn: object, request_conn: 'HTTPConnection'
-        ) ->'HTTPMessageDelegate':
+    def start_request(
+        self, server_conn: object, request_conn: "HTTPConnection"
+    ) -> "HTTPMessageDelegate":
         """This method is called by the server when a new request has started.

         :arg server_conn: is an opaque object representing the long-lived
@@ -365,9 +506,9 @@ class HTTPServerConnectionDelegate(object):

         This method should return a `.HTTPMessageDelegate`.
         """
-        pass
+        raise NotImplementedError()

-    def on_close(self, server_conn: object) ->None:
+    def on_close(self, server_conn: object) -> None:
         """This method is called when a connection has been closed.

         :arg server_conn: is a server connection that has previously been
@@ -382,9 +523,12 @@ class HTTPMessageDelegate(object):
     .. versionadded:: 4.0
     """

-    def headers_received(self, start_line: Union['RequestStartLine',
-        'ResponseStartLine'], headers: HTTPHeaders) ->Optional[Awaitable[None]
-        ]:
+    # TODO: genericize this class to avoid exposing the Union.
+    def headers_received(
+        self,
+        start_line: Union["RequestStartLine", "ResponseStartLine"],
+        headers: HTTPHeaders,
+    ) -> Optional[Awaitable[None]]:
         """Called when the HTTP headers have been received and parsed.

         :arg start_line: a `.RequestStartLine` or `.ResponseStartLine`
@@ -399,18 +543,18 @@ class HTTPMessageDelegate(object):
         """
         pass

-    def data_received(self, chunk: bytes) ->Optional[Awaitable[None]]:
+    def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
         """Called when a chunk of data has been received.

         May return a `.Future` for flow control.
         """
         pass

-    def finish(self) ->None:
+    def finish(self) -> None:
         """Called after the last chunk of data has been received."""
         pass

-    def on_connection_close(self) ->None:
+    def on_connection_close(self) -> None:
         """Called if the connection is closed without finishing the request.

         If ``headers_received`` is called, either ``finish`` or
@@ -425,9 +569,12 @@ class HTTPConnection(object):
     .. versionadded:: 4.0
     """

-    def write_headers(self, start_line: Union['RequestStartLine',
-        'ResponseStartLine'], headers: HTTPHeaders, chunk: Optional[bytes]=None
-        ) ->'Future[None]':
+    def write_headers(
+        self,
+        start_line: Union["RequestStartLine", "ResponseStartLine"],
+        headers: HTTPHeaders,
+        chunk: Optional[bytes] = None,
+    ) -> "Future[None]":
         """Write an HTTP header block.

         :arg start_line: a `.RequestStartLine` or `.ResponseStartLine`.
@@ -444,9 +591,9 @@ class HTTPConnection(object):

            The ``callback`` argument was removed.
         """
-        pass
+        raise NotImplementedError()

-    def write(self, chunk: bytes) ->'Future[None]':
+    def write(self, chunk: bytes) -> "Future[None]":
         """Writes a chunk of body data.

         Returns a future for flow control.
@@ -455,15 +602,19 @@ class HTTPConnection(object):

            The ``callback`` argument was removed.
         """
-        pass
+        raise NotImplementedError()

-    def finish(self) ->None:
+    def finish(self) -> None:
         """Indicates that the last body data has been written."""
-        pass
+        raise NotImplementedError()


-def url_concat(url: str, args: Union[None, Dict[str, str], List[Tuple[str,
-    str]], Tuple[Tuple[str, str], ...]]) ->str:
+def url_concat(
+    url: str,
+    args: Union[
+        None, Dict[str, str], List[Tuple[str, str]], Tuple[Tuple[str, str], ...]
+    ],
+) -> str:
     """Concatenate url and arguments regardless of whether
     url has existing query parameters.

@@ -477,7 +628,32 @@ def url_concat(url: str, args: Union[None, Dict[str, str], List[Tuple[str,
     >>> url_concat("http://example.com/foo?a=b", [("c", "d"), ("c", "d2")])
     'http://example.com/foo?a=b&c=d&c=d2'
     """
-    pass
+    if args is None:
+        return url
+    parsed_url = urlparse(url)
+    if isinstance(args, dict):
+        parsed_query = parse_qsl(parsed_url.query, keep_blank_values=True)
+        parsed_query.extend(args.items())
+    elif isinstance(args, list) or isinstance(args, tuple):
+        parsed_query = parse_qsl(parsed_url.query, keep_blank_values=True)
+        parsed_query.extend(args)
+    else:
+        err = "'args' parameter should be dict, list or tuple. Not {0}".format(
+            type(args)
+        )
+        raise TypeError(err)
+    final_query = urlencode(parsed_query)
+    url = urlunparse(
+        (
+            parsed_url[0],
+            parsed_url[1],
+            parsed_url[2],
+            parsed_url[3],
+            final_query,
+            parsed_url[5],
+        )
+    )
+    return url


 class HTTPFile(ObjectDict):
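
One branch the `url_concat` doctests above don't cover: the rewritten function rejects unsupported `args` types with an explicit `TypeError` instead of failing obscurely:

    from tornado.httputil import url_concat

    try:
        url_concat("http://example.com/", "a=b")  # type: ignore[arg-type]
    except TypeError as e:
        print(e)  # 'args' parameter should be dict, list or tuple. Not <class 'str'>
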
@@ -490,13 +666,15 @@ class HTTPFile(ObjectDict):
     * ``body``
     * ``content_type``
     """
+
     filename: str
     body: bytes
     content_type: str


-def _parse_request_range(range_header: str) ->Optional[Tuple[Optional[int],
-    Optional[int]]]:
+def _parse_request_range(
+    range_header: str,
+) -> Optional[Tuple[Optional[int], Optional[int]]]:
     """Parses a Range header.

     Returns either ``None`` or tuple ``(start, end)``.
@@ -525,11 +703,27 @@ def _parse_request_range(range_header: str) ->Optional[Tuple[Optional[int],

     [0]: http://greenbytes.de/tech/webdav/draft-ietf-httpbis-p5-range-latest.html#byte.ranges
     """
-    pass
+    unit, _, value = range_header.partition("=")
+    unit, value = unit.strip(), value.strip()
+    if unit != "bytes":
+        return None
+    start_b, _, end_b = value.partition("-")
+    try:
+        start = _int_or_none(start_b)
+        end = _int_or_none(end_b)
+    except ValueError:
+        return None
+    if end is not None:
+        if start is None:
+            if end != 0:
+                start = -end
+                end = None
+        else:
+            end += 1
+    return (start, end)


-def _get_content_range(start: Optional[int], end: Optional[int], total: int
-    ) ->str:
+def _get_content_range(start: Optional[int], end: Optional[int], total: int) -> str:
     """Returns a suitable Content-Range header:

     >>> print(_get_content_range(None, 1, 4))
@@ -539,12 +733,25 @@ def _get_content_range(start: Optional[int], end: Optional[int], total: int
     >>> print(_get_content_range(None, None, 4))
     bytes 0-3/4
     """
-    pass
+    start = start or 0
+    end = (end or total) - 1
+    return "bytes %s-%s/%s" % (start, end, total)
+
+
+def _int_or_none(val: str) -> Optional[int]:
+    val = val.strip()
+    if val == "":
+        return None
+    return int(val)


-def parse_body_arguments(content_type: str, body: bytes, arguments: Dict[
-    str, List[bytes]], files: Dict[str, List[HTTPFile]], headers: Optional[
-    HTTPHeaders]=None) ->None:
+def parse_body_arguments(
+    content_type: str,
+    body: bytes,
+    arguments: Dict[str, List[bytes]],
+    files: Dict[str, List[HTTPFile]],
+    headers: Optional[HTTPHeaders] = None,
+) -> None:
     """Parses a form request body.

     Supports ``application/x-www-form-urlencoded`` and
@@ -553,11 +760,46 @@ def parse_body_arguments(content_type: str, body: bytes, arguments: Dict[
     and ``files`` parameters are dictionaries that will be updated
     with the parsed contents.
     """
-    pass
-
-
-def parse_multipart_form_data(boundary: bytes, data: bytes, arguments: Dict
-    [str, List[bytes]], files: Dict[str, List[HTTPFile]]) ->None:
+    if content_type.startswith("application/x-www-form-urlencoded"):
+        if headers and "Content-Encoding" in headers:
+            gen_log.warning(
+                "Unsupported Content-Encoding: %s", headers["Content-Encoding"]
+            )
+            return
+        try:
+            # real charset decoding will happen in RequestHandler.decode_argument()
+            uri_arguments = parse_qs_bytes(body, keep_blank_values=True)
+        except Exception as e:
+            gen_log.warning("Invalid x-www-form-urlencoded body: %s", e)
+            uri_arguments = {}
+        for name, values in uri_arguments.items():
+            if values:
+                arguments.setdefault(name, []).extend(values)
+    elif content_type.startswith("multipart/form-data"):
+        if headers and "Content-Encoding" in headers:
+            gen_log.warning(
+                "Unsupported Content-Encoding: %s", headers["Content-Encoding"]
+            )
+            return
+        try:
+            fields = content_type.split(";")
+            for field in fields:
+                k, sep, v = field.strip().partition("=")
+                if k == "boundary" and v:
+                    parse_multipart_form_data(utf8(v), body, arguments, files)
+                    break
+            else:
+                raise ValueError("multipart boundary not found")
+        except Exception as e:
+            gen_log.warning("Invalid multipart/form-data: %s", e)
+
+
+def parse_multipart_form_data(
+    boundary: bytes,
+    data: bytes,
+    arguments: Dict[str, List[bytes]],
+    files: Dict[str, List[HTTPFile]],
+) -> None:
     """Parses a ``multipart/form-data`` body.

     The ``boundary`` and ``data`` parameters are both byte strings.
@@ -569,11 +811,50 @@ def parse_multipart_form_data(boundary: bytes, data: bytes, arguments: Dict
        Now recognizes non-ASCII filenames in RFC 2231/5987
        (``filename*=``) format.
     """
-    pass
+    # The standard allows for the boundary to be quoted in the header,
+    # although it's rare (it happens at least for google app engine
+    # xmpp).  I think we're also supposed to handle backslash-escapes
+    # here but I'll save that until we see a client that uses them
+    # in the wild.
+    if boundary.startswith(b'"') and boundary.endswith(b'"'):
+        boundary = boundary[1:-1]
+    final_boundary_index = data.rfind(b"--" + boundary + b"--")
+    if final_boundary_index == -1:
+        gen_log.warning("Invalid multipart/form-data: no final boundary")
+        return
+    parts = data[:final_boundary_index].split(b"--" + boundary + b"\r\n")
+    for part in parts:
+        if not part:
+            continue
+        eoh = part.find(b"\r\n\r\n")
+        if eoh == -1:
+            gen_log.warning("multipart/form-data missing headers")
+            continue
+        headers = HTTPHeaders.parse(part[:eoh].decode("utf-8"))
+        disp_header = headers.get("Content-Disposition", "")
+        disposition, disp_params = _parse_header(disp_header)
+        if disposition != "form-data" or not part.endswith(b"\r\n"):
+            gen_log.warning("Invalid multipart/form-data")
+            continue
+        value = part[eoh + 4 : -2]
+        if not disp_params.get("name"):
+            gen_log.warning("multipart/form-data value missing name")
+            continue
+        name = disp_params["name"]
+        if disp_params.get("filename"):
+            ctype = headers.get("Content-Type", "application/unknown")
+            files.setdefault(name, []).append(
+                HTTPFile(
+                    filename=disp_params["filename"], body=value, content_type=ctype
+                )
+            )
+        else:
+            arguments.setdefault(name, []).append(value)


-def format_timestamp(ts: Union[int, float, tuple, time.struct_time,
-    datetime.datetime]) ->str:
+def format_timestamp(
+    ts: Union[int, float, tuple, time.struct_time, datetime.datetime]
+) -> str:
     """Formats a timestamp in the format used by HTTP.

     The argument may be a numeric timestamp as returned by `time.time`,
@@ -584,15 +865,26 @@ def format_timestamp(ts: Union[int, float, tuple, time.struct_time,
     >>> format_timestamp(1359312200)
     'Sun, 27 Jan 2013 18:43:20 GMT'
     """
-    pass
+    if isinstance(ts, (int, float)):
+        time_num = ts
+    elif isinstance(ts, (tuple, time.struct_time)):
+        time_num = calendar.timegm(ts)
+    elif isinstance(ts, datetime.datetime):
+        time_num = calendar.timegm(ts.utctimetuple())
+    else:
+        raise TypeError("unknown timestamp type: %r" % ts)
+    return email.utils.formatdate(time_num, usegmt=True)
+

+RequestStartLine = collections.namedtuple(
+    "RequestStartLine", ["method", "path", "version"]
+)

-RequestStartLine = collections.namedtuple('RequestStartLine', ['method',
-    'path', 'version'])
-_http_version_re = re.compile('^HTTP/1\\.[0-9]$')

+_http_version_re = re.compile(r"^HTTP/1\.[0-9]$")

-def parse_request_start_line(line: str) ->RequestStartLine:
+
+def parse_request_start_line(line: str) -> RequestStartLine:
     """Returns a (method, path, version) tuple for an HTTP 1.x request line.

     The response is a `collections.namedtuple`.
@@ -600,15 +892,28 @@ def parse_request_start_line(line: str) ->RequestStartLine:
     >>> parse_request_start_line("GET /foo HTTP/1.1")
     RequestStartLine(method='GET', path='/foo', version='HTTP/1.1')
     """
-    pass
+    try:
+        method, path, version = line.split(" ")
+    except ValueError:
+        # https://tools.ietf.org/html/rfc7230#section-3.1.1
+        # invalid request-line SHOULD respond with a 400 (Bad Request)
+        raise HTTPInputError("Malformed HTTP request line")
+    if not _http_version_re.match(version):
+        raise HTTPInputError(
+            "Malformed HTTP version in HTTP Request-Line: %r" % version
+        )
+    return RequestStartLine(method, path, version)
+

+ResponseStartLine = collections.namedtuple(
+    "ResponseStartLine", ["version", "code", "reason"]
+)

-ResponseStartLine = collections.namedtuple('ResponseStartLine', ['version',
-    'code', 'reason'])
-_http_response_line_re = re.compile('(HTTP/1.[0-9]) ([0-9]+) ([^\\r]*)')

+_http_response_line_re = re.compile(r"(HTTP/1.[0-9]) ([0-9]+) ([^\r]*)")

-def parse_response_start_line(line: str) ->ResponseStartLine:
+
+def parse_response_start_line(line: str) -> ResponseStartLine:
     """Returns a (version, code, reason) tuple for an HTTP 1.x response line.

     The response is a `collections.namedtuple`.
@@ -616,84 +921,198 @@ def parse_response_start_line(line: str) ->ResponseStartLine:
     >>> parse_response_start_line("HTTP/1.1 200 OK")
     ResponseStartLine(version='HTTP/1.1', code=200, reason='OK')
     """
-    pass
+    line = native_str(line)
+    match = _http_response_line_re.match(line)
+    if not match:
+        raise HTTPInputError("Error parsing response start line")
+    return ResponseStartLine(match.group(1), int(match.group(2)), match.group(3))


-def _parse_header(line: str) ->Tuple[str, Dict[str, str]]:
-    """Parse a Content-type like header.
+# _parseparam and _parse_header are copied and modified from python2.7's cgi.py
+# The original 2.7 version of this code did not correctly support some
+# combinations of semicolons and double quotes.
+# It has also been modified to support valueless parameters as seen in
+# websocket extension negotiations, and to support non-ascii values in
+# RFC 2231/5987 format.
+
+
+def _parseparam(s: str) -> Generator[str, None, None]:
+    while s[:1] == ";":
+        s = s[1:]
+        end = s.find(";")
+        while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
+            end = s.find(";", end + 1)
+        if end < 0:
+            end = len(s)
+        f = s[:end]
+        yield f.strip()
+        s = s[end:]
+
+
+def _parse_header(line: str) -> Tuple[str, Dict[str, str]]:
+    r"""Parse a Content-type like header.

     Return the main content-type and a dictionary of options.

-    >>> d = "form-data; foo=\\"b\\\\\\\\a\\\\\\"r\\"; file*=utf-8''T%C3%A4st"
+    >>> d = "form-data; foo=\"b\\\\a\\\"r\"; file*=utf-8''T%C3%A4st"
     >>> ct, d = _parse_header(d)
     >>> ct
     'form-data'
-    >>> d['file'] == r'T\\u00e4st'.encode('ascii').decode('unicode_escape')
+    >>> d['file'] == r'T\u00e4st'.encode('ascii').decode('unicode_escape')
     True
     >>> d['foo']
-    'b\\\\a"r'
+    'b\\a"r'
     """
-    pass
-
-
-def _encode_header(key: str, pdict: Dict[str, str]) ->str:
+    parts = _parseparam(";" + line)
+    key = next(parts)
+    # decode_params treats first argument special, but we already stripped key
+    params = [("Dummy", "value")]
+    for p in parts:
+        i = p.find("=")
+        if i >= 0:
+            name = p[:i].strip().lower()
+            value = p[i + 1 :].strip()
+            params.append((name, native_str(value)))
+    decoded_params = email.utils.decode_params(params)
+    decoded_params.pop(0)  # get rid of the dummy again
+    pdict = {}
+    for name, decoded_value in decoded_params:
+        value = email.utils.collapse_rfc2231_value(decoded_value)
+        if len(value) >= 2 and value[0] == '"' and value[-1] == '"':
+            value = value[1:-1]
+        pdict[name] = value
+    return key, pdict
+
+
+def _encode_header(key: str, pdict: Dict[str, str]) -> str:
     """Inverse of _parse_header.

     >>> _encode_header('permessage-deflate',
     ...     {'client_max_window_bits': 15, 'client_no_context_takeover': None})
     'permessage-deflate; client_max_window_bits=15; client_no_context_takeover'
     """
-    pass
+    if not pdict:
+        return key
+    out = [key]
+    # Sort the parameters just to make it easy to test.
+    for k, v in sorted(pdict.items()):
+        if v is None:
+            out.append(k)
+        else:
+            # TODO: quote if necessary.
+            out.append("%s=%s" % (k, v))
+    return "; ".join(out)


-def encode_username_password(username: Union[str, bytes], password: Union[
-    str, bytes]) ->bytes:
+def encode_username_password(
+    username: Union[str, bytes], password: Union[str, bytes]
+) -> bytes:
     """Encodes a username/password pair in the format used by HTTP auth.

     The return value is a byte string in the form ``username:password``.

     .. versionadded:: 5.1
     """
-    pass
+    if isinstance(username, unicode_type):
+        username = unicodedata.normalize("NFC", username)
+    if isinstance(password, unicode_type):
+        password = unicodedata.normalize("NFC", password)
+    return utf8(username) + b":" + utf8(password)


-_netloc_re = re.compile('^(.+):(\\d+)$')
+def doctests():
+    # type: () -> unittest.TestSuite
+    import doctest

+    return doctest.DocTestSuite()

-def split_host_and_port(netloc: str) ->Tuple[str, Optional[int]]:
+
+_netloc_re = re.compile(r"^(.+):(\d+)$")
+
+
+def split_host_and_port(netloc: str) -> Tuple[str, Optional[int]]:
     """Returns ``(host, port)`` tuple from ``netloc``.

     Returned ``port`` will be ``None`` if not present.

     .. versionadded:: 4.1
     """
-    pass
+    match = _netloc_re.match(netloc)
+    if match:
+        host = match.group(1)
+        port = int(match.group(2))  # type: Optional[int]
+    else:
+        host = netloc
+        port = None
+    return (host, port)


-def qs_to_qsl(qs: Dict[str, List[AnyStr]]) ->Iterable[Tuple[str, AnyStr]]:
+def qs_to_qsl(qs: Dict[str, List[AnyStr]]) -> Iterable[Tuple[str, AnyStr]]:
     """Generator converting a result of ``parse_qs`` back to name-value pairs.

     .. versionadded:: 5.0
     """
-    pass
+    for k, vs in qs.items():
+        for v in vs:
+            yield (k, v)


-_OctalPatt = re.compile('\\\\[0-3][0-7][0-7]')
-_QuotePatt = re.compile('[\\\\].')
-_nulljoin = ''.join
+_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
+_QuotePatt = re.compile(r"[\\].")
+_nulljoin = "".join


-def _unquote_cookie(s: str) ->str:
+def _unquote_cookie(s: str) -> str:
     """Handle double quotes and escaping in cookie values.

     This method is copied verbatim from the Python 3.5 standard
     library (http.cookies._unquote) so we don't have to depend on
     non-public interfaces.
     """
-    pass
-
-
-def parse_cookie(cookie: str) ->Dict[str, str]:
+    # If there aren't any doublequotes,
+    # then there can't be any special characters.  See RFC 2109.
+    if s is None or len(s) < 2:
+        return s
+    if s[0] != '"' or s[-1] != '"':
+        return s
+
+    # We have to assume that we must decode this string.
+    # Down to work.
+
+    # Remove the "s
+    s = s[1:-1]
+
+    # Check for special sequences.  Examples:
+    #    \012 --> \n
+    #    \"   --> "
+    #
+    i = 0
+    n = len(s)
+    res = []
+    while 0 <= i < n:
+        o_match = _OctalPatt.search(s, i)
+        q_match = _QuotePatt.search(s, i)
+        if not o_match and not q_match:  # Neither matched
+            res.append(s[i:])
+            break
+        # else:
+        j = k = -1
+        if o_match:
+            j = o_match.start(0)
+        if q_match:
+            k = q_match.start(0)
+        if q_match and (not o_match or k < j):  # QuotePatt matched
+            res.append(s[i:k])
+            res.append(s[k + 1])
+            i = k + 2
+        else:  # OctalPatt matched
+            res.append(s[i:j])
+            res.append(chr(int(s[j + 1 : j + 4], 8)))
+            i = j + 4
+    return _nulljoin(res)
+
+
+def parse_cookie(cookie: str) -> Dict[str, str]:
     """Parse a ``Cookie`` HTTP header into a dict of name/value pairs.

     This function attempts to mimic browser cookie parsing behavior;
@@ -704,4 +1123,16 @@ def parse_cookie(cookie: str) ->Dict[str, str]:

     .. versionadded:: 4.4.2
     """
-    pass
+    cookiedict = {}
+    for chunk in cookie.split(str(";")):
+        if str("=") in chunk:
+            key, val = chunk.split(str("="), 1)
+        else:
+            # Assume an empty name per
+            # https://bugzilla.mozilla.org/show_bug.cgi?id=169091
+            key, val = str(""), chunk
+        key, val = key.strip(), val.strip()
+        if key or val:
+            # unquote using Python's algorithm.
+            cookiedict[key] = _unquote_cookie(val)
+    return cookiedict
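
`parse_cookie` stays deliberately lenient, in keeping with browser behavior: quoted values go through `_unquote_cookie`, and a chunk without `=` is stored under an empty name. A behavioral sketch:

    from tornado.httputil import parse_cookie

    assert parse_cookie('session=abc123; theme="dark"') == {
        "session": "abc123",
        "theme": "dark",  # surrounding quotes removed by _unquote_cookie
    }
    assert parse_cookie("orphan") == {"": "orphan"}
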
diff --git a/tornado/ioloop.py b/tornado/ioloop.py
index 114f1626..3fb1359a 100644
--- a/tornado/ioloop.py
+++ b/tornado/ioloop.py
@@ -1,3 +1,18 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """An I/O event loop for non-blocking sockets.

 In Tornado 6.0, `.IOLoop` is a wrapper around the `asyncio` event loop, with a
@@ -7,6 +22,7 @@ loop interface directly. The `IOLoop.current` class method provides the
 `IOLoop` instance corresponding to the running `asyncio` event loop.

 """
+
 import asyncio
 import concurrent.futures
 import datetime
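
The docstring's claim that `IOLoop` wraps the asyncio loop is concrete: `_ioloop_for_asyncio` (below) maps each asyncio loop to its wrapper, so `IOLoop.current()` is idempotent inside a coroutine. A small sketch, assuming Tornado's asyncio bridge is importable (the registration itself happens in that bridge, outside this diff):

    import asyncio
    from tornado.ioloop import IOLoop

    async def demo() -> None:
        first = IOLoop.current()   # creates and registers an AsyncIOMainLoop
        second = IOLoop.current()  # found in _ioloop_for_asyncio
        assert first is second

    asyncio.run(demo())
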
@@ -19,24 +35,38 @@ import math
 import random
 import warnings
 from inspect import isawaitable
-from tornado.concurrent import Future, is_future, chain_future, future_set_exc_info, future_add_done_callback
+
+from tornado.concurrent import (
+    Future,
+    is_future,
+    chain_future,
+    future_set_exc_info,
+    future_add_done_callback,
+)
 from tornado.log import app_log
 from tornado.util import Configurable, TimeoutError, import_object
+
 import typing
 from typing import Union, Any, Type, Optional, Callable, TypeVar, Tuple, Awaitable
+
 if typing.TYPE_CHECKING:
-    from typing import Dict, List, Set
+    from typing import Dict, List, Set  # noqa: F401
+
     from typing_extensions import Protocol
 else:
     Protocol = object


 class _Selectable(Protocol):
-    pass
+    def fileno(self) -> int:
+        pass

+    def close(self) -> None:
+        pass

-_T = TypeVar('_T')
-_S = TypeVar('_S', bound=_Selectable)
+
+_T = TypeVar("_T")
+_S = TypeVar("_S", bound=_Selectable)


 class IOLoop(Configurable):
@@ -119,15 +149,42 @@ class IOLoop(Configurable):
        previously the default was to make the event loop current if there wasn't
        already a current one.
     """
+
+    # These constants were originally based on constants from the epoll module.
     NONE = 0
-    READ = 1
-    WRITE = 4
-    ERROR = 24
-    _ioloop_for_asyncio = dict()
-    _pending_tasks = set()
+    READ = 0x001
+    WRITE = 0x004
+    ERROR = 0x018
+
+    # In Python 3, _ioloop_for_asyncio maps from asyncio loops to IOLoops.
+    _ioloop_for_asyncio = dict()  # type: Dict[asyncio.AbstractEventLoop, IOLoop]
+
+    # Maintain a set of all pending tasks to follow the warning in the docs
+    # of asyncio.create_task:
+    # https://docs.python.org/3.11/library/asyncio-task.html#asyncio.create_task
+    # This ensures that all pending tasks have a strong reference so they
+    # will not be garbage collected before they are finished.
+    # (Thus avoiding "task was destroyed but it is pending" warnings)
+    # An analogous change has been proposed in cpython for 3.13:
+    # https://github.com/python/cpython/issues/91887
+    # If that change is accepted, this can eventually be removed.
+    # If it is not, this bookkeeping will simply have to stay.
+    _pending_tasks = set()  # type: Set[Future]
+
+    @classmethod
+    def configure(
+        cls, impl: "Union[None, str, Type[Configurable]]", **kwargs: Any
+    ) -> None:
+        from tornado.platform.asyncio import BaseAsyncIOLoop
+
+        if isinstance(impl, str):
+            impl = import_object(impl)
+        if isinstance(impl, type) and not issubclass(impl, BaseAsyncIOLoop):
+            raise RuntimeError("only AsyncIOLoop is allowed when asyncio is available")
+        super(IOLoop, cls).configure(impl, **kwargs)

     @staticmethod
-    def instance() ->'IOLoop':
+    def instance() -> "IOLoop":
         """Deprecated alias for `IOLoop.current()`.

         .. versionchanged:: 5.0
@@ -146,9 +203,9 @@ class IOLoop(Configurable):

         .. deprecated:: 5.0
         """
-        pass
+        return IOLoop.current()

-    def install(self) ->None:
+    def install(self) -> None:
         """Deprecated alias for `make_current()`.

         .. versionchanged:: 5.0
@@ -160,10 +217,10 @@ class IOLoop(Configurable):

         .. deprecated:: 5.0
         """
-        pass
+        self.make_current()

     @staticmethod
-    def clear_instance() ->None:
+    def clear_instance() -> None:
         """Deprecated alias for `clear_current()`.

         .. versionchanged:: 5.0
@@ -176,10 +233,20 @@ class IOLoop(Configurable):
         .. deprecated:: 5.0

         """
+        IOLoop.clear_current()
+
+    @typing.overload
+    @staticmethod
+    def current() -> "IOLoop":
         pass

+    @typing.overload
     @staticmethod
-    def current(instance: bool=True) ->Optional['IOLoop']:
+    def current(instance: bool = True) -> Optional["IOLoop"]:  # noqa: F811
+        pass
+
+    @staticmethod
+    def current(instance: bool = True) -> Optional["IOLoop"]:  # noqa: F811
         """Returns the current thread's `IOLoop`.

         If an `IOLoop` is currently running or has been marked as
@@ -203,9 +270,27 @@ class IOLoop(Configurable):
            It is deprecated to call ``IOLoop.current()`` when no `asyncio`
            event loop is running.
         """
-        pass
-
-    def make_current(self) ->None:
+        try:
+            loop = asyncio.get_event_loop()
+        except RuntimeError:
+            if not instance:
+                return None
+            # Create a new asyncio event loop for this thread.
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+
+        try:
+            return IOLoop._ioloop_for_asyncio[loop]
+        except KeyError:
+            if instance:
+                from tornado.platform.asyncio import AsyncIOMainLoop
+
+                current = AsyncIOMainLoop()  # type: Optional[IOLoop]
+            else:
+                current = None
+        return current
+
+    def make_current(self) -> None:
         """Makes this the `IOLoop` for the current thread.

         An `IOLoop` automatically becomes current for its thread
@@ -225,10 +310,19 @@ class IOLoop(Configurable):
            Setting and clearing the current event loop through Tornado is
            deprecated. Use ``asyncio.set_event_loop`` instead if you need this.
         """
-        pass
+        warnings.warn(
+            "make_current is deprecated; start the event loop first",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        self._make_current()
+
+    def _make_current(self) -> None:
+        # The asyncio event loops override this method.
+        raise NotImplementedError()

     @staticmethod
-    def clear_current() ->None:
+    def clear_current() -> None:
         """Clears the `IOLoop` for the current thread.

         Intended primarily for use by test frameworks in between tests.
@@ -237,16 +331,41 @@ class IOLoop(Configurable):
            This method also clears the current `asyncio` event loop.
         .. deprecated:: 6.2
         """
-        pass
+        warnings.warn(
+            "clear_current is deprecated",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        IOLoop._clear_current()
+
+    @staticmethod
+    def _clear_current() -> None:
+        old = IOLoop.current(instance=False)
+        if old is not None:
+            old._clear_current_hook()

-    def _clear_current_hook(self) ->None:
+    def _clear_current_hook(self) -> None:
         """Instance method called when an IOLoop ceases to be current.

         May be overridden by subclasses as a counterpart to make_current.
         """
         pass

-    def close(self, all_fds: bool=False) ->None:
+    @classmethod
+    def configurable_base(cls) -> Type[Configurable]:
+        return IOLoop
+
+    @classmethod
+    def configurable_default(cls) -> Type[Configurable]:
+        from tornado.platform.asyncio import AsyncIOLoop
+
+        return AsyncIOLoop
+
+    def initialize(self, make_current: bool = True) -> None:
+        if make_current:
+            self._make_current()
+
+    def close(self, all_fds: bool = False) -> None:
         """Closes the `IOLoop`, freeing any resources used.

         If ``all_fds`` is true, all file descriptors registered on the
@@ -271,10 +390,23 @@ class IOLoop(Configurable):
            for "file descriptors", those objects will have their
            ``close`` method called when ``all_fds`` is true.
         """
+        raise NotImplementedError()
+
+    @typing.overload
+    def add_handler(
+        self, fd: int, handler: Callable[[int, int], None], events: int
+    ) -> None:
         pass

-    def add_handler(self, fd: Union[int, _Selectable], handler: Callable[
-        ..., None], events: int) ->None:
+    @typing.overload  # noqa: F811
+    def add_handler(
+        self, fd: _S, handler: Callable[[_S, int], None], events: int
+    ) -> None:
+        pass
+
+    def add_handler(  # noqa: F811
+        self, fd: Union[int, _Selectable], handler: Callable[..., None], events: int
+    ) -> None:
         """Registers the given handler to receive the given events for ``fd``.

         The ``fd`` argument may either be an integer file descriptor or
@@ -289,35 +421,35 @@ class IOLoop(Configurable):
            Added the ability to pass file-like objects in addition to
            raw file descriptors.
         """
-        pass
+        raise NotImplementedError()
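+
+    # Sketch of add_handler with a listening socket (``server_sock`` and
+    # ``on_readable(fd, events)`` are hypothetical):
+    #
+    #     loop = IOLoop.current()
+    #     loop.add_handler(server_sock, on_readable, IOLoop.READ)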

-    def update_handler(self, fd: Union[int, _Selectable], events: int) ->None:
+    def update_handler(self, fd: Union[int, _Selectable], events: int) -> None:
         """Changes the events we listen for ``fd``.

         .. versionchanged:: 4.0
            Added the ability to pass file-like objects in addition to
            raw file descriptors.
         """
-        pass
+        raise NotImplementedError()

-    def remove_handler(self, fd: Union[int, _Selectable]) ->None:
+    def remove_handler(self, fd: Union[int, _Selectable]) -> None:
         """Stop listening for events on ``fd``.

         .. versionchanged:: 4.0
            Added the ability to pass file-like objects in addition to
            raw file descriptors.
         """
-        pass
+        raise NotImplementedError()

-    def start(self) ->None:
+    def start(self) -> None:
         """Starts the I/O loop.

         The loop will run until one of the callbacks calls `stop()`, which
         will make the loop stop after the current event iteration completes.
         """
-        pass
+        raise NotImplementedError()

-    def stop(self) ->None:
+    def stop(self) -> None:
         """Stop the I/O loop.

         If the event loop is not currently running, the next call to `start()`
@@ -328,9 +460,9 @@ class IOLoop(Configurable):
         Some work that was scheduled before the call to `stop` may still
         be run before the `IOLoop` shuts down.
         """
-        pass
+        raise NotImplementedError()

-    def run_sync(self, func: Callable, timeout: Optional[float]=None) ->Any:
+    def run_sync(self, func: Callable, timeout: Optional[float] = None) -> Any:
         """Starts the `IOLoop`, runs the given function, and stops the loop.

         The function must return either an awaitable object or
@@ -362,9 +494,51 @@ class IOLoop(Configurable):
         .. versionchanged:: 6.2
            ``tornado.util.TimeoutError`` is now an alias to ``asyncio.TimeoutError``.
         """
-        pass
-
-    def time(self) ->float:
+        future_cell = [None]  # type: List[Optional[Future]]
+
+        def run() -> None:
+            try:
+                result = func()
+                if result is not None:
+                    from tornado.gen import convert_yielded
+
+                    result = convert_yielded(result)
+            except Exception:
+                fut = Future()  # type: Future[Any]
+                future_cell[0] = fut
+                future_set_exc_info(fut, sys.exc_info())
+            else:
+                if is_future(result):
+                    future_cell[0] = result
+                else:
+                    fut = Future()
+                    future_cell[0] = fut
+                    fut.set_result(result)
+            assert future_cell[0] is not None
+            self.add_future(future_cell[0], lambda future: self.stop())
+
+        self.add_callback(run)
+        if timeout is not None:
+
+            def timeout_callback() -> None:
+                # If we can cancel the future, do so and wait on it. If not,
+                # just stop the loop and return with the task still pending.
+                # (If we neither cancel nor wait for the task, a warning
+                # will be logged).
+                assert future_cell[0] is not None
+                if not future_cell[0].cancel():
+                    self.stop()
+
+            timeout_handle = self.add_timeout(self.time() + timeout, timeout_callback)
+        self.start()
+        if timeout is not None:
+            self.remove_timeout(timeout_handle)
+        assert future_cell[0] is not None
+        if future_cell[0].cancelled() or not future_cell[0].done():
+            raise TimeoutError("Operation timed out after %s seconds" % timeout)
+        return future_cell[0].result()
+
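+    # Typical run_sync entry point (a sketch; ``main`` is a hypothetical
+    # coroutine function):
+    #
+    #     async def main() -> int:
+    #         await asyncio.sleep(0.1)
+    #         return 42
+    #
+    #     result = IOLoop.current().run_sync(main, timeout=5)
+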
+    def time(self) -> float:
         """Returns the current time according to the `IOLoop`'s clock.

         The return value is a floating-point number relative to an
@@ -376,10 +550,15 @@ class IOLoop(Configurable):
         `time.time`.

         """
-        pass
-
-    def add_timeout(self, deadline: Union[float, datetime.timedelta],
-        callback: Callable, *args: Any, **kwargs: Any) ->object:
+        return time.time()
+
+    def add_timeout(
+        self,
+        deadline: Union[float, datetime.timedelta],
+        callback: Callable,
+        *args: Any,
+        **kwargs: Any
+    ) -> object:
         """Runs the ``callback`` at the time ``deadline`` from the I/O loop.

         Returns an opaque handle that may be passed to
@@ -405,10 +584,18 @@ class IOLoop(Configurable):
         .. versionchanged:: 4.0
            Now passes through ``*args`` and ``**kwargs`` to the callback.
         """
-        pass
+        if isinstance(deadline, numbers.Real):
+            return self.call_at(deadline, callback, *args, **kwargs)
+        elif isinstance(deadline, datetime.timedelta):
+            return self.call_at(
+                self.time() + deadline.total_seconds(), callback, *args, **kwargs
+            )
+        else:
+            raise TypeError("Unsupported deadline %r" % deadline)

-    def call_later(self, delay: float, callback: Callable, *args: Any, **
-        kwargs: Any) ->object:
+    def call_later(
+        self, delay: float, callback: Callable, *args: Any, **kwargs: Any
+    ) -> object:
         """Runs the ``callback`` after ``delay`` seconds have passed.

         Returns an opaque handle that may be passed to `remove_timeout`
@@ -419,10 +606,11 @@ class IOLoop(Configurable):

         .. versionadded:: 4.0
         """
-        pass
+        return self.call_at(self.time() + delay, callback, *args, **kwargs)

-    def call_at(self, when: float, callback: Callable, *args: Any, **kwargs:
-        Any) ->object:
+    def call_at(
+        self, when: float, callback: Callable, *args: Any, **kwargs: Any
+    ) -> object:
         """Runs the ``callback`` at the absolute time designated by ``when``.

         ``when`` must be a number using the same reference point as
@@ -436,19 +624,18 @@ class IOLoop(Configurable):

         .. versionadded:: 4.0
         """
-        pass
+        return self.add_timeout(when, callback, *args, **kwargs)
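+
+    # The scheduling methods delegate to each other; a sketch of equivalent
+    # calls (``cb`` is a hypothetical zero-argument callable):
+    #
+    #     loop = IOLoop.current()
+    #     loop.add_timeout(datetime.timedelta(seconds=1), cb)  # relative
+    #     loop.add_timeout(loop.time() + 1, cb)                # absolute
+    #     handle = loop.call_later(1, cb)                      # relative
+    #     loop.call_at(loop.time() + 1, cb)                    # absolute
+    #     loop.remove_timeout(handle)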

-    def remove_timeout(self, timeout: object) ->None:
+    def remove_timeout(self, timeout: object) -> None:
         """Cancels a pending timeout.

         The argument is a handle as returned by `add_timeout`.  It is
         safe to call `remove_timeout` even if the callback has already
         been run.
         """
-        pass
+        raise NotImplementedError()

-    def add_callback(self, callback: Callable, *args: Any, **kwargs: Any
-        ) ->None:
+    def add_callback(self, callback: Callable, *args: Any, **kwargs: Any) -> None:
         """Calls the given callback on the next I/O loop iteration.

         It is safe to call this method from any thread at any time,
@@ -458,10 +645,11 @@ class IOLoop(Configurable):
         `IOLoop`'s thread.  `add_callback()` may be used to transfer
         control from other threads to the `IOLoop`'s thread.
         """
-        pass
+        raise NotImplementedError()
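+
+    # add_callback is the thread-safe handoff point; a sketch of a worker
+    # thread returning a result to the loop's thread (``do_blocking_io``
+    # and ``handle_result`` are hypothetical):
+    #
+    #     def worker(loop: "IOLoop") -> None:
+    #         data = do_blocking_io()
+    #         loop.add_callback(handle_result, data)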

-    def add_callback_from_signal(self, callback: Callable, *args: Any, **
-        kwargs: Any) ->None:
+    def add_callback_from_signal(
+        self, callback: Callable, *args: Any, **kwargs: Any
+    ) -> None:
         """Calls the given callback on the next I/O loop iteration.

         Intended to be safe for use from a Python signal handler; should not be
@@ -472,21 +660,22 @@ class IOLoop(Configurable):
            This method is suspected to have been broken since Tornado 5.0 and
            will be removed in version 7.0.
         """
-        pass
+        raise NotImplementedError()

-    def spawn_callback(self, callback: Callable, *args: Any, **kwargs: Any
-        ) ->None:
+    def spawn_callback(self, callback: Callable, *args: Any, **kwargs: Any) -> None:
         """Calls the given callback on the next IOLoop iteration.

         As of Tornado 6.0, this method is equivalent to `add_callback`.

         .. versionadded:: 4.0
         """
-        pass
+        self.add_callback(callback, *args, **kwargs)

-    def add_future(self, future:
-        'Union[Future[_T], concurrent.futures.Future[_T]]', callback:
-        Callable[['Future[_T]'], None]) ->None:
+    def add_future(
+        self,
+        future: "Union[Future[_T], concurrent.futures.Future[_T]]",
+        callback: Callable[["Future[_T]"], None],
+    ) -> None:
         """Schedules a callback on the ``IOLoop`` when the given
         `.Future` is finished.

@@ -497,10 +686,30 @@ class IOLoop(Configurable):
         awaitables (unlike most of Tornado where the two are
         interchangeable).
         """
-        pass
-
-    def run_in_executor(self, executor: Optional[concurrent.futures.
-        Executor], func: Callable[..., _T], *args: Any) ->'Future[_T]':
+        if isinstance(future, Future):
+            # Note that we specifically do not want the inline behavior of
+            # tornado.concurrent.future_add_done_callback. We always want
+            # this callback scheduled on the next IOLoop iteration (which
+            # asyncio.Future always does).
+            #
+            # Wrap the callback in self._run_callback so we control
+            # the error logging (i.e. it goes to tornado.log.app_log
+            # instead of asyncio's log).
+            future.add_done_callback(
+                lambda f: self._run_callback(functools.partial(callback, f))
+            )
+        else:
+            assert is_future(future)
+            # For concurrent futures, we use self.add_callback, so
+            # it's fine if future_add_done_callback inlines that call.
+            future_add_done_callback(future, lambda f: self.add_callback(callback, f))
+
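+    # add_future sketch (``some_future`` and ``on_done`` are hypothetical):
+    #
+    #     def on_done(f: "Future[str]") -> None:
+    #         print(f.result())
+    #
+    #     IOLoop.current().add_future(some_future, on_done)
+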
+    def run_in_executor(
+        self,
+        executor: Optional[concurrent.futures.Executor],
+        func: Callable[..., _T],
+        *args: Any
+    ) -> "Future[_T]":
         """Runs a function in a ``concurrent.futures.Executor``. If
         ``executor`` is ``None``, the IO loop's default executor will be used.

@@ -508,46 +717,137 @@ class IOLoop(Configurable):

         .. versionadded:: 5.0
         """
-        pass
-
-    def set_default_executor(self, executor: concurrent.futures.Executor
-        ) ->None:
+        if executor is None:
+            if not hasattr(self, "_executor"):
+                from tornado.process import cpu_count
+
+                self._executor = concurrent.futures.ThreadPoolExecutor(
+                    max_workers=(cpu_count() * 5)
+                )  # type: concurrent.futures.Executor
+            executor = self._executor
+        c_future = executor.submit(func, *args)
+        # Concurrent Futures are not usable with await. Wrap this in a
+        # Tornado Future instead, using self.add_future for thread-safety.
+        t_future = Future()  # type: Future[_T]
+        self.add_future(c_future, lambda f: chain_future(f, t_future))
+        return t_future
+
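+    # run_in_executor from a coroutine (a sketch; ``fetch_blocking`` is a
+    # hypothetical synchronous function):
+    #
+    #     async def handler() -> str:
+    #         return await IOLoop.current().run_in_executor(
+    #             None, fetch_blocking, "https://example.com"
+    #         )
+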
+    def set_default_executor(self, executor: concurrent.futures.Executor) -> None:
         """Sets the default executor to use with :meth:`run_in_executor`.

         .. versionadded:: 5.0
         """
-        pass
+        self._executor = executor

-    def _run_callback(self, callback: Callable[[], Any]) ->None:
+    def _run_callback(self, callback: Callable[[], Any]) -> None:
         """Runs a callback with error handling.

         .. versionchanged:: 6.0

            CancelledErrors are no longer logged.
         """
-        pass
-
-    def _discard_future_result(self, future: Future) ->None:
+        try:
+            ret = callback()
+            if ret is not None:
+                from tornado import gen
+
+                # Functions that return Futures typically swallow all
+                # exceptions and store them in the Future.  If a Future
+                # makes it out to the IOLoop, ensure its exception (if any)
+                # gets logged too.
+                try:
+                    ret = gen.convert_yielded(ret)
+                except gen.BadYieldError:
+                    # It's not unusual for add_callback to be used with
+                    # methods returning a non-None and non-yieldable
+                    # result, which should just be ignored.
+                    pass
+                else:
+                    self.add_future(ret, self._discard_future_result)
+        except asyncio.CancelledError:
+            pass
+        except Exception:
+            app_log.error("Exception in callback %r", callback, exc_info=True)
+
+    def _discard_future_result(self, future: Future) -> None:
         """Avoid unhandled-exception warnings from spawned coroutines."""
-        pass
+        future.result()
+
+    def split_fd(
+        self, fd: Union[int, _Selectable]
+    ) -> Tuple[int, Union[int, _Selectable]]:
+        # """Returns an (fd, obj) pair from an ``fd`` parameter.
+
+        # We accept both raw file descriptors and file-like objects as
+        # input to `add_handler` and related methods.  When a file-like
+        # object is passed, we must retain the object itself so we can
+        # close it correctly when the `IOLoop` shuts down, but the
+        # poller interfaces favor file descriptors (they will accept
+        # file-like objects and call ``fileno()`` for you, but they
+        # always return the descriptor itself).
+
+        # This method is provided for use by `IOLoop` subclasses and should
+        # not generally be used by application code.
+
+        # .. versionadded:: 4.0
+        # """
+        if isinstance(fd, int):
+            return fd, fd
+        return fd.fileno(), fd
+
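+    # split_fd on the two accepted input kinds (a sketch; ``loop`` is an
+    # IOLoop instance and ``sock`` a ``socket.socket``):
+    #
+    #     loop.split_fd(sock)           # -> (sock.fileno(), sock)
+    #     loop.split_fd(sock.fileno())  # -> (fd, fd)
+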
+    def close_fd(self, fd: Union[int, _Selectable]) -> None:
+        # """Utility method to close an ``fd``.
+
+        # If ``fd`` is a file-like object, we close it directly; otherwise
+        # we use `os.close`.
+
+        # This method is provided for use by `IOLoop` subclasses (in
+        # implementations of ``IOLoop.close(all_fds=True)`` and should
+        # not generally be used by application code.
+
+        # .. versionadded:: 4.0
+        # """
+        try:
+            if isinstance(fd, int):
+                os.close(fd)
+            else:
+                fd.close()
+        except OSError:
+            pass
+
+    def _register_task(self, f: Future) -> None:
+        self._pending_tasks.add(f)
+
+    def _unregister_task(self, f: Future) -> None:
+        self._pending_tasks.discard(f)


 class _Timeout(object):
     """An IOLoop timeout, a UNIX timestamp and a callback"""
-    __slots__ = ['deadline', 'callback', 'tdeadline']

-    def __init__(self, deadline: float, callback: Callable[[], None],
-        io_loop: IOLoop) ->None:
+    # Reduce memory overhead when there are lots of pending callbacks
+    __slots__ = ["deadline", "callback", "tdeadline"]
+
+    def __init__(
+        self, deadline: float, callback: Callable[[], None], io_loop: IOLoop
+    ) -> None:
         if not isinstance(deadline, numbers.Real):
-            raise TypeError('Unsupported deadline %r' % deadline)
+            raise TypeError("Unsupported deadline %r" % deadline)
         self.deadline = deadline
         self.callback = callback
-        self.tdeadline = deadline, next(io_loop._timeout_counter)
-
-    def __lt__(self, other: '_Timeout') ->bool:
+        self.tdeadline = (
+            deadline,
+            next(io_loop._timeout_counter),
+        )  # type: Tuple[float, int]
+
+    # Comparison methods to sort by deadline, with object id as a tiebreaker
+    # to guarantee a consistent ordering.  The heapq module uses __le__
+    # in python2.5, and __lt__ in 2.6+ (sort() and most other comparisons
+    # use __lt__).
+    def __lt__(self, other: "_Timeout") -> bool:
         return self.tdeadline < other.tdeadline

-    def __le__(self, other: '_Timeout') ->bool:
+    def __le__(self, other: "_Timeout") -> bool:
         return self.tdeadline <= other.tdeadline
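+
+    # Ordering sketch (``cb_a``, ``cb_b``, and ``io_loop`` are hypothetical):
+    # a heap of _Timeout objects pops the earliest deadline first, and the
+    # per-loop counter breaks ties between equal deadlines:
+    #
+    #     heap = [_Timeout(5.0, cb_a, io_loop)]
+    #     heapq.heappush(heap, _Timeout(5.0, cb_b, io_loop))
+    #     heapq.heappop(heap).callback is cb_a  # inserted first, pops first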


@@ -589,33 +889,90 @@ class PeriodicCallback(object):
        in addition to the previous numeric milliseconds.
     """

-    def __init__(self, callback: Callable[[], Optional[Awaitable]],
-        callback_time: Union[datetime.timedelta, float], jitter: float=0
-        ) ->None:
+    def __init__(
+        self,
+        callback: Callable[[], Optional[Awaitable]],
+        callback_time: Union[datetime.timedelta, float],
+        jitter: float = 0,
+    ) -> None:
         self.callback = callback
         if isinstance(callback_time, datetime.timedelta):
-            self.callback_time = callback_time / datetime.timedelta(
-                milliseconds=1)
+            self.callback_time = callback_time / datetime.timedelta(milliseconds=1)
         else:
             if callback_time <= 0:
-                raise ValueError(
-                    'Periodic callback must have a positive callback_time')
+                raise ValueError("Periodic callback must have a positive callback_time")
             self.callback_time = callback_time
         self.jitter = jitter
         self._running = False
-        self._timeout = None
+        self._timeout = None  # type: object

-    def start(self) ->None:
+    def start(self) -> None:
         """Starts the timer."""
-        pass
-
-    def stop(self) ->None:
+        # Looking up the IOLoop here allows to first instantiate the
+        # PeriodicCallback in another thread, then start it using
+        # IOLoop.add_callback().
+        self.io_loop = IOLoop.current()
+        self._running = True
+        self._next_timeout = self.io_loop.time()
+        self._schedule_next()
+
+    def stop(self) -> None:
         """Stops the timer."""
-        pass
+        self._running = False
+        if self._timeout is not None:
+            self.io_loop.remove_timeout(self._timeout)
+            self._timeout = None

-    def is_running(self) ->bool:
+    def is_running(self) -> bool:
         """Returns ``True`` if this `.PeriodicCallback` has been started.

         .. versionadded:: 4.1
         """
-        pass
+        return self._running
+
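+    # Typical usage (a sketch; ``do_housekeeping`` is hypothetical and the
+    # call must come from a thread with a running IOLoop):
+    #
+    #     pc = PeriodicCallback(do_housekeeping, callback_time=500)  # ms
+    #     pc.start()
+    #     # ... later, from the same loop:
+    #     pc.stop()
+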
+    async def _run(self) -> None:
+        if not self._running:
+            return
+        try:
+            val = self.callback()
+            if val is not None and isawaitable(val):
+                await val
+        except Exception:
+            app_log.error("Exception in callback %r", self.callback, exc_info=True)
+        finally:
+            self._schedule_next()
+
+    def _schedule_next(self) -> None:
+        if self._running:
+            self._update_next(self.io_loop.time())
+            self._timeout = self.io_loop.add_timeout(self._next_timeout, self._run)
+
+    def _update_next(self, current_time: float) -> None:
+        callback_time_sec = self.callback_time / 1000.0
+        if self.jitter:
+            # apply jitter fraction
+            callback_time_sec *= 1 + (self.jitter * (random.random() - 0.5))
+        if self._next_timeout <= current_time:
+            # The period should be measured from the start of one call
+            # to the start of the next. If one call takes too long,
+            # skip cycles to get back to a multiple of the original
+            # schedule.
+            self._next_timeout += (
+                math.floor((current_time - self._next_timeout) / callback_time_sec) + 1
+            ) * callback_time_sec
+        else:
+            # If the clock moved backwards, ensure we advance the next
+            # timeout instead of recomputing the same value again.
+            # This may result in long gaps between callbacks if the
+            # clock jumps backwards by a lot, but the far more common
+            # scenario is a small NTP adjustment that should just be
+            # ignored.
+            #
+            # Note that on some systems if time.time() runs slower
+            # than time.monotonic() (most common on windows), we
+            # effectively experience a small backwards time jump on
+            # every iteration because PeriodicCallback uses
+            # time.time() while asyncio schedules callbacks using
+            # time.monotonic().
+            # https://github.com/tornadoweb/tornado/issues/2333
+            self._next_timeout += callback_time_sec
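+
+    # Worked example of the catch-up branch above: with a 1000 ms period
+    # (callback_time_sec == 1.0) and _next_timeout == 10.0, a callback that
+    # finishes at current_time == 12.7 yields floor(2.7 / 1.0) + 1 == 3, so
+    # _next_timeout becomes 13.0; the cycles at 11.0 and 12.0 are skipped
+    # and the schedule stays phase-aligned with the original start time.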
diff --git a/tornado/iostream.py b/tornado/iostream.py
index 1f9c2e76..ee577593 100644
--- a/tornado/iostream.py
+++ b/tornado/iostream.py
@@ -1,3 +1,18 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """Utility classes to write to and read from non-blocking files and sockets.

 Contents:
@@ -7,6 +22,7 @@ Contents:
 * `SSLIOStream`: SSL-aware version of IOStream.
 * `PipeIOStream`: Pipe-based IOStream implementation.
 """
+
 import asyncio
 import collections
 import errno
@@ -17,25 +33,52 @@ import socket
 import ssl
 import sys
 import re
+
 from tornado.concurrent import Future, future_set_result_unless_cancelled
 from tornado import ioloop
 from tornado.log import gen_log
 from tornado.netutil import ssl_wrap_socket, _client_ssl_defaults, _server_ssl_defaults
 from tornado.util import errno_from_exception
+
 import typing
-from typing import Union, Optional, Awaitable, Callable, Pattern, Any, Dict, TypeVar, Tuple
+from typing import (
+    Union,
+    Optional,
+    Awaitable,
+    Callable,
+    Pattern,
+    Any,
+    Dict,
+    TypeVar,
+    Tuple,
+)
 from types import TracebackType
+
 if typing.TYPE_CHECKING:
-    from typing import Deque, List, Type
-_IOStreamType = TypeVar('_IOStreamType', bound='IOStream')
-_ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE,
-    errno.ETIMEDOUT)
-if hasattr(errno, 'WSAECONNRESET'):
-    _ERRNO_CONNRESET += (errno.WSAECONNRESET, errno.WSAECONNABORTED, errno.
-        WSAETIMEDOUT)
-if sys.platform == 'darwin':
-    _ERRNO_CONNRESET += errno.EPROTOTYPE,
-_WINDOWS = sys.platform.startswith('win')
+    from typing import Deque, List, Type  # noqa: F401
+
+_IOStreamType = TypeVar("_IOStreamType", bound="IOStream")
+
+# These errnos indicate that a connection has been abruptly terminated.
+# They should be caught and handled less noisily than other errors.
+_ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE, errno.ETIMEDOUT)
+
+if hasattr(errno, "WSAECONNRESET"):
+    _ERRNO_CONNRESET += (  # type: ignore
+        errno.WSAECONNRESET,  # type: ignore
+        errno.WSAECONNABORTED,  # type: ignore
+        errno.WSAETIMEDOUT,  # type: ignore
+    )
+
+if sys.platform == "darwin":
+    # OSX appears to have a race condition that causes send(2) to return
+    # EPROTOTYPE if called while a socket is being torn down:
+    # http://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
+    # Since the socket is being closed anyway, treat this as an ECONNRESET
+    # instead of an unexpected error.
+    _ERRNO_CONNRESET += (errno.EPROTOTYPE,)  # type: ignore
+
+_WINDOWS = sys.platform.startswith("win")


 class StreamClosedError(IOError):
@@ -52,8 +95,8 @@ class StreamClosedError(IOError):
        Added the ``real_error`` attribute.
     """

-    def __init__(self, real_error: Optional[BaseException]=None) ->None:
-        super().__init__('Stream is closed')
+    def __init__(self, real_error: Optional[BaseException] = None) -> None:
+        super().__init__("Stream is closed")
         self.real_error = real_error


@@ -63,6 +106,7 @@ class UnsatisfiableReadError(Exception):
     Raised by ``read_until`` and ``read_until_regex`` with a ``max_bytes``
     argument.
     """
+
     pass


@@ -76,33 +120,88 @@ class _StreamBuffer(object):
     of data are encountered.
     """

-    def __init__(self) ->None:
-        self._buffers = collections.deque()
+    def __init__(self) -> None:
+        # A sequence of (False, bytearray) and (True, memoryview) objects
+        self._buffers = (
+            collections.deque()
+        )  # type: Deque[Tuple[bool, Union[bytearray, memoryview]]]
+        # Position in the first buffer
         self._first_pos = 0
         self._size = 0

-    def __len__(self) ->int:
+    def __len__(self) -> int:
         return self._size
+
+    # Data above this size will be appended separately instead
+    # of extending an existing bytearray
     _large_buf_threshold = 2048

-    def append(self, data: Union[bytes, bytearray, memoryview]) ->None:
+    def append(self, data: Union[bytes, bytearray, memoryview]) -> None:
         """
         Append the given piece of data (should be a buffer-compatible object).
         """
-        pass
-
-    def peek(self, size: int) ->memoryview:
+        size = len(data)
+        if size > self._large_buf_threshold:
+            if not isinstance(data, memoryview):
+                data = memoryview(data)
+            self._buffers.append((True, data))
+        elif size > 0:
+            if self._buffers:
+                is_memview, b = self._buffers[-1]
+                new_buf = is_memview or len(b) >= self._large_buf_threshold
+            else:
+                new_buf = True
+            if new_buf:
+                self._buffers.append((False, bytearray(data)))
+            else:
+                b += data  # type: ignore
+
+        self._size += size
+
+    def peek(self, size: int) -> memoryview:
         """
         Get a view over at most ``size`` bytes (possibly fewer) at the
         current buffer position.
         """
-        pass
+        assert size > 0
+        try:
+            is_memview, b = self._buffers[0]
+        except IndexError:
+            return memoryview(b"")

-    def advance(self, size: int) ->None:
+        pos = self._first_pos
+        if is_memview:
+            return typing.cast(memoryview, b[pos : pos + size])
+        else:
+            return memoryview(b)[pos : pos + size]
+
+    def advance(self, size: int) -> None:
         """
         Advance the current buffer position by ``size`` bytes.
         """
-        pass
+        assert 0 < size <= self._size
+        self._size -= size
+        pos = self._first_pos
+
+        buffers = self._buffers
+        while buffers and size > 0:
+            is_large, b = buffers[0]
+            b_remain = len(b) - size - pos
+            if b_remain <= 0:
+                buffers.popleft()
+                size -= len(b) - pos
+                pos = 0
+            elif is_large:
+                pos += size
+                size = 0
+            else:
+                pos += size
+                del typing.cast(bytearray, b)[:pos]
+                pos = 0
+                size = 0
+
+        assert size == 0
+        self._first_pos = pos
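+
+    # Round-trip sketch of the internal protocol BaseIOStream relies on:
+    #
+    #     buf = _StreamBuffer()
+    #     buf.append(b"hello")
+    #     view = buf.peek(3)      # memoryview over b"hel" (possibly fewer)
+    #     buf.advance(len(view))
+    #     len(buf)                # == 2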


 class BaseIOStream(object):
@@ -123,8 +222,12 @@ class BaseIOStream(object):

     """

-    def __init__(self, max_buffer_size: Optional[int]=None, read_chunk_size:
-        Optional[int]=None, max_write_buffer_size: Optional[int]=None) ->None:
+    def __init__(
+        self,
+        max_buffer_size: Optional[int] = None,
+        read_chunk_size: Optional[int] = None,
+        max_write_buffer_size: Optional[int] = None,
+    ) -> None:
         """`BaseIOStream` constructor.

         :arg max_buffer_size: Maximum amount of incoming data to buffer;
@@ -143,52 +246,59 @@ class BaseIOStream(object):
         """
         self.io_loop = ioloop.IOLoop.current()
         self.max_buffer_size = max_buffer_size or 104857600
-        self.read_chunk_size = min(read_chunk_size or 65536, self.
-            max_buffer_size // 2)
+        # A chunk size that is too close to max_buffer_size can cause
+        # spurious failures.
+        self.read_chunk_size = min(read_chunk_size or 65536, self.max_buffer_size // 2)
         self.max_write_buffer_size = max_write_buffer_size
-        self.error = None
+        self.error = None  # type: Optional[BaseException]
         self._read_buffer = bytearray()
         self._read_buffer_size = 0
         self._user_read_buffer = False
-        self._after_user_read_buffer = None
+        self._after_user_read_buffer = None  # type: Optional[bytearray]
         self._write_buffer = _StreamBuffer()
         self._total_write_index = 0
         self._total_write_done_index = 0
-        self._read_delimiter = None
-        self._read_regex = None
-        self._read_max_bytes = None
-        self._read_bytes = None
+        self._read_delimiter = None  # type: Optional[bytes]
+        self._read_regex = None  # type: Optional[Pattern]
+        self._read_max_bytes = None  # type: Optional[int]
+        self._read_bytes = None  # type: Optional[int]
         self._read_partial = False
         self._read_until_close = False
-        self._read_future = None
-        self._write_futures = collections.deque()
-        self._close_callback = None
-        self._connect_future = None
-        self._ssl_connect_future = None
+        self._read_future = None  # type: Optional[Future]
+        self._write_futures = (
+            collections.deque()
+        )  # type: Deque[Tuple[int, Future[None]]]
+        self._close_callback = None  # type: Optional[Callable[[], None]]
+        self._connect_future = None  # type: Optional[Future[IOStream]]
+        # _ssl_connect_future should be defined in SSLIOStream
+        # but it's here so we can clean it up in _signal_closed
+        # TODO: refactor that so subclasses can add additional futures
+        # to be cancelled.
+        self._ssl_connect_future = None  # type: Optional[Future[SSLIOStream]]
         self._connecting = False
-        self._state = None
+        self._state = None  # type: Optional[int]
         self._closed = False

-    def fileno(self) ->Union[int, ioloop._Selectable]:
+    def fileno(self) -> Union[int, ioloop._Selectable]:
         """Returns the file descriptor for this stream."""
-        pass
+        raise NotImplementedError()

-    def close_fd(self) ->None:
+    def close_fd(self) -> None:
         """Closes the file underlying this stream.

         ``close_fd`` is called by `BaseIOStream` and should not be called
         elsewhere; other users should call `close` instead.
         """
-        pass
+        raise NotImplementedError()

-    def write_to_fd(self, data: memoryview) ->int:
+    def write_to_fd(self, data: memoryview) -> int:
         """Attempts to write ``data`` to the underlying file.

         Returns the number of bytes written.
         """
-        pass
+        raise NotImplementedError()

-    def read_from_fd(self, buf: Union[bytearray, memoryview]) ->Optional[int]:
+    def read_from_fd(self, buf: Union[bytearray, memoryview]) -> Optional[int]:
         """Attempts to read from the underlying file.

         Reads up to ``len(buf)`` bytes, storing them in the buffer.
@@ -201,9 +311,9 @@ class BaseIOStream(object):
            Interface redesigned to take a buffer and return a number
            of bytes instead of a freshly-allocated object.
         """
-        pass
+        raise NotImplementedError()

-    def get_fd_error(self) ->Optional[Exception]:
+    def get_fd_error(self) -> Optional[Exception]:
         """Returns information about any error on the underlying file.

         This method is called after the `.IOLoop` has signaled an error on the
@@ -211,10 +321,11 @@ class BaseIOStream(object):
         with additional information, or None if no such information is
         available.
         """
-        pass
+        return None

-    def read_until_regex(self, regex: bytes, max_bytes: Optional[int]=None
-        ) ->Awaitable[bytes]:
+    def read_until_regex(
+        self, regex: bytes, max_bytes: Optional[int] = None
+    ) -> Awaitable[bytes]:
         """Asynchronously read until we have matched the given regex.

         The result includes the data that matches the regex and anything
@@ -234,10 +345,26 @@ class BaseIOStream(object):
            `.Future` instead.

         """
-        pass
-
-    def read_until(self, delimiter: bytes, max_bytes: Optional[int]=None
-        ) ->Awaitable[bytes]:
+        future = self._start_read()
+        self._read_regex = re.compile(regex)
+        self._read_max_bytes = max_bytes
+        try:
+            self._try_inline_read()
+        except UnsatisfiableReadError as e:
+            # Handle this the same way as in _handle_events.
+            gen_log.info("Unsatisfiable read, closing connection: %s" % e)
+            self.close(exc_info=e)
+            return future
+        except:
+            # Ensure that the future doesn't log an error because its
+            # failure was never examined.
+            future.add_done_callback(lambda f: f.exception())
+            raise
+        return future
+
+    def read_until(
+        self, delimiter: bytes, max_bytes: Optional[int] = None
+    ) -> Awaitable[bytes]:
         """Asynchronously read until we have found the given delimiter.

         The result includes all the data read including the delimiter.
@@ -255,10 +382,22 @@ class BaseIOStream(object):
            The ``callback`` argument was removed. Use the returned
            `.Future` instead.
         """
-        pass
-
-    def read_bytes(self, num_bytes: int, partial: bool=False) ->Awaitable[bytes
-        ]:
+        future = self._start_read()
+        self._read_delimiter = delimiter
+        self._read_max_bytes = max_bytes
+        try:
+            self._try_inline_read()
+        except UnsatisfiableReadError as e:
+            # Handle this the same way as in _handle_events.
+            gen_log.info("Unsatisfiable read, closing connection: %s" % e)
+            self.close(exc_info=e)
+            return future
+        except:
+            future.add_done_callback(lambda f: f.exception())
+            raise
+        return future
+
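+    # read_until from a coroutine (a sketch; ``stream`` is a connected
+    # IOStream created elsewhere):
+    #
+    #     async def read_line(stream: "IOStream") -> bytes:
+    #         return await stream.read_until(b"\r\n", max_bytes=65536)
+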
+    def read_bytes(self, num_bytes: int, partial: bool = False) -> Awaitable[bytes]:
         """Asynchronously read a number of bytes.

         If ``partial`` is true, data is returned as soon as we have
@@ -275,9 +414,18 @@ class BaseIOStream(object):
            ``partial=True`` for ``streaming_callback``) instead.

         """
-        pass
+        future = self._start_read()
+        assert isinstance(num_bytes, numbers.Integral)
+        self._read_bytes = num_bytes
+        self._read_partial = partial
+        try:
+            self._try_inline_read()
+        except:
+            future.add_done_callback(lambda f: f.exception())
+            raise
+        return future

-    def read_into(self, buf: bytearray, partial: bool=False) ->Awaitable[int]:
+    def read_into(self, buf: bytearray, partial: bool = False) -> Awaitable[int]:
         """Asynchronously read a number of bytes.

         ``buf`` must be a writable buffer into which data will be read.
@@ -294,9 +442,35 @@ class BaseIOStream(object):
            `.Future` instead.

         """
-        pass
+        future = self._start_read()
+
+        # First copy data already in read buffer
+        available_bytes = self._read_buffer_size
+        n = len(buf)
+        if available_bytes >= n:
+            buf[:] = memoryview(self._read_buffer)[:n]
+            del self._read_buffer[:n]
+            self._after_user_read_buffer = self._read_buffer
+        elif available_bytes > 0:
+            buf[:available_bytes] = memoryview(self._read_buffer)[:]
+
+        # Set up the supplied buffer as our temporary read buffer.
+        # The original (if it had any data remaining) has been
+        # saved for later.
+        self._user_read_buffer = True
+        self._read_buffer = buf
+        self._read_buffer_size = available_bytes
+        self._read_bytes = n
+        self._read_partial = partial
+
+        try:
+            self._try_inline_read()
+        except:
+            future.add_done_callback(lambda f: f.exception())
+            raise
+        return future

-    def read_until_close(self) ->Awaitable[bytes]:
+    def read_until_close(self) -> Awaitable[bytes]:
         """Asynchronously reads all data from the socket until it is closed.

         This will buffer all available data until ``max_buffer_size``
@@ -314,9 +488,19 @@ class BaseIOStream(object):
            with ``partial=True`` for ``streaming_callback``) instead.

         """
-        pass
+        future = self._start_read()
+        if self.closed():
+            self._finish_read(self._read_buffer_size)
+            return future
+        self._read_until_close = True
+        try:
+            self._try_inline_read()
+        except:
+            future.add_done_callback(lambda f: f.exception())
+            raise
+        return future

-    def write(self, data: Union[bytes, memoryview]) ->'Future[None]':
+    def write(self, data: Union[bytes, memoryview]) -> "Future[None]":
         """Asynchronously write the given data to this stream.

         This method returns a `.Future` that resolves (with a result
@@ -336,10 +520,29 @@ class BaseIOStream(object):
            `.Future` instead.

         """
-        pass
-
-    def set_close_callback(self, callback: Optional[Callable[[], None]]
-        ) ->None:
+        self._check_closed()
+        if data:
+            if isinstance(data, memoryview):
+                # Make sure that ``len(data) == data.nbytes``
+                data = memoryview(data).cast("B")
+            if (
+                self.max_write_buffer_size is not None
+                and len(self._write_buffer) + len(data) > self.max_write_buffer_size
+            ):
+                raise StreamBufferFullError("Reached maximum write buffer size")
+            self._write_buffer.append(data)
+            self._total_write_index += len(data)
+        future = Future()  # type: Future[None]
+        future.add_done_callback(lambda f: f.exception())
+        self._write_futures.append((self._total_write_index, future))
+        if not self._connecting:
+            self._handle_write()
+            if self._write_buffer:
+                self._add_io_state(self.io_loop.WRITE)
+            self._maybe_add_error_listener()
+        return future
+
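+    # write from a coroutine (a sketch; awaiting the returned Future
+    # resolves once the data has been accepted into the write buffer and
+    # flushed to the fd):
+    #
+    #     async def send_request(stream: "IOStream") -> None:
+    #         await stream.write(b"GET / HTTP/1.0\r\n\r\n")
+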
+    def set_close_callback(self, callback: Optional[Callable[[], None]]) -> None:
         """Call the given callback when the stream is closed.

         This is mostly unnecessary for applications that use the
         `.Future` interface; all outstanding ``Futures`` will resolve
         with a `StreamClosedError` when the stream is closed.

         Unlike other callback-based interfaces, ``set_close_callback``
         was not removed in Tornado 6.0.
         """
-        pass
-
-    def close(self, exc_info: Union[None, bool, BaseException, Tuple[
-        'Optional[Type[BaseException]]', Optional[BaseException], Optional[
-        TracebackType]]]=False) ->None:
+        self._close_callback = callback
+        self._maybe_add_error_listener()
+
+    def close(
+        self,
+        exc_info: Union[
+            None,
+            bool,
+            BaseException,
+            Tuple[
+                "Optional[Type[BaseException]]",
+                Optional[BaseException],
+                Optional[TracebackType],
+            ],
+        ] = False,
+    ) -> None:
         """Close this stream.

         If ``exc_info`` is true, set the ``error`` attribute to the current
         exception from `sys.exc_info` (or if ``exc_info`` is a tuple,
         use that instead of `sys.exc_info`).
         """
-        pass
-
-    def reading(self) ->bool:
+        if not self.closed():
+            if exc_info:
+                if isinstance(exc_info, tuple):
+                    self.error = exc_info[1]
+                elif isinstance(exc_info, BaseException):
+                    self.error = exc_info
+                else:
+                    exc_info = sys.exc_info()
+                    if any(exc_info):
+                        self.error = exc_info[1]
+            if self._read_until_close:
+                self._read_until_close = False
+                self._finish_read(self._read_buffer_size)
+            elif self._read_future is not None:
+                # resolve reads that are pending and ready to complete
+                try:
+                    pos = self._find_read_pos()
+                except UnsatisfiableReadError:
+                    pass
+                else:
+                    if pos is not None:
+                        self._read_from_buffer(pos)
+            if self._state is not None:
+                self.io_loop.remove_handler(self.fileno())
+                self._state = None
+            self.close_fd()
+            self._closed = True
+        self._signal_closed()
+
+    def _signal_closed(self) -> None:
+        futures = []  # type: List[Future]
+        if self._read_future is not None:
+            futures.append(self._read_future)
+            self._read_future = None
+        futures += [future for _, future in self._write_futures]
+        self._write_futures.clear()
+        if self._connect_future is not None:
+            futures.append(self._connect_future)
+            self._connect_future = None
+        for future in futures:
+            if not future.done():
+                future.set_exception(StreamClosedError(real_error=self.error))
+            # Reference the exception to silence warnings. Annoyingly,
+            # this raises if the future was cancelled, but just
+            # returns any other error.
+            try:
+                future.exception()
+            except asyncio.CancelledError:
+                pass
+        if self._ssl_connect_future is not None:
+            # _ssl_connect_future expects to see the real exception (typically
+            # an ssl.SSLError), not just StreamClosedError.
+            if not self._ssl_connect_future.done():
+                if self.error is not None:
+                    self._ssl_connect_future.set_exception(self.error)
+                else:
+                    self._ssl_connect_future.set_exception(StreamClosedError())
+            self._ssl_connect_future.exception()
+            self._ssl_connect_future = None
+        if self._close_callback is not None:
+            cb = self._close_callback
+            self._close_callback = None
+            self.io_loop.add_callback(cb)
+        # Clear the write buffer so its memory can be freed immediately,
+        # even if the IOStream object is kept alive by a reference cycle.
+        # TODO: Clear the read buffer too; it currently breaks some tests.
+        self._write_buffer = None  # type: ignore
+
+    def reading(self) -> bool:
         """Returns ``True`` if we are currently reading from the stream."""
-        pass
+        return self._read_future is not None

-    def writing(self) ->bool:
+    def writing(self) -> bool:
         """Returns ``True`` if we are currently writing to the stream."""
-        pass
+        return bool(self._write_buffer)

-    def closed(self) ->bool:
+    def closed(self) -> bool:
         """Returns ``True`` if the stream has been closed."""
-        pass
+        return self._closed

-    def set_nodelay(self, value: bool) ->None:
+    def set_nodelay(self, value: bool) -> None:
         """Sets the no-delay flag for this stream.

         By default, data written to TCP streams may be held for a time
@@ -391,42 +671,348 @@ class BaseIOStream(object):
         """
         pass

-    def _try_inline_read(self) ->None:
+    def _handle_connect(self) -> None:
+        raise NotImplementedError()
+
+    def _handle_events(self, fd: Union[int, ioloop._Selectable], events: int) -> None:
+        if self.closed():
+            gen_log.warning("Got events for closed stream %s", fd)
+            return
+        try:
+            if self._connecting:
+                # Most IOLoops will report a write failed connect
+                # with the WRITE event, but SelectIOLoop reports a
+                # READ as well so we must check for connecting before
+                # either.
+                self._handle_connect()
+            if self.closed():
+                return
+            if events & self.io_loop.READ:
+                self._handle_read()
+            if self.closed():
+                return
+            if events & self.io_loop.WRITE:
+                self._handle_write()
+            if self.closed():
+                return
+            if events & self.io_loop.ERROR:
+                self.error = self.get_fd_error()
+                # We may have queued up a user callback in _handle_read or
+                # _handle_write, so don't close the IOStream until those
+                # callbacks have had a chance to run.
+                self.io_loop.add_callback(self.close)
+                return
+            state = self.io_loop.ERROR
+            if self.reading():
+                state |= self.io_loop.READ
+            if self.writing():
+                state |= self.io_loop.WRITE
+            if state == self.io_loop.ERROR and self._read_buffer_size == 0:
+                # If the connection is idle, listen for reads too so
+                # we can tell if the connection is closed.  If there is
+                # data in the read buffer we won't run the close callback
+                # yet anyway, so we don't need to listen in this case.
+                state |= self.io_loop.READ
+            if state != self._state:
+                assert (
+                    self._state is not None
+                ), "shouldn't happen: _handle_events without self._state"
+                self._state = state
+                self.io_loop.update_handler(self.fileno(), self._state)
+        except UnsatisfiableReadError as e:
+            gen_log.info("Unsatisfiable read, closing connection: %s" % e)
+            self.close(exc_info=e)
+        except Exception as e:
+            gen_log.error("Uncaught exception, closing connection.", exc_info=True)
+            self.close(exc_info=e)
+            raise
+
+    def _read_to_buffer_loop(self) -> Optional[int]:
+        # This method is called from _handle_read and _try_inline_read.
+        if self._read_bytes is not None:
+            target_bytes = self._read_bytes  # type: Optional[int]
+        elif self._read_max_bytes is not None:
+            target_bytes = self._read_max_bytes
+        elif self.reading():
+            # For read_until without max_bytes, or
+            # read_until_close, read as much as we can before
+            # scanning for the delimiter.
+            target_bytes = None
+        else:
+            target_bytes = 0
+        next_find_pos = 0
+        while not self.closed():
+            # Read from the socket until we get EWOULDBLOCK or equivalent.
+            # SSL sockets do some internal buffering, and if the data is
+            # sitting in the SSL object's buffer select() and friends
+            # can't see it; the only way to find out if it's there is to
+            # try to read it.
+            if self._read_to_buffer() == 0:
+                break
+
+            # If we've read all the bytes we can use, break out of
+            # this loop.
+
+            # If we've reached target_bytes, we know we're done.
+            if target_bytes is not None and self._read_buffer_size >= target_bytes:
+                break
+
+            # Otherwise, we need to call the more expensive find_read_pos.
+            # It's inefficient to do this on every read, so instead
+            # do it on the first read and whenever the read buffer
+            # size has doubled.
+            if self._read_buffer_size >= next_find_pos:
+                pos = self._find_read_pos()
+                if pos is not None:
+                    return pos
+                next_find_pos = self._read_buffer_size * 2
+        return self._find_read_pos()
+
+    def _handle_read(self) -> None:
+        try:
+            pos = self._read_to_buffer_loop()
+        except UnsatisfiableReadError:
+            raise
+        except asyncio.CancelledError:
+            raise
+        except Exception as e:
+            gen_log.warning("error on read: %s" % e)
+            self.close(exc_info=e)
+            return
+        if pos is not None:
+            self._read_from_buffer(pos)
+
+    def _start_read(self) -> Future:
+        if self._read_future is not None:
+            # It is an error to start a read while a prior read is unresolved.
+            # However, if the prior read is unresolved because the stream was
+            # closed without satisfying it, it's better to raise
+            # StreamClosedError instead of AssertionError. In particular, this
+            # arises in harmless cases in http1connection.py and
+            # an AssertionError would be logged noisily.
+            #
+            # On the other hand, it is legal to start a new read while the
+            # stream is closed, in case the read can be satisfied from the
+            # read buffer. So we only want to check the closed status of the
+            # stream if we need to decide what kind of error to raise for
+            # "already reading".
+            #
+            # These conditions have proven difficult to test; we have no
+            # unittests that reliably verify this behavior so be careful
+            # when making changes here. See #2651 and #2719.
+            self._check_closed()
+            assert self._read_future is None, "Already reading"
+        self._read_future = Future()
+        return self._read_future
+
+    def _finish_read(self, size: int) -> None:
+        if self._user_read_buffer:
+            self._read_buffer = self._after_user_read_buffer or bytearray()
+            self._after_user_read_buffer = None
+            self._read_buffer_size = len(self._read_buffer)
+            self._user_read_buffer = False
+            result = size  # type: Union[int, bytes]
+        else:
+            result = self._consume(size)
+        if self._read_future is not None:
+            future = self._read_future
+            self._read_future = None
+            future_set_result_unless_cancelled(future, result)
+        self._maybe_add_error_listener()
+
+    def _try_inline_read(self) -> None:
         """Attempt to complete the current read operation from buffered data.

         If the read can be completed without blocking, schedules the
         read callback on the next IOLoop iteration; otherwise starts
         listening for reads on the socket.
         """
-        pass
-
-    def _read_to_buffer(self) ->Optional[int]:
+        # See if we've already got the data from a previous read
+        pos = self._find_read_pos()
+        if pos is not None:
+            self._read_from_buffer(pos)
+            return
+        self._check_closed()
+        pos = self._read_to_buffer_loop()
+        if pos is not None:
+            self._read_from_buffer(pos)
+            return
+        # We couldn't satisfy the read inline, so make sure we're
+        # listening for new data unless the stream is closed.
+        if not self.closed():
+            self._add_io_state(ioloop.IOLoop.READ)
+
+    def _read_to_buffer(self) -> Optional[int]:
         """Reads from the socket and appends the result to the read buffer.

         Returns the number of bytes read.  Returns 0 if there is nothing
         to read (i.e. the read returns EWOULDBLOCK or equivalent).  On
         error closes the socket and raises an exception.
         """
-        pass
-
-    def _read_from_buffer(self, pos: int) ->None:
+        try:
+            while True:
+                try:
+                    if self._user_read_buffer:
+                        buf = memoryview(self._read_buffer)[
+                            self._read_buffer_size :
+                        ]  # type: Union[memoryview, bytearray]
+                    else:
+                        buf = bytearray(self.read_chunk_size)
+                    bytes_read = self.read_from_fd(buf)
+                except (socket.error, IOError, OSError) as e:
+                    # ssl.SSLError is a subclass of socket.error
+                    if self._is_connreset(e):
+                        # Treat ECONNRESET as a connection close rather than
+                        # an error to minimize log spam  (the exception will
+                        # be available on self.error for apps that care).
+                        self.close(exc_info=e)
+                        return None
+                    self.close(exc_info=e)
+                    raise
+                break
+            if bytes_read is None:
+                return 0
+            elif bytes_read == 0:
+                self.close()
+                return 0
+            if not self._user_read_buffer:
+                self._read_buffer += memoryview(buf)[:bytes_read]
+            self._read_buffer_size += bytes_read
+        finally:
+            # Break the reference to buf so we don't waste a chunk's worth of
+            # memory in case an exception hangs on to our stack frame.
+            del buf
+        if self._read_buffer_size > self.max_buffer_size:
+            gen_log.error("Reached maximum read buffer size")
+            self.close()
+            raise StreamBufferFullError("Reached maximum read buffer size")
+        return bytes_read
+
+    def _read_from_buffer(self, pos: int) -> None:
         """Attempts to complete the currently-pending read from the buffer.

         The argument is either a position in the read buffer or None,
         as returned by _find_read_pos.
         """
-        pass
+        self._read_bytes = self._read_delimiter = self._read_regex = None
+        self._read_partial = False
+        self._finish_read(pos)

-    def _find_read_pos(self) ->Optional[int]:
+    def _find_read_pos(self) -> Optional[int]:
         """Attempts to find a position in the read buffer that satisfies
         the currently-pending read.

         Returns a position in the buffer if the current read can be satisfied,
         or None if it cannot.
         """
-        pass
-
-    def _add_io_state(self, state: int) ->None:
+        if self._read_bytes is not None and (
+            self._read_buffer_size >= self._read_bytes
+            or (self._read_partial and self._read_buffer_size > 0)
+        ):
+            num_bytes = min(self._read_bytes, self._read_buffer_size)
+            return num_bytes
+        elif self._read_delimiter is not None:
+            # Multi-byte delimiters (e.g. '\r\n') may straddle two
+            # chunks in the read buffer, so we can't easily find them
+            # without collapsing the buffer.  However, since protocols
+            # using delimited reads (as opposed to reads of a known
+            # length) tend to be "line" oriented, the delimiter is likely
+            # to be in the first few chunks.  Merge the buffer gradually
+            # since large merges are relatively expensive and get undone in
+            # _consume().
+            if self._read_buffer:
+                loc = self._read_buffer.find(self._read_delimiter)
+                if loc != -1:
+                    delimiter_len = len(self._read_delimiter)
+                    self._check_max_bytes(self._read_delimiter, loc + delimiter_len)
+                    return loc + delimiter_len
+                self._check_max_bytes(self._read_delimiter, self._read_buffer_size)
+        elif self._read_regex is not None:
+            if self._read_buffer:
+                m = self._read_regex.search(self._read_buffer)
+                if m is not None:
+                    loc = m.end()
+                    self._check_max_bytes(self._read_regex, loc)
+                    return loc
+                self._check_max_bytes(self._read_regex, self._read_buffer_size)
+        return None
+
+    def _check_max_bytes(self, delimiter: Union[bytes, Pattern], size: int) -> None:
+        if self._read_max_bytes is not None and size > self._read_max_bytes:
+            raise UnsatisfiableReadError(
+                "delimiter %r not found within %d bytes"
+                % (delimiter, self._read_max_bytes)
+            )
+
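The delimiter scan above is what backs ``read_until`` and ``read_until_regex``, and ``_check_max_bytes`` is the enforcement point for their ``max_bytes`` argument. A minimal client-side sketch of the behaviour (the address is a placeholder for any line-oriented server):

    # Sketch only: assumes a line-oriented server is listening on port 8888.
    import socket

    from tornado.ioloop import IOLoop
    from tornado.iostream import IOStream, UnsatisfiableReadError

    async def read_one_line() -> None:
        stream = IOStream(socket.socket())
        await stream.connect(("127.0.0.1", 8888))
        try:
            # _find_read_pos returns loc + len(delimiter), so the
            # delimiter itself is included in the result.
            line = await stream.read_until(b"\n", max_bytes=4096)
            print(line)
        except UnsatisfiableReadError:
            # Raised via _check_max_bytes once 4096 bytes are buffered
            # without the delimiter appearing.
            print("no newline within 4096 bytes")
        finally:
            stream.close()

    IOLoop.current().run_sync(read_one_line)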
+    def _handle_write(self) -> None:
+        while True:
+            size = len(self._write_buffer)
+            if not size:
+                break
+            assert size > 0
+            try:
+                if _WINDOWS:
+                    # On Windows, socket.send blows up if given a
+                    # write buffer that's too large, instead of just
+                    # returning the number of bytes it was able to
+                    # process.  Therefore we must not call socket.send
+                    # with more than 128KB at a time.
+                    size = 128 * 1024
+
+                num_bytes = self.write_to_fd(self._write_buffer.peek(size))
+                if num_bytes == 0:
+                    break
+                self._write_buffer.advance(num_bytes)
+                self._total_write_done_index += num_bytes
+            except BlockingIOError:
+                break
+            except (socket.error, IOError, OSError) as e:
+                if not self._is_connreset(e):
+                    # Broken pipe errors are usually caused by a connection
+                    # reset, and it's better not to log EPIPE errors to
+                    # minimize log spam.
+                    gen_log.warning("Write error on %s: %s", self.fileno(), e)
+                self.close(exc_info=e)
+                return
+
+        while self._write_futures:
+            index, future = self._write_futures[0]
+            if index > self._total_write_done_index:
+                break
+            self._write_futures.popleft()
+            future_set_result_unless_cancelled(future, None)
+
+    def _consume(self, loc: int) -> bytes:
+        # Consume loc bytes from the read buffer and return them
+        if loc == 0:
+            return b""
+        assert loc <= self._read_buffer_size
+        # Slice the bytearray buffer into bytes, without intermediate copying
+        b = (memoryview(self._read_buffer)[:loc]).tobytes()
+        self._read_buffer_size -= loc
+        del self._read_buffer[:loc]
+        return b
+
+    def _check_closed(self) -> None:
+        if self.closed():
+            raise StreamClosedError(real_error=self.error)
+
+    def _maybe_add_error_listener(self) -> None:
+        # This method is part of an optimization: to detect a connection that
+        # is closed when we're not actively reading or writing, we must listen
+        # for read events.  However, it is inefficient to do this when the
+        # connection is first established because we are going to read or write
+        # immediately anyway.  Instead, we insert checks at various times to
+        # see if the connection is idle and add the read listener then.
+        if self._state is None or self._state == ioloop.IOLoop.ERROR:
+            if (
+                not self.closed()
+                and self._read_buffer_size == 0
+                and self._close_callback is not None
+            ):
+                self._add_io_state(ioloop.IOLoop.READ)
+
+    def _add_io_state(self, state: int) -> None:
         """Adds `state` (IOLoop.{READ,WRITE} flags) to our event handler.

         Implementation notes: Reads and writes have a fast path and a
@@ -445,18 +1031,29 @@ class BaseIOStream(object):
         TODO: reevaluate this now that callbacks are gone.

         """
-        pass
-
-    def _is_connreset(self, exc: BaseException) ->bool:
+        if self.closed():
+            # connection has been closed, so there can be no future events
+            return
+        if self._state is None:
+            self._state = ioloop.IOLoop.ERROR | state
+            self.io_loop.add_handler(self.fileno(), self._handle_events, self._state)
+        elif not self._state & state:
+            self._state = self._state | state
+            self.io_loop.update_handler(self.fileno(), self._state)
+
+    def _is_connreset(self, exc: BaseException) -> bool:
         """Return ``True`` if exc is ECONNRESET or equivalent.

         May be overridden in subclasses.
         """
-        pass
+        return (
+            isinstance(exc, (socket.error, IOError))
+            and errno_from_exception(exc) in _ERRNO_CONNRESET
+        )


 class IOStream(BaseIOStream):
-    """Socket-based `IOStream` implementation.
+    r"""Socket-based `IOStream` implementation.

     This class supports the read and write methods from `BaseIOStream`
     plus a `connect` method.
@@ -479,10 +1076,10 @@ class IOStream(BaseIOStream):
             s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
             stream = tornado.iostream.IOStream(s)
             await stream.connect(("friendfeed.com", 80))
-            await stream.write(b"GET / HTTP/1.0\\r\\nHost: friendfeed.com\\r\\n\\r\\n")
-            header_data = await stream.read_until(b"\\r\\n\\r\\n")
+            await stream.write(b"GET / HTTP/1.0\r\nHost: friendfeed.com\r\n\r\n")
+            header_data = await stream.read_until(b"\r\n\r\n")
             headers = {}
-            for line in header_data.split(b"\\r\\n"):
+            for line in header_data.split(b"\r\n"):
                 parts = line.split(b":")
                 if len(parts) == 2:
                     headers[parts[0].strip()] = parts[1].strip()
@@ -498,14 +1095,41 @@ class IOStream(BaseIOStream):

     """

-    def __init__(self, socket: socket.socket, *args: Any, **kwargs: Any
-        ) ->None:
+    def __init__(self, socket: socket.socket, *args: Any, **kwargs: Any) -> None:
         self.socket = socket
         self.socket.setblocking(False)
         super().__init__(*args, **kwargs)

-    def connect(self: _IOStreamType, address: Any, server_hostname:
-        Optional[str]=None) ->'Future[_IOStreamType]':
+    def fileno(self) -> Union[int, ioloop._Selectable]:
+        return self.socket
+
+    def close_fd(self) -> None:
+        self.socket.close()
+        self.socket = None  # type: ignore
+
+    def get_fd_error(self) -> Optional[Exception]:
+        errno = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
+        return socket.error(errno, os.strerror(errno))
+
+    def read_from_fd(self, buf: Union[bytearray, memoryview]) -> Optional[int]:
+        try:
+            return self.socket.recv_into(buf, len(buf))
+        except BlockingIOError:
+            return None
+        finally:
+            del buf
+
+    def write_to_fd(self, data: memoryview) -> int:
+        try:
+            return self.socket.send(data)  # type: ignore
+        finally:
+            # Avoid keeping a reference to data, which can be a memoryview.
+            # See https://github.com/tornadoweb/tornado/pull/2008
+            del data
+
+    def connect(
+        self: _IOStreamType, address: Any, server_hostname: Optional[str] = None
+    ) -> "Future[_IOStreamType]":
         """Connects the socket to a remote address without blocking.

         May only be called if the socket passed to the constructor was
@@ -550,11 +1174,33 @@ class IOStream(BaseIOStream):
            `.Future` instead.

         """
-        pass
-
-    def start_tls(self, server_side: bool, ssl_options: Optional[Union[Dict
-        [str, Any], ssl.SSLContext]]=None, server_hostname: Optional[str]=None
-        ) ->Awaitable['SSLIOStream']:
+        self._connecting = True
+        future = Future()  # type: Future[_IOStreamType]
+        self._connect_future = typing.cast("Future[IOStream]", future)
+        try:
+            self.socket.connect(address)
+        except BlockingIOError:
+            # In non-blocking mode we expect connect() to raise an
+            # exception with EINPROGRESS or EWOULDBLOCK.
+            pass
+        except socket.error as e:
+            # On freebsd, other errors such as ECONNREFUSED may be
+            # returned immediately when attempting to connect to
+            # localhost, so handle them the same way as an error
+            # reported later in _handle_connect.
+            if future is None:
+                gen_log.warning("Connect error on fd %s: %s", self.socket.fileno(), e)
+            self.close(exc_info=e)
+            return future
+        self._add_io_state(self.io_loop.WRITE)
+        return future
+
+    def start_tls(
+        self,
+        server_side: bool,
+        ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
+        server_hostname: Optional[str] = None,
+    ) -> Awaitable["SSLIOStream"]:
         """Convert this `IOStream` to an `SSLIOStream`.

         This enables protocols that begin in clear-text mode and
@@ -589,7 +1235,85 @@ class IOStream(BaseIOStream):
            ``ssl_options=dict(cert_reqs=ssl.CERT_NONE)`` or a
            suitably-configured `ssl.SSLContext` to disable.
         """
-        pass
+        if (
+            self._read_future
+            or self._write_futures
+            or self._connect_future
+            or self._closed
+            or self._read_buffer
+            or self._write_buffer
+        ):
+            raise ValueError("IOStream is not idle; cannot convert to SSL")
+        if ssl_options is None:
+            if server_side:
+                ssl_options = _server_ssl_defaults
+            else:
+                ssl_options = _client_ssl_defaults
+
+        socket = self.socket
+        self.io_loop.remove_handler(socket)
+        self.socket = None  # type: ignore
+        socket = ssl_wrap_socket(
+            socket,
+            ssl_options,
+            server_hostname=server_hostname,
+            server_side=server_side,
+            do_handshake_on_connect=False,
+        )
+        orig_close_callback = self._close_callback
+        self._close_callback = None
+
+        future = Future()  # type: Future[SSLIOStream]
+        ssl_stream = SSLIOStream(socket, ssl_options=ssl_options)
+        ssl_stream.set_close_callback(orig_close_callback)
+        ssl_stream._ssl_connect_future = future
+        ssl_stream.max_buffer_size = self.max_buffer_size
+        ssl_stream.read_chunk_size = self.read_chunk_size
+        return future
+
+    def _handle_connect(self) -> None:
+        try:
+            err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
+        except socket.error as e:
+            # Hurd doesn't allow SO_ERROR for loopback sockets because all
+            # errors for such sockets are reported synchronously.
+            if errno_from_exception(e) == errno.ENOPROTOOPT:
+                err = 0
+        if err != 0:
+            self.error = socket.error(err, os.strerror(err))
+            # IOLoop implementations may vary: some of them return
+            # an error state before the socket becomes writable, so
+            # in that case a connection failure would be handled by the
+            # error path in _handle_events instead of here.
+            if self._connect_future is None:
+                gen_log.warning(
+                    "Connect error on fd %s: %s",
+                    self.socket.fileno(),
+                    errno.errorcode[err],
+                )
+            self.close()
+            return
+        if self._connect_future is not None:
+            future = self._connect_future
+            self._connect_future = None
+            future_set_result_unless_cancelled(future, self)
+        self._connecting = False
+
+    def set_nodelay(self, value: bool) -> None:
+        if self.socket is not None and self.socket.family in (
+            socket.AF_INET,
+            socket.AF_INET6,
+        ):
+            try:
+                self.socket.setsockopt(
+                    socket.IPPROTO_TCP, socket.TCP_NODELAY, 1 if value else 0
+                )
+            except socket.error as e:
+                # Sometimes setsockopt will fail if the socket is closed
+                # at the wrong time. This can happen with HTTPServer
+                # resetting the value to ``False`` between requests.
+                if e.errno != errno.EINVAL and not self._is_connreset(e):
+                    raise
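
The ``start_tls`` method above is the hook for STARTTLS-style protocols. A minimal sketch of the client side, assuming a hypothetical server at ``mail.example.com:587`` that sends exactly one line per reply (so the buffers are empty at the moment of the upgrade, as the idle check requires):

    import socket

    from tornado.ioloop import IOLoop
    from tornado.iostream import IOStream

    async def starttls_client() -> None:
        stream = IOStream(socket.socket())
        await stream.connect(("mail.example.com", 587))
        await stream.read_until(b"\r\n")   # server greeting
        await stream.write(b"STARTTLS\r\n")
        await stream.read_until(b"\r\n")   # "go ahead" reply
        # Buffers must be empty here, or start_tls raises ValueError.
        tls_stream = await stream.start_tls(
            server_side=False, server_hostname="mail.example.com"
        )
        tls_stream.close()

    IOLoop.current().run_sync(starttls_client)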


 class SSLIOStream(IOStream):
@@ -603,27 +1327,161 @@ class SSLIOStream(IOStream):
     before constructing the `SSLIOStream`.  Unconnected sockets will be
     wrapped when `IOStream.connect` is finished.
     """
-    socket = None

-    def __init__(self, *args: Any, **kwargs: Any) ->None:
+    socket = None  # type: ssl.SSLSocket
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
         """The ``ssl_options`` keyword argument may either be an
         `ssl.SSLContext` object or a dictionary of keyword arguments
         for `ssl.SSLContext.wrap_socket`.
         """
-        self._ssl_options = kwargs.pop('ssl_options', _client_ssl_defaults)
+        self._ssl_options = kwargs.pop("ssl_options", _client_ssl_defaults)
         super().__init__(*args, **kwargs)
         self._ssl_accepting = True
         self._handshake_reading = False
         self._handshake_writing = False
-        self._server_hostname = None
+        self._server_hostname = None  # type: Optional[str]
+
+        # If the socket is already connected, attempt to start the handshake.
         try:
             self.socket.getpeername()
         except socket.error:
             pass
         else:
+            # Indirectly start the handshake, which will run on the next
+            # IOLoop iteration and then the real IO state will be set in
+            # _handle_events.
             self._add_io_state(self.io_loop.WRITE)

-    def wait_for_handshake(self) ->'Future[SSLIOStream]':
+    def reading(self) -> bool:
+        return self._handshake_reading or super().reading()
+
+    def writing(self) -> bool:
+        return self._handshake_writing or super().writing()
+
+    def _do_ssl_handshake(self) -> None:
+        # Based on code from test_ssl.py in the python stdlib
+        try:
+            self._handshake_reading = False
+            self._handshake_writing = False
+            self.socket.do_handshake()
+        except ssl.SSLError as err:
+            if err.args[0] == ssl.SSL_ERROR_WANT_READ:
+                self._handshake_reading = True
+                return
+            elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE:
+                self._handshake_writing = True
+                return
+            elif err.args[0] in (ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_ZERO_RETURN):
+                return self.close(exc_info=err)
+            elif err.args[0] in (ssl.SSL_ERROR_SSL, ssl.SSL_ERROR_SYSCALL):
+                try:
+                    peer = self.socket.getpeername()
+                except Exception:
+                    peer = "(not connected)"
+                gen_log.warning(
+                    "SSL Error on %s %s: %s", self.socket.fileno(), peer, err
+                )
+                return self.close(exc_info=err)
+            raise
+        except ssl.CertificateError as err:
+            # CertificateError can happen during handshake (hostname
+            # verification) and should be passed to the user. Starting
+            # in Python 3.7, this error is a subclass of SSLError
+            # and will be handled by the previous block instead.
+            return self.close(exc_info=err)
+        except socket.error as err:
+            # Some port scans (e.g. nmap in -sT mode) have been known
+            # to cause do_handshake to raise EBADF and ENOTCONN, so make
+            # those errors quiet as well.
+            # https://groups.google.com/forum/?fromgroups#!topic/python-tornado/ApucKJat1_0
+            # Errno 0 is also possible in some cases (nc -z).
+            # https://github.com/tornadoweb/tornado/issues/2504
+            if self._is_connreset(err) or err.args[0] in (
+                0,
+                errno.EBADF,
+                errno.ENOTCONN,
+            ):
+                return self.close(exc_info=err)
+            raise
+        except AttributeError as err:
+            # On Linux, if the connection was reset before the call to
+            # wrap_socket, do_handshake will fail with an
+            # AttributeError.
+            return self.close(exc_info=err)
+        else:
+            self._ssl_accepting = False
+            # Prior to the introduction of SNI, this is where we would check
+            # the server's claimed hostname.
+            assert ssl.HAS_SNI
+            self._finish_ssl_connect()
+
+    def _finish_ssl_connect(self) -> None:
+        if self._ssl_connect_future is not None:
+            future = self._ssl_connect_future
+            self._ssl_connect_future = None
+            future_set_result_unless_cancelled(future, self)
+
+    def _handle_read(self) -> None:
+        if self._ssl_accepting:
+            self._do_ssl_handshake()
+            return
+        super()._handle_read()
+
+    def _handle_write(self) -> None:
+        if self._ssl_accepting:
+            self._do_ssl_handshake()
+            return
+        super()._handle_write()
+
+    def connect(
+        self, address: Tuple, server_hostname: Optional[str] = None
+    ) -> "Future[SSLIOStream]":
+        self._server_hostname = server_hostname
+        # Ignore the result of connect(). If it fails,
+        # wait_for_handshake will raise an error too. This is
+        # necessary for the old semantics of the connect callback
+        # (which takes no arguments). In 6.0 this can be refactored to
+        # be a regular coroutine.
+        # TODO: This is trickier than it looks, since if write()
+        # is called with a connect() pending, we want the connect
+        # to resolve before the write. Or do we care about this?
+        # (There's a test for it, but I think in practice users
+        # either wait for the connect before performing a write or
+        # they don't care about the connect Future at all)
+        fut = super().connect(address)
+        fut.add_done_callback(lambda f: f.exception())
+        return self.wait_for_handshake()
+
+    def _handle_connect(self) -> None:
+        # Call the superclass method to check for errors.
+        super()._handle_connect()
+        if self.closed():
+            return
+        # When the connection is complete, wrap the socket for SSL
+        # traffic.  Note that we do this by overriding _handle_connect
+        # instead of by passing a callback to super().connect because
+        # user callbacks are enqueued asynchronously on the IOLoop,
+        # but since _handle_events calls _handle_connect immediately
+        # followed by _handle_write we need this to be synchronous.
+        #
+        # The IOLoop will get confused if we swap out self.socket while the
+        # fd is registered, so remove it now and re-register after
+        # wrap_socket().
+        self.io_loop.remove_handler(self.socket)
+        old_state = self._state
+        assert old_state is not None
+        self._state = None
+        self.socket = ssl_wrap_socket(
+            self.socket,
+            self._ssl_options,
+            server_hostname=self._server_hostname,
+            do_handshake_on_connect=False,
+            server_side=False,
+        )
+        self._add_io_state(old_state)
+
+    def wait_for_handshake(self) -> "Future[SSLIOStream]":
         """Wait for the initial SSL handshake to complete.

         If a ``callback`` is given, it will be called with no
@@ -648,7 +1506,66 @@ class SSLIOStream(IOStream):
            `.Future` instead.

         """
-        pass
+        if self._ssl_connect_future is not None:
+            raise RuntimeError("Already waiting")
+        future = self._ssl_connect_future = Future()
+        if not self._ssl_accepting:
+            self._finish_ssl_connect()
+        return future
+
+    def write_to_fd(self, data: memoryview) -> int:
+        # Clip the buffer size at 1GB, since SSL sockets only support up to
+        # 2GB. This change in behaviour is transparent, since the function is
+        # already expected to (possibly) write less than the provided buffer.
+        if len(data) >> 30:
+            data = memoryview(data)[: 1 << 30]
+        try:
+            return self.socket.send(data)  # type: ignore
+        except ssl.SSLError as e:
+            if e.args[0] == ssl.SSL_ERROR_WANT_WRITE:
+                # In Python 3.5+, SSLSocket.send raises a WANT_WRITE error if
+                # the socket is not writeable; we need to transform this into
+                # an EWOULDBLOCK socket.error or a zero return value,
+                # either of which will be recognized by the caller of this
+                # method. Prior to Python 3.5, an unwriteable socket would
+                # simply return 0 bytes written.
+                return 0
+            raise
+        finally:
+            # Avoid keeping a reference to data, which can be a memoryview.
+            # See https://github.com/tornadoweb/tornado/pull/2008
+            del data
+
+    def read_from_fd(self, buf: Union[bytearray, memoryview]) -> Optional[int]:
+        try:
+            if self._ssl_accepting:
+                # If the handshake hasn't finished yet, there can't be anything
+                # to read (attempting to read may or may not raise an exception
+                # depending on the SSL version)
+                return None
+            # Clip the buffer size at 1GB, since SSL sockets only support up
+            # to 2GB. This change in behaviour is transparent, since the
+            # function is already expected to (possibly) read less than the
+            # provided buffer.
+            if len(buf) >> 30:
+                buf = memoryview(buf)[: 1 << 30]
+            try:
+                return self.socket.recv_into(buf, len(buf))
+            except ssl.SSLError as e:
+                # SSLError is a subclass of socket.error, so this except
+                # block must come first.
+                if e.args[0] == ssl.SSL_ERROR_WANT_READ:
+                    return None
+                else:
+                    raise
+            except BlockingIOError:
+                return None
+        finally:
+            del buf
+
+    def _is_connreset(self, e: BaseException) -> bool:
+        if isinstance(e, ssl.SSLError) and e.args[0] == ssl.SSL_ERROR_EOF:
+            return True
+        return super()._is_connreset(e)


 class PipeIOStream(BaseIOStream):
@@ -662,10 +1579,49 @@ class PipeIOStream(BaseIOStream):
     ``PipeIOStream`` is only available on Unix-based platforms.
     """

-    def __init__(self, fd: int, *args: Any, **kwargs: Any) ->None:
+    def __init__(self, fd: int, *args: Any, **kwargs: Any) -> None:
         self.fd = fd
-        self._fio = io.FileIO(self.fd, 'r+')
-        if sys.platform == 'win32':
-            raise AssertionError('PipeIOStream is not supported on Windows')
+        self._fio = io.FileIO(self.fd, "r+")
+        if sys.platform == "win32":
+            # The form and placement of this assertion is important to mypy.
+            # A plain assert statement isn't recognized here. If the assertion
+            # were earlier it would worry that the attributes of self aren't
+            # set on Windows. If it were missing it would complain about
+            # the absence of the set_blocking function.
+            raise AssertionError("PipeIOStream is not supported on Windows")
         os.set_blocking(fd, False)
         super().__init__(*args, **kwargs)
+
+    def fileno(self) -> int:
+        return self.fd
+
+    def close_fd(self) -> None:
+        self._fio.close()
+
+    def write_to_fd(self, data: memoryview) -> int:
+        try:
+            return os.write(self.fd, data)  # type: ignore
+        finally:
+            # Avoid keeping a reference to data, which can be a memoryview.
+            # See https://github.com/tornadoweb/tornado/pull/2008
+            del data
+
+    def read_from_fd(self, buf: Union[bytearray, memoryview]) -> Optional[int]:
+        try:
+            return self._fio.readinto(buf)  # type: ignore
+        except (IOError, OSError) as e:
+            if errno_from_exception(e) == errno.EBADF:
+                # If the writing half of a pipe is closed, select will
+                # report it as readable but reads will fail with EBADF.
+                self.close(exc_info=e)
+                return None
+            else:
+                raise
+        finally:
+            del buf
+
+
+def doctests() -> Any:
+    import doctest
+
+    return doctest.DocTestSuite()
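
For reference, a small Unix-only sketch wiring both halves of an ``os.pipe()`` together with ``PipeIOStream``:

    import os

    from tornado.ioloop import IOLoop
    from tornado.iostream import PipeIOStream

    async def pipe_roundtrip() -> None:
        r, w = os.pipe()
        reader, writer = PipeIOStream(r), PipeIOStream(w)
        await writer.write(b"hello\n")
        assert await reader.read_until(b"\n") == b"hello\n"
        writer.close()  # close_fd() closes the underlying FileIO
        reader.close()

    IOLoop.current().run_sync(pipe_roundtrip)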
diff --git a/tornado/locale.py b/tornado/locale.py
index 201f720a..c5526703 100644
--- a/tornado/locale.py
+++ b/tornado/locale.py
@@ -1,3 +1,17 @@
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """Translation methods for generating localized strings.

 To load a locale and generate a translated string::
@@ -22,6 +36,7 @@ CSV format) or `load_gettext_translations` (which uses the ``.mo`` format
 supported by `gettext` and related tools).  If neither method is called,
 the `Locale.translate` method will simply return the original string.
 """
+
 import codecs
 import csv
 import datetime
@@ -29,18 +44,22 @@ import gettext
 import glob
 import os
 import re
+
 from tornado import escape
 from tornado.log import gen_log
+
 from tornado._locale_data import LOCALE_NAMES
+
 from typing import Iterable, Any, Union, Dict, Optional
-_default_locale = 'en_US'
-_translations = {}
+
+_default_locale = "en_US"
+_translations = {}  # type: Dict[str, Any]
 _supported_locales = frozenset([_default_locale])
 _use_gettext = False
-CONTEXT_SEPARATOR = '\x04'
+CONTEXT_SEPARATOR = "\x04"


-def get(*locale_codes: str) ->'Locale':
+def get(*locale_codes: str) -> "Locale":
     """Returns the closest match for the given locale codes.

     We iterate over all given locale codes in order. If we have a tight
@@ -51,10 +70,10 @@ def get(*locale_codes: str) ->'Locale':
     the specified locales. You can change the default locale with
     `set_default_locale()`.
     """
-    pass
+    return Locale.get_closest(*locale_codes)


-def set_default_locale(code: str) ->None:
+def set_default_locale(code: str) -> None:
     """Sets the default locale.

     The default locale is assumed to be the language used for all strings
@@ -62,10 +81,13 @@ def set_default_locale(code: str) ->None:
     the default locale to the destination locale. Consequently, you don't
     need to create a translation file for the default locale.
     """
-    pass
+    global _default_locale
+    global _supported_locales
+    _default_locale = code
+    _supported_locales = frozenset(list(_translations.keys()) + [_default_locale])


-def load_translations(directory: str, encoding: Optional[str]=None) ->None:
+def load_translations(directory: str, encoding: Optional[str] = None) -> None:
     """Loads translations from CSV files in a directory.

     Translations are strings with optional Python-style named placeholders
@@ -100,10 +122,59 @@ def load_translations(directory: str, encoding: Optional[str]=None) ->None:
        Added ``encoding`` parameter. Added support for BOM-based encoding
        detection, UTF-16, and UTF-8-with-BOM.
     """
-    pass
-
-
-def load_gettext_translations(directory: str, domain: str) ->None:
+    global _translations
+    global _supported_locales
+    _translations = {}
+    for path in os.listdir(directory):
+        if not path.endswith(".csv"):
+            continue
+        locale, extension = path.split(".")
+        if not re.match("[a-z]+(_[A-Z]+)?$", locale):
+            gen_log.error(
+                "Unrecognized locale %r (path: %s)",
+                locale,
+                os.path.join(directory, path),
+            )
+            continue
+        full_path = os.path.join(directory, path)
+        if encoding is None:
+            # Try to autodetect encoding based on the BOM.
+            with open(full_path, "rb") as bf:
+                data = bf.read(len(codecs.BOM_UTF16_LE))
+            if data in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+                encoding = "utf-16"
+            else:
+                # utf-8-sig is "utf-8 with optional BOM". It's discouraged
+                # in most cases but is common with CSV files because Excel
+                # cannot read utf-8 files without a BOM.
+                encoding = "utf-8-sig"
+        # python 3: csv.reader requires a file open in text mode.
+        # Specify an encoding to avoid dependence on $LANG environment variable.
+        with open(full_path, encoding=encoding) as f:
+            _translations[locale] = {}
+            for i, row in enumerate(csv.reader(f)):
+                if not row or len(row) < 2:
+                    continue
+                row = [escape.to_unicode(c).strip() for c in row]
+                english, translation = row[:2]
+                if len(row) > 2:
+                    plural = row[2] or "unknown"
+                else:
+                    plural = "unknown"
+                if plural not in ("plural", "singular", "unknown"):
+                    gen_log.error(
+                        "Unrecognized plural indicator %r in %s line %d",
+                        plural,
+                        path,
+                        i + 1,
+                    )
+                    continue
+                _translations[locale].setdefault(plural, {})[english] = translation
+    _supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
+    gen_log.debug("Supported locales: %s", sorted(_supported_locales))
+
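A sketch of the CSV format the loader above expects: one ``<locale>.csv`` file per locale, with columns english, translation, and an optional plural indicator. The catalog contents here are illustrative only:

    import os
    import tempfile

    import tornado.locale

    d = tempfile.mkdtemp()
    with open(os.path.join(d, "fr_FR.csv"), "w", encoding="utf-8") as f:
        f.write('"I like you","Je t\'aime"\n')
        f.write('"You have 1 new message",'
                '"Vous avez un nouveau message","singular"\n')
        f.write('"You have %(n)d new messages",'
                '"Vous avez %(n)d nouveaux messages","plural"\n')

    tornado.locale.load_translations(d)
    fr = tornado.locale.get("fr_FR")
    print(fr.translate("I like you"))  # -> Je t'aime
    print(fr.translate("You have 1 new message",
                       "You have %(n)d new messages", 3) % {"n": 3})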
+
+def load_gettext_translations(directory: str, domain: str) -> None:
     """Loads translations from `gettext`'s locale tree

     Locale tree is similar to system's ``/usr/share/locale``, like::
@@ -124,12 +195,30 @@ def load_gettext_translations(directory: str, domain: str) ->None:

         msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo
     """
-    pass
-
-
-def get_supported_locales() ->Iterable[str]:
+    global _translations
+    global _supported_locales
+    global _use_gettext
+    _translations = {}
+
+    for filename in glob.glob(
+        os.path.join(directory, "*", "LC_MESSAGES", domain + ".mo")
+    ):
+        lang = os.path.basename(os.path.dirname(os.path.dirname(filename)))
+        try:
+            _translations[lang] = gettext.translation(
+                domain, directory, languages=[lang]
+            )
+        except Exception as e:
+            gen_log.error("Cannot load translation for '%s': %s", lang, str(e))
+            continue
+    _supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
+    _use_gettext = True
+    gen_log.debug("Supported locales: %s", sorted(_supported_locales))
+
+
+def get_supported_locales() -> Iterable[str]:
     """Returns a list of all the supported locale codes."""
-    pass
+    return _supported_locales


 class Locale(object):
@@ -138,38 +227,86 @@ class Locale(object):
     After calling one of `load_translations` or `load_gettext_translations`,
     call `get` or `get_closest` to get a Locale object.
     """
-    _cache = {}
+
+    _cache = {}  # type: Dict[str, Locale]

     @classmethod
-    def get_closest(cls, *locale_codes: str) ->'Locale':
+    def get_closest(cls, *locale_codes: str) -> "Locale":
         """Returns the closest match for the given locale code."""
-        pass
+        for code in locale_codes:
+            if not code:
+                continue
+            code = code.replace("-", "_")
+            parts = code.split("_")
+            if len(parts) > 2:
+                continue
+            elif len(parts) == 2:
+                code = parts[0].lower() + "_" + parts[1].upper()
+            if code in _supported_locales:
+                return cls.get(code)
+            if parts[0].lower() in _supported_locales:
+                return cls.get(parts[0].lower())
+        return cls.get(_default_locale)
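
A few illustrative matches for the normalization above, assuming translations were loaded only for ``es_ES`` (plus the ``en_US`` default):

    from tornado.locale import Locale

    Locale.get_closest("es-es").code      # -> "es_ES" (dash/case normalized)
    Locale.get_closest("es_MX").code      # -> "en_US" (no bare "es" catalog)
    Locale.get_closest("", "es_ES").code  # -> "es_ES" (empty codes are skipped)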

     @classmethod
-    def get(cls, code: str) ->'Locale':
+    def get(cls, code: str) -> "Locale":
         """Returns the Locale for the given locale code.

         If it is not supported, we raise an exception.
         """
-        pass
-
-    def __init__(self, code: str) ->None:
+        if code not in cls._cache:
+            assert code in _supported_locales
+            translations = _translations.get(code, None)
+            if translations is None:
+                locale = CSVLocale(code, {})  # type: Locale
+            elif _use_gettext:
+                locale = GettextLocale(code, translations)
+            else:
+                locale = CSVLocale(code, translations)
+            cls._cache[code] = locale
+        return cls._cache[code]
+
+    def __init__(self, code: str) -> None:
         self.code = code
-        self.name = LOCALE_NAMES.get(code, {}).get('name', 'Unknown')
+        self.name = LOCALE_NAMES.get(code, {}).get("name", "Unknown")
         self.rtl = False
-        for prefix in ['fa', 'ar', 'he']:
+        for prefix in ["fa", "ar", "he"]:
             if self.code.startswith(prefix):
                 self.rtl = True
                 break
+
+        # Initialize strings for date formatting
         _ = self.translate
-        self._months = [_('January'), _('February'), _('March'), _('April'),
-            _('May'), _('June'), _('July'), _('August'), _('September'), _(
-            'October'), _('November'), _('December')]
-        self._weekdays = [_('Monday'), _('Tuesday'), _('Wednesday'), _(
-            'Thursday'), _('Friday'), _('Saturday'), _('Sunday')]
-
-    def translate(self, message: str, plural_message: Optional[str]=None,
-        count: Optional[int]=None) ->str:
+        self._months = [
+            _("January"),
+            _("February"),
+            _("March"),
+            _("April"),
+            _("May"),
+            _("June"),
+            _("July"),
+            _("August"),
+            _("September"),
+            _("October"),
+            _("November"),
+            _("December"),
+        ]
+        self._weekdays = [
+            _("Monday"),
+            _("Tuesday"),
+            _("Wednesday"),
+            _("Thursday"),
+            _("Friday"),
+            _("Saturday"),
+            _("Sunday"),
+        ]
+
+    def translate(
+        self,
+        message: str,
+        plural_message: Optional[str] = None,
+        count: Optional[int] = None,
+    ) -> str:
         """Returns the translation for the given message for this locale.

         If ``plural_message`` is given, you must also provide
@@ -177,11 +314,25 @@ class Locale(object):
         and we return the singular form for the given message when
         ``count == 1``.
         """
-        pass
-
-    def format_date(self, date: Union[int, float, datetime.datetime],
-        gmt_offset: int=0, relative: bool=True, shorter: bool=False,
-        full_format: bool=False) ->str:
+        raise NotImplementedError()
+
+    def pgettext(
+        self,
+        context: str,
+        message: str,
+        plural_message: Optional[str] = None,
+        count: Optional[int] = None,
+    ) -> str:
+        raise NotImplementedError()
+
+    def format_date(
+        self,
+        date: Union[int, float, datetime.datetime],
+        gmt_offset: int = 0,
+        relative: bool = True,
+        shorter: bool = False,
+        full_format: bool = False,
+    ) -> str:
         """Formats the given date.

         By default, we return a relative time (e.g., "2 minutes ago"). You
@@ -197,50 +348,205 @@ class Locale(object):
            Aware `datetime.datetime` objects are now supported (naive
            datetimes are still assumed to be UTC).
         """
-        pass
+        if isinstance(date, (int, float)):
+            date = datetime.datetime.fromtimestamp(date, datetime.timezone.utc)
+        if date.tzinfo is None:
+            date = date.replace(tzinfo=datetime.timezone.utc)
+        now = datetime.datetime.now(datetime.timezone.utc)
+        if date > now:
+            if relative and (date - now).seconds < 60:
+                # Due to clock skew, some things appear to be slightly
+                # in the future. Round timestamps in the immediate
+                # future down to now in relative mode.
+                date = now
+            else:
+                # Otherwise, future dates always use the full format.
+                full_format = True
+        local_date = date - datetime.timedelta(minutes=gmt_offset)
+        local_now = now - datetime.timedelta(minutes=gmt_offset)
+        local_yesterday = local_now - datetime.timedelta(hours=24)
+        difference = now - date
+        seconds = difference.seconds
+        days = difference.days

-    def format_day(self, date: datetime.datetime, gmt_offset: int=0, dow:
-        bool=True) ->bool:
+        _ = self.translate
+        format = None
+        if not full_format:
+            if relative and days == 0:
+                if seconds < 50:
+                    return _("1 second ago", "%(seconds)d seconds ago", seconds) % {
+                        "seconds": seconds
+                    }
+
+                if seconds < 50 * 60:
+                    minutes = round(seconds / 60.0)
+                    return _("1 minute ago", "%(minutes)d minutes ago", minutes) % {
+                        "minutes": minutes
+                    }
+
+                hours = round(seconds / (60.0 * 60))
+                return _("1 hour ago", "%(hours)d hours ago", hours) % {"hours": hours}
+
+            if days == 0:
+                format = _("%(time)s")
+            elif days == 1 and local_date.day == local_yesterday.day and relative:
+                format = _("yesterday") if shorter else _("yesterday at %(time)s")
+            elif days < 5:
+                format = _("%(weekday)s") if shorter else _("%(weekday)s at %(time)s")
+            elif days < 334:  # ~11 months; same month last year would be confusing
+                format = (
+                    _("%(month_name)s %(day)s")
+                    if shorter
+                    else _("%(month_name)s %(day)s at %(time)s")
+                )
+
+        if format is None:
+            format = (
+                _("%(month_name)s %(day)s, %(year)s")
+                if shorter
+                else _("%(month_name)s %(day)s, %(year)s at %(time)s")
+            )
+
+        tfhour_clock = self.code not in ("en", "en_US", "zh_CN")
+        if tfhour_clock:
+            str_time = "%d:%02d" % (local_date.hour, local_date.minute)
+        elif self.code == "zh_CN":
+            str_time = "%s%d:%02d" % (
+                ("\u4e0a\u5348", "\u4e0b\u5348")[local_date.hour >= 12],
+                local_date.hour % 12 or 12,
+                local_date.minute,
+            )
+        else:
+            str_time = "%d:%02d %s" % (
+                local_date.hour % 12 or 12,
+                local_date.minute,
+                ("am", "pm")[local_date.hour >= 12],
+            )
+
+        return format % {
+            "month_name": self._months[local_date.month - 1],
+            "weekday": self._weekdays[local_date.weekday()],
+            "day": str(local_date.day),
+            "year": str(local_date.year),
+            "time": str_time,
+        }
+
+    def format_day(
+        self, date: datetime.datetime, gmt_offset: int = 0, dow: bool = True
+    ) -> str:
         """Formats the given date as a day of week.

         Example: "Monday, January 22". You can remove the day of week with
         ``dow=False``.
         """
-        pass
-
-    def list(self, parts: Any) ->str:
+        local_date = date - datetime.timedelta(minutes=gmt_offset)
+        _ = self.translate
+        if dow:
+            return _("%(weekday)s, %(month_name)s %(day)s") % {
+                "month_name": self._months[local_date.month - 1],
+                "weekday": self._weekdays[local_date.weekday()],
+                "day": str(local_date.day),
+            }
+        else:
+            return _("%(month_name)s %(day)s") % {
+                "month_name": self._months[local_date.month - 1],
+                "day": str(local_date.day),
+            }
+
+    def list(self, parts: Any) -> str:
         """Returns a comma-separated list for the given list of parts.

         The format is, e.g., "A, B and C", "A and B" or just "A" for lists
         of size 1.
         """
-        pass
-
-    def friendly_number(self, value: int) ->str:
+        _ = self.translate
+        if len(parts) == 0:
+            return ""
+        if len(parts) == 1:
+            return parts[0]
+        comma = " \u0648 " if self.code.startswith("fa") else ", "
+        return _("%(commas)s and %(last)s") % {
+            "commas": comma.join(parts[:-1]),
+            "last": parts[len(parts) - 1],
+        }
+
+    def friendly_number(self, value: int) -> str:
         """Returns a comma-separated number for the given integer."""
-        pass
+        if self.code not in ("en", "en_US"):
+            return str(value)
+        s = str(value)
+        parts = []
+        while s:
+            parts.append(s[-3:])
+            s = s[:-3]
+        return ",".join(reversed(parts))
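
The loop above slices three digits at a time from the right. For example (``CSVLocale`` with an empty catalog is just a convenient way to build a locale without loading files):

    from tornado.locale import CSVLocale

    en = CSVLocale("en_US", {})
    print(en.friendly_number(1234567))  # -> 1,234,567
    print(en.friendly_number(42))       # -> 42

    de = CSVLocale("de_DE", {})
    print(de.friendly_number(1234567))  # -> 1234567 (only en/en_US get commas)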


 class CSVLocale(Locale):
     """Locale implementation using tornado's CSV translation format."""

-    def __init__(self, code: str, translations: Dict[str, Dict[str, str]]
-        ) ->None:
+    def __init__(self, code: str, translations: Dict[str, Dict[str, str]]) -> None:
         self.translations = translations
         super().__init__(code)

+    def translate(
+        self,
+        message: str,
+        plural_message: Optional[str] = None,
+        count: Optional[int] = None,
+    ) -> str:
+        if plural_message is not None:
+            assert count is not None
+            if count != 1:
+                message = plural_message
+                message_dict = self.translations.get("plural", {})
+            else:
+                message_dict = self.translations.get("singular", {})
+        else:
+            message_dict = self.translations.get("unknown", {})
+        return message_dict.get(message, message)
+
+    def pgettext(
+        self,
+        context: str,
+        message: str,
+        plural_message: Optional[str] = None,
+        count: Optional[int] = None,
+    ) -> str:
+        if self.translations:
+            gen_log.warning("pgettext is not supported by CSVLocale")
+        return self.translate(message, plural_message, count)
+

 class GettextLocale(Locale):
     """Locale implementation using the `gettext` module."""

-    def __init__(self, code: str, translations: gettext.NullTranslations
-        ) ->None:
+    def __init__(self, code: str, translations: gettext.NullTranslations) -> None:
         self.ngettext = translations.ngettext
         self.gettext = translations.gettext
+        # self.gettext must exist before __init__ is called, since it
+        # calls into self.translate
         super().__init__(code)

-    def pgettext(self, context: str, message: str, plural_message: Optional
-        [str]=None, count: Optional[int]=None) ->str:
+    def translate(
+        self,
+        message: str,
+        plural_message: Optional[str] = None,
+        count: Optional[int] = None,
+    ) -> str:
+        if plural_message is not None:
+            assert count is not None
+            return self.ngettext(message, plural_message, count)
+        else:
+            return self.gettext(message)
+
+    def pgettext(
+        self,
+        context: str,
+        message: str,
+        plural_message: Optional[str] = None,
+        count: Optional[int] = None,
+    ) -> str:
         """Allows to set context for translation, accepts plural forms.

         Usage example::
@@ -260,4 +566,22 @@ class GettextLocale(Locale):

         .. versionadded:: 4.2
         """
-        pass
+        if plural_message is not None:
+            assert count is not None
+            msgs_with_ctxt = (
+                "%s%s%s" % (context, CONTEXT_SEPARATOR, message),
+                "%s%s%s" % (context, CONTEXT_SEPARATOR, plural_message),
+                count,
+            )
+            result = self.ngettext(*msgs_with_ctxt)
+            if CONTEXT_SEPARATOR in result:
+                # Translation not found
+                result = self.ngettext(message, plural_message, count)
+            return result
+        else:
+            msg_with_ctxt = "%s%s%s" % (context, CONTEXT_SEPARATOR, message)
+            result = self.gettext(msg_with_ctxt)
+            if CONTEXT_SEPARATOR in result:
+                # Translation not found
+                result = message
+            return result
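
The ``CONTEXT_SEPARATOR`` handling above mirrors gettext's ``msgctxt`` convention: context and message are joined with ``"\x04"`` for the lookup, and a separator surviving in the result signals that no translation was found. A sketch of the fallback path using an empty catalog:

    import gettext

    from tornado.locale import GettextLocale

    # NullTranslations echoes its input, so the context-tagged lookup
    # still contains "\x04" and pgettext falls back to the plain message.
    locale = GettextLocale("en_US", gettext.NullTranslations())
    print(locale.pgettext("month name", "May"))  # -> May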
diff --git a/tornado/locks.py b/tornado/locks.py
index bacc3ebe..1bcec1b3 100644
--- a/tornado/locks.py
+++ b/tornado/locks.py
@@ -1,13 +1,31 @@
+# Copyright 2015 The Tornado Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 import collections
 import datetime
 import types
+
 from tornado import gen, ioloop
 from tornado.concurrent import Future, future_set_result_unless_cancelled
+
 from typing import Union, Optional, Type, Any, Awaitable
 import typing
+
 if typing.TYPE_CHECKING:
-    from typing import Deque, Set
-__all__ = ['Condition', 'Event', 'Semaphore', 'BoundedSemaphore', 'Lock']
+    from typing import Deque, Set  # noqa: F401
+
+__all__ = ["Condition", "Event", "Semaphore", "BoundedSemaphore", "Lock"]


 class _TimeoutGarbageCollector(object):
@@ -20,10 +38,17 @@ class _TimeoutGarbageCollector(object):
             print('looping....')
     """

-    def __init__(self) ->None:
-        self._waiters = collections.deque()
+    def __init__(self) -> None:
+        self._waiters = collections.deque()  # type: Deque[Future]
         self._timeouts = 0

+    def _garbage_collect(self) -> None:
+        # Occasionally clear timed-out waiters.
+        self._timeouts += 1
+        if self._timeouts > 100:
+            self._timeouts = 0
+            self._waiters = collections.deque(w for w in self._waiters if not w.done())
+

 class Condition(_TimeoutGarbageCollector):
     """A condition allows one or more coroutines to wait until notified.
@@ -85,28 +110,49 @@ class Condition(_TimeoutGarbageCollector):
        next iteration of the `.IOLoop`.
     """

-    def __repr__(self) ->str:
-        result = '<%s' % (self.__class__.__name__,)
+    def __repr__(self) -> str:
+        result = "<%s" % (self.__class__.__name__,)
         if self._waiters:
-            result += ' waiters[%s]' % len(self._waiters)
-        return result + '>'
+            result += " waiters[%s]" % len(self._waiters)
+        return result + ">"

-    def wait(self, timeout: Optional[Union[float, datetime.timedelta]]=None
-        ) ->Awaitable[bool]:
+    def wait(
+        self, timeout: Optional[Union[float, datetime.timedelta]] = None
+    ) -> Awaitable[bool]:
         """Wait for `.notify`.

         Returns a `.Future` that resolves ``True`` if the condition is notified,
         or ``False`` after a timeout.
         """
-        pass
+        waiter = Future()  # type: Future[bool]
+        self._waiters.append(waiter)
+        if timeout:

-    def notify(self, n: int=1) ->None:
+            def on_timeout() -> None:
+                if not waiter.done():
+                    future_set_result_unless_cancelled(waiter, False)
+                self._garbage_collect()
+
+            io_loop = ioloop.IOLoop.current()
+            timeout_handle = io_loop.add_timeout(timeout, on_timeout)
+            waiter.add_done_callback(lambda _: io_loop.remove_timeout(timeout_handle))
+        return waiter
+
+    def notify(self, n: int = 1) -> None:
         """Wake ``n`` waiters."""
-        pass
+        waiters = []  # Waiters we plan to run right now.
+        while n and self._waiters:
+            waiter = self._waiters.popleft()
+            if not waiter.done():  # Might have timed out.
+                n -= 1
+                waiters.append(waiter)
+
+        for waiter in waiters:
+            future_set_result_unless_cancelled(waiter, True)

-    def notify_all(self) ->None:
+    def notify_all(self) -> None:
         """Wake all waiters."""
-        pass
+        self.notify(len(self._waiters))
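
Note that ``notify`` above skips waiters whose futures are already done (i.e. timed out), and a timed-out ``wait`` resolves to ``False`` rather than raising. A minimal sketch (the timeout is a ``timedelta`` because it is passed straight to ``add_timeout``, which treats a bare float as an absolute deadline):

    import asyncio
    import datetime

    from tornado.locks import Condition

    async def main() -> None:
        condition = Condition()

        async def waiter() -> None:
            ok = await condition.wait(timeout=datetime.timedelta(seconds=5))
            print("notified" if ok else "timed out")

        task = asyncio.ensure_future(waiter())
        await asyncio.sleep(0)  # let the waiter enqueue itself
        condition.notify()      # wake one waiter
        await task

    asyncio.run(main())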


 class Event(object):
@@ -149,40 +195,64 @@ class Event(object):
         Done
     """

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         self._value = False
-        self._waiters = set()
+        self._waiters = set()  # type: Set[Future[None]]

-    def __repr__(self) ->str:
-        return '<%s %s>' % (self.__class__.__name__, 'set' if self.is_set()
-             else 'clear')
+    def __repr__(self) -> str:
+        return "<%s %s>" % (
+            self.__class__.__name__,
+            "set" if self.is_set() else "clear",
+        )

-    def is_set(self) ->bool:
+    def is_set(self) -> bool:
         """Return ``True`` if the internal flag is true."""
-        pass
+        return self._value

-    def set(self) ->None:
+    def set(self) -> None:
         """Set the internal flag to ``True``. All waiters are awakened.

         Calling `.wait` once the flag is set will not block.
         """
-        pass
+        if not self._value:
+            self._value = True

-    def clear(self) ->None:
+            for fut in self._waiters:
+                if not fut.done():
+                    fut.set_result(None)
+
+    def clear(self) -> None:
         """Reset the internal flag to ``False``.

         Calls to `.wait` will block until `.set` is called.
         """
-        pass
+        self._value = False

-    def wait(self, timeout: Optional[Union[float, datetime.timedelta]]=None
-        ) ->Awaitable[None]:
+    def wait(
+        self, timeout: Optional[Union[float, datetime.timedelta]] = None
+    ) -> Awaitable[None]:
         """Block until the internal flag is true.

         Returns an awaitable, which raises `tornado.util.TimeoutError` after a
         timeout.
         """
-        pass
+        fut = Future()  # type: Future[None]
+        if self._value:
+            fut.set_result(None)
+            return fut
+        self._waiters.add(fut)
+        fut.add_done_callback(lambda fut: self._waiters.remove(fut))
+        if timeout is None:
+            return fut
+        else:
+            timeout_fut = gen.with_timeout(timeout, fut)
+            # This is a slightly clumsy workaround for the fact that
+            # gen.with_timeout doesn't cancel its futures. Cancelling
+            # fut will remove it from the waiters list.
+            timeout_fut.add_done_callback(
+                lambda tf: fut.cancel() if not fut.done() else None
+            )
+            return timeout_fut
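
Unlike ``Condition.wait``, a timed-out ``Event.wait`` raises ``tornado.util.TimeoutError`` (from ``gen.with_timeout``); the done-callback above then cancels the inner future so it drops out of ``_waiters``. A short sketch:

    import asyncio
    import datetime

    from tornado.locks import Event
    from tornado.util import TimeoutError

    async def main() -> None:
        event = Event()
        try:
            await event.wait(timeout=datetime.timedelta(seconds=0.1))
        except TimeoutError:
            print("timed out")
        event.set()
        await event.wait()  # resolves immediately once the flag is set

    asyncio.run(main())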


 class _ReleasingContextManager(object):
@@ -194,14 +264,18 @@ class _ReleasingContextManager(object):
     # Now semaphore.release() has been called.
     """

-    def __init__(self, obj: Any) ->None:
+    def __init__(self, obj: Any) -> None:
         self._obj = obj

-    def __enter__(self) ->None:
+    def __enter__(self) -> None:
         pass

-    def __exit__(self, exc_type: 'Optional[Type[BaseException]]', exc_val:
-        Optional[BaseException], exc_tb: Optional[types.TracebackType]) ->None:
+    def __exit__(
+        self,
+        exc_type: "Optional[Type[BaseException]]",
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[types.TracebackType],
+    ) -> None:
         self._obj.release()


@@ -306,46 +380,87 @@ class Semaphore(_TimeoutGarbageCollector):

     """

-    def __init__(self, value: int=1) ->None:
+    def __init__(self, value: int = 1) -> None:
         super().__init__()
         if value < 0:
-            raise ValueError('semaphore initial value must be >= 0')
+            raise ValueError("semaphore initial value must be >= 0")
+
         self._value = value

-    def __repr__(self) ->str:
+    def __repr__(self) -> str:
         res = super().__repr__()
-        extra = 'locked' if self._value == 0 else 'unlocked,value:{0}'.format(
-            self._value)
+        extra = (
+            "locked" if self._value == 0 else "unlocked,value:{0}".format(self._value)
+        )
         if self._waiters:
-            extra = '{0},waiters:{1}'.format(extra, len(self._waiters))
-        return '<{0} [{1}]>'.format(res[1:-1], extra)
+            extra = "{0},waiters:{1}".format(extra, len(self._waiters))
+        return "<{0} [{1}]>".format(res[1:-1], extra)

-    def release(self) ->None:
+    def release(self) -> None:
         """Increment the counter and wake one waiter."""
-        pass
-
-    def acquire(self, timeout: Optional[Union[float, datetime.timedelta]]=None
-        ) ->Awaitable[_ReleasingContextManager]:
+        self._value += 1
+        while self._waiters:
+            waiter = self._waiters.popleft()
+            if not waiter.done():
+                self._value -= 1
+
+                # If the waiter is a coroutine paused at
+                #
+                #     with (yield semaphore.acquire()):
+                #
+                # then the context manager's __exit__ calls release() at the end
+                # of the "with" block.
+                waiter.set_result(_ReleasingContextManager(self))
+                break
+
+    def acquire(
+        self, timeout: Optional[Union[float, datetime.timedelta]] = None
+    ) -> Awaitable[_ReleasingContextManager]:
         """Decrement the counter. Returns an awaitable.

         Block if the counter is zero and wait for a `.release`. The awaitable
         raises `.TimeoutError` after the deadline.
         """
-        pass
-
-    def __enter__(self) ->None:
+        waiter = Future()  # type: Future[_ReleasingContextManager]
+        if self._value > 0:
+            self._value -= 1
+            waiter.set_result(_ReleasingContextManager(self))
+        else:
+            self._waiters.append(waiter)
+            if timeout:
+
+                def on_timeout() -> None:
+                    if not waiter.done():
+                        waiter.set_exception(gen.TimeoutError())
+                    self._garbage_collect()
+
+                io_loop = ioloop.IOLoop.current()
+                timeout_handle = io_loop.add_timeout(timeout, on_timeout)
+                waiter.add_done_callback(
+                    lambda _: io_loop.remove_timeout(timeout_handle)
+                )
+        return waiter
+
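As a quick sketch of how this awaitable is consumed (illustrative, not from the patch), `async with` drives acquire/release automatically:

    import asyncio
    from tornado import locks

    sem = locks.Semaphore(2)

    async def worker(n: int) -> None:
        # __aexit__ calls sem.release() when the block exits.
        async with sem:
            print("worker %d holds the semaphore" % n)
            await asyncio.sleep(0.1)

    async def main() -> None:
        await asyncio.gather(*(worker(i) for i in range(4)))

    asyncio.run(main())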
+    def __enter__(self) -> None:
         raise RuntimeError("Use 'async with' instead of 'with' for Semaphore")

-    def __exit__(self, typ: 'Optional[Type[BaseException]]', value:
-        Optional[BaseException], traceback: Optional[types.TracebackType]
-        ) ->None:
+    def __exit__(
+        self,
+        typ: "Optional[Type[BaseException]]",
+        value: Optional[BaseException],
+        traceback: Optional[types.TracebackType],
+    ) -> None:
         self.__enter__()

-    async def __aenter__(self) ->None:
+    async def __aenter__(self) -> None:
         await self.acquire()

-    async def __aexit__(self, typ: 'Optional[Type[BaseException]]', value:
-        Optional[BaseException], tb: Optional[types.TracebackType]) ->None:
+    async def __aexit__(
+        self,
+        typ: "Optional[Type[BaseException]]",
+        value: Optional[BaseException],
+        tb: Optional[types.TracebackType],
+    ) -> None:
         self.release()


@@ -358,13 +473,15 @@ class BoundedSemaphore(Semaphore):
     is a sign of a bug.
     """

-    def __init__(self, value: int=1) ->None:
+    def __init__(self, value: int = 1) -> None:
         super().__init__(value=value)
         self._initial_value = value

-    def release(self) ->None:
+    def release(self) -> None:
         """Increment the counter and wake one waiter."""
-        pass
+        if self._value >= self._initial_value:
+            raise ValueError("Semaphore released too many times")
+        super().release()
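A small sketch of the over-release guard (illustrative; no event loop is needed because the check fires before any waiter handling):

    from tornado.locks import BoundedSemaphore

    sem = BoundedSemaphore(1)
    try:
        sem.release()  # counter is already at its initial value of 1
    except ValueError as e:
        print(e)  # Semaphore released too many times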


 class Lock(object):
@@ -404,40 +521,52 @@ class Lock(object):

     """

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         self._block = BoundedSemaphore(value=1)

-    def __repr__(self) ->str:
-        return '<%s _block=%s>' % (self.__class__.__name__, self._block)
+    def __repr__(self) -> str:
+        return "<%s _block=%s>" % (self.__class__.__name__, self._block)

-    def acquire(self, timeout: Optional[Union[float, datetime.timedelta]]=None
-        ) ->Awaitable[_ReleasingContextManager]:
+    def acquire(
+        self, timeout: Optional[Union[float, datetime.timedelta]] = None
+    ) -> Awaitable[_ReleasingContextManager]:
         """Attempt to lock. Returns an awaitable.

         Returns an awaitable, which raises `tornado.util.TimeoutError` after a
         timeout.
         """
-        pass
+        return self._block.acquire(timeout)

-    def release(self) ->None:
+    def release(self) -> None:
         """Unlock.

         The first coroutine in line waiting for `acquire` gets the lock.

         If not locked, raise a `RuntimeError`.
         """
-        pass
-
-    def __enter__(self) ->None:
-        raise RuntimeError('Use `async with` instead of `with` for Lock')
-
-    def __exit__(self, typ: 'Optional[Type[BaseException]]', value:
-        Optional[BaseException], tb: Optional[types.TracebackType]) ->None:
+        try:
+            self._block.release()
+        except ValueError:
+            raise RuntimeError("release unlocked lock")
+
+    def __enter__(self) -> None:
+        raise RuntimeError("Use `async with` instead of `with` for Lock")
+
+    def __exit__(
+        self,
+        typ: "Optional[Type[BaseException]]",
+        value: Optional[BaseException],
+        tb: Optional[types.TracebackType],
+    ) -> None:
         self.__enter__()

-    async def __aenter__(self) ->None:
+    async def __aenter__(self) -> None:
         await self.acquire()

-    async def __aexit__(self, typ: 'Optional[Type[BaseException]]', value:
-        Optional[BaseException], tb: Optional[types.TracebackType]) ->None:
+    async def __aexit__(
+        self,
+        typ: "Optional[Type[BaseException]]",
+        value: Optional[BaseException],
+        tb: Optional[types.TracebackType],
+    ) -> None:
         self.release()
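For context, a hedged usage sketch of Lock; note how the underlying BoundedSemaphore's ValueError surfaces as RuntimeError:

    import asyncio
    from tornado import locks

    lock = locks.Lock()

    async def main() -> None:
        async with lock:
            print("lock held")
        try:
            lock.release()  # nobody holds it at this point
        except RuntimeError as e:
            print(e)  # release unlocked lock

    asyncio.run(main())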
diff --git a/tornado/log.py b/tornado/log.py
index 6e3e6e4d..86998961 100644
--- a/tornado/log.py
+++ b/tornado/log.py
@@ -1,3 +1,17 @@
+#
+# Copyright 2012 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
 """Logging support for Tornado.

 Tornado uses three logger streams:
@@ -16,20 +30,52 @@ to a separate file for analysis.
 import logging
 import logging.handlers
 import sys
+
 from tornado.escape import _unicode
 from tornado.util import unicode_type, basestring_type
+
 try:
-    import colorama
+    import colorama  # type: ignore
 except ImportError:
     colorama = None
+
 try:
     import curses
 except ImportError:
-    curses = None
+    curses = None  # type: ignore
+
 from typing import Dict, Any, cast, Optional
-access_log = logging.getLogger('tornado.access')
-app_log = logging.getLogger('tornado.application')
-gen_log = logging.getLogger('tornado.general')
+
+# Logger objects for internal tornado use
+access_log = logging.getLogger("tornado.access")
+app_log = logging.getLogger("tornado.application")
+gen_log = logging.getLogger("tornado.general")
+
+
+def _stderr_supports_color() -> bool:
+    try:
+        if hasattr(sys.stderr, "isatty") and sys.stderr.isatty():
+            if curses:
+                curses.setupterm()
+                if curses.tigetnum("colors") > 0:
+                    return True
+            elif colorama:
+                if sys.stderr is getattr(
+                    colorama.initialise, "wrapped_stderr", object()
+                ):
+                    return True
+    except Exception:
+        # Very broad exception handling because it's always better to
+        # fall back to non-colored logs than to break at startup.
+        pass
+    return False
+
+
+def _safe_unicode(s: Any) -> str:
+    try:
+        return _unicode(s)
+    except UnicodeDecodeError:
+        return repr(s)


 class LogFormatter(logging.Formatter):
@@ -56,17 +102,26 @@ class LogFormatter(logging.Formatter):
        Added support for ``colorama``. Changed the constructor
        signature to be compatible with `logging.config.dictConfig`.
     """
-    DEFAULT_FORMAT = (
-        '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s'
-        )
-    DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S'
-    DEFAULT_COLORS = {logging.DEBUG: 4, logging.INFO: 2, logging.WARNING: 3,
-        logging.ERROR: 1, logging.CRITICAL: 5}
-
-    def __init__(self, fmt: str=DEFAULT_FORMAT, datefmt: str=
-        DEFAULT_DATE_FORMAT, style: str='%', color: bool=True, colors: Dict
-        [int, int]=DEFAULT_COLORS) ->None:
-        """
+
+    DEFAULT_FORMAT = "%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s"  # noqa: E501
+    DEFAULT_DATE_FORMAT = "%y%m%d %H:%M:%S"
+    DEFAULT_COLORS = {
+        logging.DEBUG: 4,  # Blue
+        logging.INFO: 2,  # Green
+        logging.WARNING: 3,  # Yellow
+        logging.ERROR: 1,  # Red
+        logging.CRITICAL: 5,  # Magenta
+    }
+
+    def __init__(
+        self,
+        fmt: str = DEFAULT_FORMAT,
+        datefmt: str = DEFAULT_DATE_FORMAT,
+        style: str = "%",
+        color: bool = True,
+        colors: Dict[int, int] = DEFAULT_COLORS,
+    ) -> None:
+        r"""
         :arg bool color: Enables color support.
         :arg str fmt: Log message format.
           It will be applied to the attributes dict of log records. The
@@ -83,38 +138,131 @@ class LogFormatter(logging.Formatter):
         """
         logging.Formatter.__init__(self, datefmt=datefmt)
         self._fmt = fmt
-        self._colors = {}
+
+        self._colors = {}  # type: Dict[int, str]
         if color and _stderr_supports_color():
             if curses is not None:
-                fg_color = curses.tigetstr('setaf') or curses.tigetstr('setf'
-                    ) or b''
+                fg_color = curses.tigetstr("setaf") or curses.tigetstr("setf") or b""
+
                 for levelno, code in colors.items():
-                    self._colors[levelno] = unicode_type(curses.tparm(
-                        fg_color, code), 'ascii')
-                normal = curses.tigetstr('sgr0')
+                    # Convert the terminal control characters from
+                    # bytes to unicode strings for easier use with the
+                    # logging module.
+                    self._colors[levelno] = unicode_type(
+                        curses.tparm(fg_color, code), "ascii"
+                    )
+                normal = curses.tigetstr("sgr0")
                 if normal is not None:
-                    self._normal = unicode_type(normal, 'ascii')
+                    self._normal = unicode_type(normal, "ascii")
                 else:
-                    self._normal = ''
+                    self._normal = ""
             else:
+                # If curses is not present (currently we'll only get here for
+                # colorama on windows), assume hard-coded ANSI color codes.
                 for levelno, code in colors.items():
-                    self._colors[levelno] = '\x1b[2;3%dm' % code
-                self._normal = '\x1b[0m'
+                    self._colors[levelno] = "\033[2;3%dm" % code
+                self._normal = "\033[0m"
         else:
-            self._normal = ''
+            self._normal = ""

+    def format(self, record: Any) -> str:
+        try:
+            message = record.getMessage()
+            assert isinstance(message, basestring_type)  # guaranteed by logging
+            # Encoding notes:  The logging module prefers to work with character
+            # strings, but only enforces that log messages are instances of
+            # basestring.  In python 2, non-ascii bytestrings will make
+            # their way through the logging framework until they blow up with
+            # an unhelpful decoding error (with this formatter it happens
+            # when we attach the prefix, but there are other opportunities for
+            # exceptions further along in the framework).
+            #
+            # If a byte string makes it this far, convert it to unicode to
+            # ensure it will make it out to the logs.  Use repr() as a fallback
+            # to ensure that all byte strings can be converted successfully,
+            # but don't do it by default so we don't add extra quotes to ascii
+            # bytestrings.  This is a bit of a hacky place to do this, but
+            # it's worth it since the encoding errors that would otherwise
+            # result are so useless (and tornado is fond of using utf8-encoded
+            # byte strings wherever possible).
+            record.message = _safe_unicode(message)
+        except Exception as e:
+            record.message = "Bad message (%r): %r" % (e, record.__dict__)

-def enable_pretty_logging(options: Any=None, logger: Optional[logging.
-    Logger]=None) ->None:
+        record.asctime = self.formatTime(record, cast(str, self.datefmt))
+
+        if record.levelno in self._colors:
+            record.color = self._colors[record.levelno]
+            record.end_color = self._normal
+        else:
+            record.color = record.end_color = ""
+
+        formatted = self._fmt % record.__dict__
+
+        if record.exc_info:
+            if not record.exc_text:
+                record.exc_text = self.formatException(record.exc_info)
+        if record.exc_text:
+            # exc_text contains multiple lines.  We need to _safe_unicode
+            # each line separately so that non-utf8 bytes don't cause
+            # all the newlines to turn into '\n'.
+            lines = [formatted.rstrip()]
+            lines.extend(_safe_unicode(ln) for ln in record.exc_text.split("\n"))
+            formatted = "\n".join(lines)
+        return formatted.replace("\n", "\n    ")
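A brief sketch (assumed usage, not from the patch) of wiring this formatter onto an ordinary logging handler:

    import logging
    from tornado.log import LogFormatter

    handler = logging.StreamHandler()
    # color=True only takes effect when stderr is a tty and curses or
    # colorama can be initialized; otherwise output stays monochrome.
    handler.setFormatter(LogFormatter(color=True))

    logger = logging.getLogger("demo")
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    logger.warning("prefixed as [W <date> <time> module:lineno]")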
+
+
+def enable_pretty_logging(
+    options: Any = None, logger: Optional[logging.Logger] = None
+) -> None:
     """Turns on formatted logging output as configured.

     This is called automatically by `tornado.options.parse_command_line`
     and `tornado.options.parse_config_file`.
     """
-    pass
+    if options is None:
+        import tornado.options

+        options = tornado.options.options
+    if options.logging is None or options.logging.lower() == "none":
+        return
+    if logger is None:
+        logger = logging.getLogger()
+    logger.setLevel(getattr(logging, options.logging.upper()))
+    if options.log_file_prefix:
+        rotate_mode = options.log_rotate_mode
+        if rotate_mode == "size":
+            channel = logging.handlers.RotatingFileHandler(
+                filename=options.log_file_prefix,
+                maxBytes=options.log_file_max_size,
+                backupCount=options.log_file_num_backups,
+                encoding="utf-8",
+            )  # type: logging.Handler
+        elif rotate_mode == "time":
+            channel = logging.handlers.TimedRotatingFileHandler(
+                filename=options.log_file_prefix,
+                when=options.log_rotate_when,
+                interval=options.log_rotate_interval,
+                backupCount=options.log_file_num_backups,
+                encoding="utf-8",
+            )
+        else:
+            error_message = (
+                "The value of log_rotate_mode option should be "
+                + '"size" or "time", not "%s".' % rotate_mode
+            )
+            raise ValueError(error_message)
+        channel.setFormatter(LogFormatter(color=False))
+        logger.addHandler(channel)

-def define_logging_options(options: Any=None) ->None:
+    if options.log_to_stderr or (options.log_to_stderr is None and not logger.handlers):
+        # Set up color if we are in a tty and curses is installed
+        channel = logging.StreamHandler()
+        channel.setFormatter(LogFormatter())
+        logger.addHandler(channel)
+
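A sketch of calling this directly; normally parse_command_line triggers it through the parse callback registered in define_logging_options below:

    import logging
    from tornado.log import enable_pretty_logging
    from tornado.options import options

    options.logging = "debug"
    enable_pretty_logging(options=options, logger=logging.getLogger())
    logging.getLogger().debug("root logger now has a formatted handler")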
+
+def define_logging_options(options: Any = None) -> None:
     """Add logging-related flags to ``options``.

     These options are present automatically on the default options instance;
@@ -123,4 +271,73 @@ def define_logging_options(options: Any=None) ->None:
     .. versionadded:: 4.2
         This function existed in prior versions but was broken and undocumented until 4.2.
     """
-    pass
+    if options is None:
+        # late import to prevent cycle
+        import tornado.options
+
+        options = tornado.options.options
+    options.define(
+        "logging",
+        default="info",
+        help=(
+            "Set the Python log level. If 'none', tornado won't touch the "
+            "logging configuration."
+        ),
+        metavar="debug|info|warning|error|none",
+    )
+    options.define(
+        "log_to_stderr",
+        type=bool,
+        default=None,
+        help=(
+            "Send log output to stderr (colorized if possible). "
+            "By default use stderr if --log_file_prefix is not set and "
+            "no other logging is configured."
+        ),
+    )
+    options.define(
+        "log_file_prefix",
+        type=str,
+        default=None,
+        metavar="PATH",
+        help=(
+            "Path prefix for log files. "
+            "Note that if you are running multiple tornado processes, "
+            "log_file_prefix must be different for each of them (e.g. "
+            "include the port number)"
+        ),
+    )
+    options.define(
+        "log_file_max_size",
+        type=int,
+        default=100 * 1000 * 1000,
+        help="max size of log files before rollover",
+    )
+    options.define(
+        "log_file_num_backups", type=int, default=10, help="number of log files to keep"
+    )
+
+    options.define(
+        "log_rotate_when",
+        type=str,
+        default="midnight",
+        help=(
+            "specify the type of TimedRotatingFileHandler interval "
+            "other options:('S', 'M', 'H', 'D', 'W0'-'W6')"
+        ),
+    )
+    options.define(
+        "log_rotate_interval",
+        type=int,
+        default=1,
+        help="The interval value of timed rotating",
+    )
+
+    options.define(
+        "log_rotate_mode",
+        type=str,
+        default="size",
+        help="The mode of rotating files(time or size)",
+    )
+
+    options.add_parse_callback(lambda: enable_pretty_logging(options))
diff --git a/tornado/netutil.py b/tornado/netutil.py
index 5afa2659..18c91e67 100644
--- a/tornado/netutil.py
+++ b/tornado/netutil.py
@@ -1,4 +1,20 @@
+#
+# Copyright 2011 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """Miscellaneous network utility code."""
+
 import asyncio
 import concurrent.futures
 import errno
@@ -7,23 +23,44 @@ import sys
 import socket
 import ssl
 import stat
+
 from tornado.concurrent import dummy_executor, run_on_executor
 from tornado.ioloop import IOLoop
 from tornado.util import Configurable, errno_from_exception
+
 from typing import List, Callable, Any, Type, Dict, Union, Tuple, Awaitable, Optional
+
+# Note that the naming of ssl.Purpose is confusing; the purpose
+# of a context is to authenticate the opposite side of the connection.
 _client_ssl_defaults = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
 _server_ssl_defaults = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
-if hasattr(ssl, 'OP_NO_COMPRESSION'):
+if hasattr(ssl, "OP_NO_COMPRESSION"):
+    # See netutil.ssl_options_to_context
     _client_ssl_defaults.options |= ssl.OP_NO_COMPRESSION
     _server_ssl_defaults.options |= ssl.OP_NO_COMPRESSION
-"""foo""".encode('idna')
-"""foo""".encode('latin1')
+
+# ThreadedResolver runs getaddrinfo on a thread. If the hostname is unicode,
+# getaddrinfo attempts to import encodings.idna. If this is done at
+# module-import time, the import lock is already held by the main thread,
+# leading to deadlock. Avoid it by caching the idna encoder on the main
+# thread now.
+"foo".encode("idna")
+
+# For undiagnosed reasons, the 'latin1' codec may also need to be preloaded.
+"foo".encode("latin1")
+
+# Default backlog used when calling sock.listen()
 _DEFAULT_BACKLOG = 128


-def bind_sockets(port: int, address: Optional[str]=None, family: socket.
-    AddressFamily=socket.AF_UNSPEC, backlog: int=_DEFAULT_BACKLOG, flags:
-    Optional[int]=None, reuse_port: bool=False) ->List[socket.socket]:
+def bind_sockets(
+    port: int,
+    address: Optional[str] = None,
+    family: socket.AddressFamily = socket.AF_UNSPEC,
+    backlog: int = _DEFAULT_BACKLOG,
+    flags: Optional[int] = None,
+    reuse_port: bool = False,
+) -> List[socket.socket]:
     """Creates listening sockets bound to the given port and address.

     Returns a list of socket objects (multiple sockets are returned if
@@ -47,13 +84,114 @@ def bind_sockets(port: int, address: Optional[str]=None, family: socket.
     in the list. If your platform doesn't support this option ValueError will
     be raised.
     """
-    pass
-
-
-if hasattr(socket, 'AF_UNIX'):
-
-    def bind_unix_socket(file: str, mode: int=384, backlog: int=
-        _DEFAULT_BACKLOG) ->socket.socket:
+    if reuse_port and not hasattr(socket, "SO_REUSEPORT"):
+        raise ValueError("the platform doesn't support SO_REUSEPORT")
+
+    sockets = []
+    if address == "":
+        address = None
+    if not socket.has_ipv6 and family == socket.AF_UNSPEC:
+        # Python can be compiled with --disable-ipv6, which causes
+        # operations on AF_INET6 sockets to fail, but does not
+        # automatically exclude those results from getaddrinfo
+        # results.
+        # http://bugs.python.org/issue16208
+        family = socket.AF_INET
+    if flags is None:
+        flags = socket.AI_PASSIVE
+    bound_port = None
+    unique_addresses = set()  # type: set
+    for res in sorted(
+        socket.getaddrinfo(address, port, family, socket.SOCK_STREAM, 0, flags),
+        key=lambda x: x[0],
+    ):
+        if res in unique_addresses:
+            continue
+
+        unique_addresses.add(res)
+
+        af, socktype, proto, canonname, sockaddr = res
+        if (
+            sys.platform == "darwin"
+            and address == "localhost"
+            and af == socket.AF_INET6
+            and sockaddr[3] != 0  # type: ignore
+        ):
+            # Mac OS X includes a link-local address fe80::1%lo0 in the
+            # getaddrinfo results for 'localhost'.  However, the firewall
+            # doesn't understand that this is a local address and will
+            # prompt for access (often repeatedly, due to an apparent
+            # bug in its ability to remember granting access to an
+            # application). Skip these addresses.
+            continue
+        try:
+            sock = socket.socket(af, socktype, proto)
+        except socket.error as e:
+            if errno_from_exception(e) == errno.EAFNOSUPPORT:
+                continue
+            raise
+        if os.name != "nt":
+            try:
+                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+            except socket.error as e:
+                if errno_from_exception(e) != errno.ENOPROTOOPT:
+                    # Hurd doesn't support SO_REUSEADDR.
+                    raise
+        if reuse_port:
+            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+        if af == socket.AF_INET6:
+            # On linux, ipv6 sockets accept ipv4 too by default,
+            # but this makes it impossible to bind to both
+            # 0.0.0.0 in ipv4 and :: in ipv6.  On other systems,
+            # separate sockets *must* be used to listen for both ipv4
+            # and ipv6.  For consistency, always disable ipv4 on our
+            # ipv6 sockets and use a separate ipv4 socket when needed.
+            #
+            # Python 2.x on windows doesn't have IPPROTO_IPV6.
+            if hasattr(socket, "IPPROTO_IPV6"):
+                sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
+
+        # automatic port allocation with port=None
+        # should bind on the same port on IPv4 and IPv6
+        host, requested_port = sockaddr[:2]
+        if requested_port == 0 and bound_port is not None:
+            sockaddr = tuple([host, bound_port] + list(sockaddr[2:]))
+
+        sock.setblocking(False)
+        try:
+            sock.bind(sockaddr)
+        except OSError as e:
+            if (
+                errno_from_exception(e) == errno.EADDRNOTAVAIL
+                and address == "localhost"
+                and sockaddr[0] == "::1"
+            ):
+                # On some systems (most notably docker with default
+                # configurations), ipv6 is partially disabled:
+                # socket.has_ipv6 is true, we can create AF_INET6
+                # sockets, and getaddrinfo("localhost", ...,
+                # AF_PASSIVE) resolves to ::1, but we get an error
+                # when binding.
+                #
+                # Swallow the error, but only for this specific case.
+                # If EADDRNOTAVAIL occurs in other situations, it
+                # might be a real problem like a typo in a
+                # configuration.
+                sock.close()
+                continue
+            else:
+                raise
+        bound_port = sock.getsockname()[1]
+        sock.listen(backlog)
+        sockets.append(sock)
+    return sockets
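A runnable sketch of the dual-stack behavior described above; port 0 lets the OS pick, and the bound_port bookkeeping keeps IPv4 and IPv6 on the same port number:

    from tornado.netutil import bind_sockets

    socks = bind_sockets(0, address="localhost")
    for s in socks:
        print(s.family, s.getsockname()[:2])
    for s in socks:
        s.close()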
+
+
+if hasattr(socket, "AF_UNIX"):
+
+    def bind_unix_socket(
+        file: str, mode: int = 0o600, backlog: int = _DEFAULT_BACKLOG
+    ) -> socket.socket:
         """Creates a listening unix socket.

         If a socket with the given name already exists, it will be deleted.
@@ -63,11 +201,32 @@ if hasattr(socket, 'AF_UNIX'):
         Returns a socket object (not a list of socket objects like
         `bind_sockets`)
         """
-        pass
-
-
-def add_accept_handler(sock: socket.socket, callback: Callable[[socket.
-    socket, Any], None]) ->Callable[[], None]:
+        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+        try:
+            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        except socket.error as e:
+            if errno_from_exception(e) != errno.ENOPROTOOPT:
+                # Hurd doesn't support SO_REUSEADDR
+                raise
+        sock.setblocking(False)
+        try:
+            st = os.stat(file)
+        except FileNotFoundError:
+            pass
+        else:
+            if stat.S_ISSOCK(st.st_mode):
+                os.remove(file)
+            else:
+                raise ValueError("File %s exists and is not a socket", file)
+        sock.bind(file)
+        os.chmod(file, mode)
+        sock.listen(backlog)
+        return sock
+
+
+def add_accept_handler(
+    sock: socket.socket, callback: Callable[[socket.socket, Any], None]
+) -> Callable[[], None]:
     """Adds an `.IOLoop` event handler to accept new connections on ``sock``.

     When a connection is accepted, ``callback(connection, address)`` will
@@ -85,15 +244,71 @@ def add_accept_handler(sock: socket.socket, callback: Callable[[socket.
     .. versionchanged:: 5.0
        A callable is returned (``None`` was returned before).
     """
-    pass
-
-
-def is_valid_ip(ip: str) ->bool:
+    io_loop = IOLoop.current()
+    removed = [False]
+
+    def accept_handler(fd: socket.socket, events: int) -> None:
+        # More connections may come in while we're handling callbacks;
+        # to prevent starvation of other tasks we must limit the number
+        # of connections we accept at a time.  Ideally we would accept
+        # up to the number of connections that were waiting when we
+        # entered this method, but this information is not available
+        # (and rearranging this method to call accept() as many times
+        # as possible before running any callbacks would have adverse
+        # effects on load balancing in multiprocess configurations).
+        # Instead, we use the (default) listen backlog as a rough
+        # heuristic for the number of connections we can reasonably
+        # accept at once.
+        for i in range(_DEFAULT_BACKLOG):
+            if removed[0]:
+                # The socket was probably closed
+                return
+            try:
+                connection, address = sock.accept()
+            except BlockingIOError:
+                # EWOULDBLOCK indicates we have accepted every
+                # connection that is available.
+                return
+            except ConnectionAbortedError:
+                # ECONNABORTED indicates that there was a connection
+                # but it was closed while still in the accept queue.
+                # (observed on FreeBSD).
+                continue
+            callback(connection, address)
+
+    def remove_handler() -> None:
+        io_loop.remove_handler(sock)
+        removed[0] = True
+
+    io_loop.add_handler(sock, accept_handler, IOLoop.READ)
+    return remove_handler
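A hedged end-to-end sketch; the one-second window and port 0 are arbitrary choices for illustration:

    import asyncio
    import socket
    from typing import Any
    from tornado.netutil import add_accept_handler, bind_sockets

    def on_connect(connection: socket.socket, address: Any) -> None:
        print("accepted connection from", address)
        connection.close()

    async def main() -> None:
        sock = bind_sockets(0, address="127.0.0.1")[0]
        remove_handler = add_accept_handler(sock, on_connect)
        await asyncio.sleep(1)  # accept connections for a while
        remove_handler()        # sets removed[0]; the callback won't fire again
        sock.close()

    asyncio.run(main())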
+
+
+def is_valid_ip(ip: str) -> bool:
     """Returns ``True`` if the given string is a well-formed IP address.

     Supports IPv4 and IPv6.
     """
-    pass
+    if not ip or "\x00" in ip:
+        # getaddrinfo resolves empty strings to localhost, and truncates
+        # on zero bytes.
+        return False
+    try:
+        res = socket.getaddrinfo(
+            ip, 0, socket.AF_UNSPEC, socket.SOCK_STREAM, 0, socket.AI_NUMERICHOST
+        )
+        return bool(res)
+    except socket.gaierror as e:
+        if e.args[0] == socket.EAI_NONAME:
+            return False
+        raise
+    except UnicodeError:
+        # `socket.getaddrinfo` will raise a UnicodeError from the
+        # `idna` decoder if the input is longer than 63 characters,
+        # even for socket.AI_NUMERICHOST.  See
+        # https://bugs.python.org/issue32958 for discussion
+        return False
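A few illustrative checks (inputs chosen to exercise each branch above):

    from tornado.netutil import is_valid_ip

    assert is_valid_ip("127.0.0.1")
    assert is_valid_ip("::1")
    assert not is_valid_ip("localhost")  # hostnames are not numeric addresses
    assert not is_valid_ip("")           # empty would resolve to localhost
    assert not is_valid_ip("a" * 100)    # >63 chars trips the idna UnicodeError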


 class Resolver(Configurable):
@@ -125,8 +340,17 @@ class Resolver(Configurable):
        `DefaultLoopResolver`.
     """

-    def resolve(self, host: str, port: int, family: socket.AddressFamily=
-        socket.AF_UNSPEC) ->Awaitable[List[Tuple[int, Any]]]:
+    @classmethod
+    def configurable_base(cls) -> Type["Resolver"]:
+        return Resolver
+
+    @classmethod
+    def configurable_default(cls) -> Type["Resolver"]:
+        return DefaultLoopResolver
+
+    def resolve(
+        self, host: str, port: int, family: socket.AddressFamily = socket.AF_UNSPEC
+    ) -> Awaitable[List[Tuple[int, Any]]]:
         """Resolves an address.

         The ``host`` argument is a string which may be a hostname or a
@@ -148,9 +372,9 @@ class Resolver(Configurable):
            Use the returned awaitable object instead.

         """
-        pass
+        raise NotImplementedError()

-    def close(self) ->None:
+    def close(self) -> None:
         """Closes the `Resolver`, freeing any resources used.

         .. versionadded:: 3.1
@@ -159,6 +383,21 @@ class Resolver(Configurable):
         pass


+def _resolve_addr(
+    host: str, port: int, family: socket.AddressFamily = socket.AF_UNSPEC
+) -> List[Tuple[int, Any]]:
+    # On Solaris, getaddrinfo fails if the given port is not found
+    # in /etc/services and no socket type is given, so we must pass
+    # one here.  The socket type used here doesn't seem to actually
+    # matter (we discard the one we get back in the results),
+    # so the addresses we return should still be usable with SOCK_DGRAM.
+    addrinfo = socket.getaddrinfo(host, port, family, socket.SOCK_STREAM)
+    results = []
+    for fam, socktype, proto, canonname, address in addrinfo:
+        results.append((fam, address))
+    return results  # type: ignore
+
+
 class DefaultExecutorResolver(Resolver):
     """Resolver implementation using `.IOLoop.run_in_executor`.

@@ -169,10 +408,33 @@ class DefaultExecutorResolver(Resolver):
        Use `DefaultLoopResolver` instead.
     """

+    async def resolve(
+        self, host: str, port: int, family: socket.AddressFamily = socket.AF_UNSPEC
+    ) -> List[Tuple[int, Any]]:
+        result = await IOLoop.current().run_in_executor(
+            None, _resolve_addr, host, port, family
+        )
+        return result
+

 class DefaultLoopResolver(Resolver):
     """Resolver implementation using `asyncio.loop.getaddrinfo`."""

+    async def resolve(
+        self, host: str, port: int, family: socket.AddressFamily = socket.AF_UNSPEC
+    ) -> List[Tuple[int, Any]]:
+        # On Solaris, getaddrinfo fails if the given port is not found
+        # in /etc/services and no socket type is given, so we must pass
+        # one here.  The socket type used here doesn't seem to actually
+        # matter (we discard the one we get back in the results),
+        # so the addresses we return should still be usable with SOCK_DGRAM.
+        return [
+            (fam, address)
+            for fam, _, _, _, address in await asyncio.get_running_loop().getaddrinfo(
+                host, port, family=family, type=socket.SOCK_STREAM
+            )
+        ]
+

 class ExecutorResolver(Resolver):
     """Resolver implementation using a `concurrent.futures.Executor`.
@@ -192,6 +454,29 @@ class ExecutorResolver(Resolver):
        use that instead of this class.
     """

+    def initialize(
+        self,
+        executor: Optional[concurrent.futures.Executor] = None,
+        close_executor: bool = True,
+    ) -> None:
+        if executor is not None:
+            self.executor = executor
+            self.close_executor = close_executor
+        else:
+            self.executor = dummy_executor
+            self.close_executor = False
+
+    def close(self) -> None:
+        if self.close_executor:
+            self.executor.shutdown()
+        self.executor = None  # type: ignore
+
+    @run_on_executor
+    def resolve(
+        self, host: str, port: int, family: socket.AddressFamily = socket.AF_UNSPEC
+    ) -> List[Tuple[int, Any]]:
+        return _resolve_addr(host, port, family)
+

 class BlockingResolver(ExecutorResolver):
     """Default `Resolver` implementation, using `socket.getaddrinfo`.
@@ -204,6 +489,9 @@ class BlockingResolver(ExecutorResolver):
        of this class.
     """

+    def initialize(self) -> None:  # type: ignore
+        super().initialize()
+

 class ThreadedResolver(ExecutorResolver):
     """Multithreaded non-blocking `Resolver` implementation.
@@ -225,8 +513,27 @@ class ThreadedResolver(ExecutorResolver):
        The default `Resolver` now uses `.IOLoop.run_in_executor`; use that instead
        of this class.
     """
-    _threadpool = None
-    _threadpool_pid = None
+
+    _threadpool = None  # type: ignore
+    _threadpool_pid = None  # type: int
+
+    def initialize(self, num_threads: int = 10) -> None:  # type: ignore
+        threadpool = ThreadedResolver._create_threadpool(num_threads)
+        super().initialize(executor=threadpool, close_executor=False)
+
+    @classmethod
+    def _create_threadpool(
+        cls, num_threads: int
+    ) -> concurrent.futures.ThreadPoolExecutor:
+        pid = os.getpid()
+        if cls._threadpool_pid != pid:
+            # Threads cannot survive after a fork, so if our pid isn't what it
+            # was when we created the pool then delete it.
+            cls._threadpool = None
+        if cls._threadpool is None:
+            cls._threadpool = concurrent.futures.ThreadPoolExecutor(num_threads)
+            cls._threadpool_pid = pid
+        return cls._threadpool


 class OverrideResolver(Resolver):
@@ -252,13 +559,37 @@ class OverrideResolver(Resolver):
        Added support for host-port-family triplets.
     """

-
-_SSL_CONTEXT_KEYWORDS = frozenset(['ssl_version', 'certfile', 'keyfile',
-    'cert_reqs', 'ca_certs', 'ciphers'])
-
-
-def ssl_options_to_context(ssl_options: Union[Dict[str, Any], ssl.
-    SSLContext], server_side: Optional[bool]=None) ->ssl.SSLContext:
+    def initialize(self, resolver: Resolver, mapping: dict) -> None:
+        self.resolver = resolver
+        self.mapping = mapping
+
+    def close(self) -> None:
+        self.resolver.close()
+
+    def resolve(
+        self, host: str, port: int, family: socket.AddressFamily = socket.AF_UNSPEC
+    ) -> Awaitable[List[Tuple[int, Any]]]:
+        if (host, port, family) in self.mapping:
+            host, port = self.mapping[(host, port, family)]
+        elif (host, port) in self.mapping:
+            host, port = self.mapping[(host, port)]
+        elif host in self.mapping:
+            host = self.mapping[host]
+        return self.resolver.resolve(host, port, family)
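A sketch of the three lookup granularities; the hostnames and ports in the mapping are illustrative:

    from tornado.netutil import DefaultLoopResolver, OverrideResolver

    resolver = OverrideResolver(
        resolver=DefaultLoopResolver(),
        mapping={
            ("example.com", 443): ("127.0.0.1", 8443),  # host+port override
            "legacy.example.com": "example.org",        # host-only override
        },
    )
    # await resolver.resolve("example.com", 443) now yields 127.0.0.1:8443;
    # triplet keys like ("example.com", 443, socket.AF_INET) win over both.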
+
+
+# These are the keyword arguments to ssl.wrap_socket that must be translated
+# to their SSLContext equivalents (the other arguments are still passed
+# to SSLContext.wrap_socket).
+_SSL_CONTEXT_KEYWORDS = frozenset(
+    ["ssl_version", "certfile", "keyfile", "cert_reqs", "ca_certs", "ciphers"]
+)
+
+
+def ssl_options_to_context(
+    ssl_options: Union[Dict[str, Any], ssl.SSLContext],
+    server_side: Optional[bool] = None,
+) -> ssl.SSLContext:
     """Try to convert an ``ssl_options`` dictionary to an
     `~ssl.SSLContext` object.

@@ -275,12 +606,48 @@ def ssl_options_to_context(ssl_options: Union[Dict[str, Any], ssl.
        result in a DeprecationWarning on Python 3.10.

     """
-    pass
-
-
-def ssl_wrap_socket(socket: socket.socket, ssl_options: Union[Dict[str, Any
-    ], ssl.SSLContext], server_hostname: Optional[str]=None, server_side:
-    Optional[bool]=None, **kwargs: Any) ->ssl.SSLSocket:
+    if isinstance(ssl_options, ssl.SSLContext):
+        return ssl_options
+    assert isinstance(ssl_options, dict)
+    assert all(k in _SSL_CONTEXT_KEYWORDS for k in ssl_options), ssl_options
+    # TODO: Now that we have the server_side argument, can we switch to
+    # create_default_context or would that change behavior?
+    default_version = ssl.PROTOCOL_TLS
+    if server_side:
+        default_version = ssl.PROTOCOL_TLS_SERVER
+    elif server_side is not None:
+        default_version = ssl.PROTOCOL_TLS_CLIENT
+    context = ssl.SSLContext(ssl_options.get("ssl_version", default_version))
+    if "certfile" in ssl_options:
+        context.load_cert_chain(
+            ssl_options["certfile"], ssl_options.get("keyfile", None)
+        )
+    if "cert_reqs" in ssl_options:
+        if ssl_options["cert_reqs"] == ssl.CERT_NONE:
+            # This may have been set automatically by PROTOCOL_TLS_CLIENT but is
+            # incompatible with CERT_NONE so we must manually clear it.
+            context.check_hostname = False
+        context.verify_mode = ssl_options["cert_reqs"]
+    if "ca_certs" in ssl_options:
+        context.load_verify_locations(ssl_options["ca_certs"])
+    if "ciphers" in ssl_options:
+        context.set_ciphers(ssl_options["ciphers"])
+    if hasattr(ssl, "OP_NO_COMPRESSION"):
+        # Disable TLS compression to avoid CRIME and related attacks.
+        # This constant depends on openssl version 1.0.
+        # TODO: Do we need to do this ourselves or can we trust
+        # the defaults?
+        context.options |= ssl.OP_NO_COMPRESSION
+    return context
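A minimal sketch of the translation (certificate-free so it runs as-is):

    import ssl
    from tornado.netutil import ssl_options_to_context

    ctx = ssl_options_to_context({"cert_reqs": ssl.CERT_NONE}, server_side=False)
    assert isinstance(ctx, ssl.SSLContext)
    assert ctx.verify_mode == ssl.CERT_NONE and ctx.check_hostname is False
    # An SSLContext argument is passed through unchanged:
    assert ssl_options_to_context(ctx, server_side=False) is ctx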
+
+
+def ssl_wrap_socket(
+    socket: socket.socket,
+    ssl_options: Union[Dict[str, Any], ssl.SSLContext],
+    server_hostname: Optional[str] = None,
+    server_side: Optional[bool] = None,
+    **kwargs: Any
+) -> ssl.SSLSocket:
     """Returns an ``ssl.SSLSocket`` wrapping the given socket.

     ``ssl_options`` may be either an `ssl.SSLContext` object or a
@@ -292,4 +659,13 @@ def ssl_wrap_socket(socket: socket.socket, ssl_options: Union[Dict[str, Any
        Added server_side argument. Omitting this argument will
        result in a DeprecationWarning on Python 3.10.
     """
-    pass
+    context = ssl_options_to_context(ssl_options, server_side=server_side)
+    if server_side is None:
+        server_side = False
+    assert ssl.HAS_SNI
+    # TODO: add a unittest for hostname validation (python added server-side SNI support in 3.4)
+    # In the meantime it can be manually tested with
+    # python3 -m tornado.httpclient https://sni.velox.ch
+    return context.wrap_socket(
+        socket, server_hostname=server_hostname, server_side=server_side, **kwargs
+    )
diff --git a/tornado/options.py b/tornado/options.py
index 9356e4bd..b8296691 100644
--- a/tornado/options.py
+++ b/tornado/options.py
@@ -1,3 +1,18 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """A command line parsing module that lets modules define their own options.

 This module is inspired by Google's `gflags
@@ -83,20 +98,35 @@ instances to define isolated sets of options, such as for subcommands.
    Dashes are typical for command-line usage while config files require
    underscores.
 """
+
 import datetime
 import numbers
 import re
 import sys
 import os
 import textwrap
+
 from tornado.escape import _unicode, native_str
 from tornado.log import define_logging_options
 from tornado.util import basestring_type, exec_in
-from typing import Any, Iterator, Iterable, Tuple, Set, Dict, Callable, List, TextIO, Optional
+
+from typing import (
+    Any,
+    Iterator,
+    Iterable,
+    Tuple,
+    Set,
+    Dict,
+    Callable,
+    List,
+    TextIO,
+    Optional,
+)


 class Error(Exception):
     """Exception raised by errors in the options module."""
+
     pass


@@ -107,52 +137,60 @@ class OptionParser(object):
     which reference a global instance.
     """

-    def __init__(self) ->None:
-        self.__dict__['_options'] = {}
-        self.__dict__['_parse_callbacks'] = []
-        self.define('help', type=bool, help='show this help information',
-            callback=self._help_callback)
-
-    def __getattr__(self, name: str) ->Any:
+    def __init__(self) -> None:
+        # we have to use self.__dict__ because we override setattr.
+        self.__dict__["_options"] = {}
+        self.__dict__["_parse_callbacks"] = []
+        self.define(
+            "help",
+            type=bool,
+            help="show this help information",
+            callback=self._help_callback,
+        )
+
+    def _normalize_name(self, name: str) -> str:
+        return name.replace("_", "-")
+
+    def __getattr__(self, name: str) -> Any:
         name = self._normalize_name(name)
         if isinstance(self._options.get(name), _Option):
             return self._options[name].value()
-        raise AttributeError('Unrecognized option %r' % name)
+        raise AttributeError("Unrecognized option %r" % name)

-    def __setattr__(self, name: str, value: Any) ->None:
+    def __setattr__(self, name: str, value: Any) -> None:
         name = self._normalize_name(name)
         if isinstance(self._options.get(name), _Option):
             return self._options[name].set(value)
-        raise AttributeError('Unrecognized option %r' % name)
+        raise AttributeError("Unrecognized option %r" % name)

-    def __iter__(self) ->Iterator:
+    def __iter__(self) -> Iterator:
         return (opt.name for opt in self._options.values())

-    def __contains__(self, name: str) ->bool:
+    def __contains__(self, name: str) -> bool:
         name = self._normalize_name(name)
         return name in self._options

-    def __getitem__(self, name: str) ->Any:
+    def __getitem__(self, name: str) -> Any:
         return self.__getattr__(name)

-    def __setitem__(self, name: str, value: Any) ->None:
+    def __setitem__(self, name: str, value: Any) -> None:
         return self.__setattr__(name, value)

-    def items(self) ->Iterable[Tuple[str, Any]]:
+    def items(self) -> Iterable[Tuple[str, Any]]:
         """An iterable of (name, value) pairs.

         .. versionadded:: 3.1
         """
-        pass
+        return [(opt.name, opt.value()) for name, opt in self._options.items()]

-    def groups(self) ->Set[str]:
+    def groups(self) -> Set[str]:
         """The set of option-groups created by ``define``.

         .. versionadded:: 3.1
         """
-        pass
+        return set(opt.group_name for opt in self._options.values())

-    def group_dict(self, group: str) ->Dict[str, Any]:
+    def group_dict(self, group: str) -> Dict[str, Any]:
         """The names and values of options in a group.

         Useful for copying options into Application settings::
@@ -169,19 +207,30 @@ class OptionParser(object):

         .. versionadded:: 3.1
         """
-        pass
+        return dict(
+            (opt.name, opt.value())
+            for name, opt in self._options.items()
+            if not group or group == opt.group_name
+        )

-    def as_dict(self) ->Dict[str, Any]:
+    def as_dict(self) -> Dict[str, Any]:
         """The names and values of all options.

         .. versionadded:: 3.1
         """
-        pass
-
-    def define(self, name: str, default: Any=None, type: Optional[type]=
-        None, help: Optional[str]=None, metavar: Optional[str]=None,
-        multiple: bool=False, group: Optional[str]=None, callback: Optional
-        [Callable[[Any], None]]=None) ->None:
+        return dict((opt.name, opt.value()) for name, opt in self._options.items())
+
+    def define(
+        self,
+        name: str,
+        default: Any = None,
+        type: Optional[type] = None,
+        help: Optional[str] = None,
+        metavar: Optional[str] = None,
+        multiple: bool = False,
+        group: Optional[str] = None,
+        callback: Optional[Callable[[Any], None]] = None,
+    ) -> None:
         """Defines a new command line option.

         ``type`` can be any of `str`, `int`, `float`, `bool`,
@@ -216,10 +265,56 @@ class OptionParser(object):
         by later flags.

         """
-        pass
-
-    def parse_command_line(self, args: Optional[List[str]]=None, final:
-        bool=True) ->List[str]:
+        normalized = self._normalize_name(name)
+        if normalized in self._options:
+            raise Error(
+                "Option %r already defined in %s"
+                % (normalized, self._options[normalized].file_name)
+            )
+        frame = sys._getframe(0)
+        if frame is not None:
+            options_file = frame.f_code.co_filename
+
+            # Can be called directly, or through top level define() fn, in which
+            # case, step up above that frame to look for real caller.
+            if (
+                frame.f_back is not None
+                and frame.f_back.f_code.co_filename == options_file
+                and frame.f_back.f_code.co_name == "define"
+            ):
+                frame = frame.f_back
+
+            assert frame.f_back is not None
+            file_name = frame.f_back.f_code.co_filename
+        else:
+            file_name = "<unknown>"
+        if file_name == options_file:
+            file_name = ""
+        if type is None:
+            if not multiple and default is not None:
+                type = default.__class__
+            else:
+                type = str
+        if group:
+            group_name = group  # type: Optional[str]
+        else:
+            group_name = file_name
+        option = _Option(
+            name,
+            file_name=file_name,
+            default=default,
+            type=type,
+            help=help,
+            metavar=metavar,
+            multiple=multiple,
+            group_name=group_name,
+            callback=callback,
+        )
+        self._options[normalized] = option
+
+    def parse_command_line(
+        self, args: Optional[List[str]] = None, final: bool = True
+    ) -> List[str]:
         """Parses all options given on the command line (defaults to
         `sys.argv`).

@@ -241,9 +336,37 @@ class OptionParser(object):
         from multiple sources.

         """
-        pass
-
-    def parse_config_file(self, path: str, final: bool=True) ->None:
+        if args is None:
+            args = sys.argv
+        remaining = []  # type: List[str]
+        for i in range(1, len(args)):
+            # All things after the last option are command line arguments
+            if not args[i].startswith("-"):
+                remaining = args[i:]
+                break
+            if args[i] == "--":
+                remaining = args[i + 1 :]
+                break
+            arg = args[i].lstrip("-")
+            name, equals, value = arg.partition("=")
+            name = self._normalize_name(name)
+            if name not in self._options:
+                self.print_help()
+                raise Error("Unrecognized command line option: %r" % name)
+            option = self._options[name]
+            if not equals:
+                if option.type == bool:
+                    value = "true"
+                else:
+                    raise Error("Option %r requires a value" % name)
+            option.parse(value)
+
+        if final:
+            self.run_parse_callbacks()
+
+        return remaining
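A self-contained sketch of the parsing rules above (option names are illustrative): bool flags may omit their value, and "--" ends option parsing:

    from tornado.options import OptionParser

    parser = OptionParser()
    parser.define("port", default=8000, type=int, help="listen port")
    parser.define("debug", type=bool, default=False)

    rest = parser.parse_command_line(
        ["prog", "--port=9000", "--debug", "--", "positional"]
    )
    assert parser.port == 9000 and parser.debug is True
    assert rest == ["positional"]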
+
+    def parse_config_file(self, path: str, final: bool = True) -> None:
         """Parses and loads the config file at the given path.

         The config file contains Python code that will be executed (so
@@ -289,17 +412,75 @@ class OptionParser(object):
            Added the ability to set options via strings in config files.

         """
-        pass
-
-    def print_help(self, file: Optional[TextIO]=None) ->None:
+        config = {"__file__": os.path.abspath(path)}
+        with open(path, "rb") as f:
+            exec_in(native_str(f.read()), config, config)
+        for name in config:
+            normalized = self._normalize_name(name)
+            if normalized in self._options:
+                option = self._options[normalized]
+                if option.multiple:
+                    if not isinstance(config[name], (list, str)):
+                        raise Error(
+                            "Option %r is required to be a list of %s "
+                            "or a comma-separated string"
+                            % (option.name, option.type.__name__)
+                        )
+
+                if type(config[name]) == str and (
+                    option.type != str or option.multiple
+                ):
+                    option.parse(config[name])
+                else:
+                    option.set(config[name])
+
+        if final:
+            self.run_parse_callbacks()
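A sketch with a hypothetical config file named myapp.conf; string values are parsed when the option's type is not str, per the branch above:

    # myapp.conf contains plain Python, e.g.:
    #     port = 9000
    #     debug = "true"   # parsed, because the option type is bool
    from tornado.options import OptionParser

    parser = OptionParser()
    parser.define("port", default=8000, type=int)
    parser.define("debug", type=bool, default=False)
    parser.parse_config_file("myapp.conf")
    assert parser.port == 9000 and parser.debug is True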
+
+    def print_help(self, file: Optional[TextIO] = None) -> None:
         """Prints all the command line options to stderr (or another file)."""
-        pass
-
-    def add_parse_callback(self, callback: Callable[[], None]) ->None:
+        if file is None:
+            file = sys.stderr
+        print("Usage: %s [OPTIONS]" % sys.argv[0], file=file)
+        print("\nOptions:\n", file=file)
+        by_group = {}  # type: Dict[str, List[_Option]]
+        for option in self._options.values():
+            by_group.setdefault(option.group_name, []).append(option)
+
+        for filename, o in sorted(by_group.items()):
+            if filename:
+                print("\n%s options:\n" % os.path.normpath(filename), file=file)
+            o.sort(key=lambda option: option.name)
+            for option in o:
+                # Always print names with dashes in a CLI context.
+                prefix = self._normalize_name(option.name)
+                if option.metavar:
+                    prefix += "=" + option.metavar
+                description = option.help or ""
+                if option.default is not None and option.default != "":
+                    description += " (default %s)" % option.default
+                lines = textwrap.wrap(description, 79 - 35)
+                if len(prefix) > 30 or len(lines) == 0:
+                    lines.insert(0, "")
+                print("  --%-30s %s" % (prefix, lines[0]), file=file)
+                for line in lines[1:]:
+                    print("%-34s %s" % (" ", line), file=file)
+        print(file=file)
+
+    def _help_callback(self, value: bool) -> None:
+        if value:
+            self.print_help()
+            sys.exit(0)
+
+    def add_parse_callback(self, callback: Callable[[], None]) -> None:
         """Adds a parse callback, to be invoked when option parsing is done."""
-        pass
+        self._parse_callbacks.append(callback)
+
+    def run_parse_callbacks(self) -> None:
+        for callback in self._parse_callbacks:
+            callback()

-    def mockable(self) ->'_Mockable':
+    def mockable(self) -> "_Mockable":
         """Returns a wrapper around self that is compatible with
         `mock.patch <unittest.mock.patch>`.

@@ -314,7 +495,7 @@ class OptionParser(object):
             with mock.patch.object(options.mockable(), 'name', value):
                 assert options.name == value
         """
-        pass
+        return _Mockable(self)


 class _Mockable(object):
@@ -330,35 +511,46 @@ class _Mockable(object):
     OptionParser, and delattr undoes the effect of a previous setattr.
     """

-    def __init__(self, options: OptionParser) ->None:
-        self.__dict__['_options'] = options
-        self.__dict__['_originals'] = {}
+    def __init__(self, options: OptionParser) -> None:
+        # Modify __dict__ directly to bypass __setattr__
+        self.__dict__["_options"] = options
+        self.__dict__["_originals"] = {}

-    def __getattr__(self, name: str) ->Any:
+    def __getattr__(self, name: str) -> Any:
         return getattr(self._options, name)

-    def __setattr__(self, name: str, value: Any) ->None:
+    def __setattr__(self, name: str, value: Any) -> None:
         assert name not in self._originals, "don't reuse mockable objects"
         self._originals[name] = getattr(self._options, name)
         setattr(self._options, name, value)

-    def __delattr__(self, name: str) ->None:
+    def __delattr__(self, name: str) -> None:
         setattr(self._options, name, self._originals.pop(name))


 class _Option(object):
+    # This class could almost be made generic, but the way the types
+    # interact with the multiple argument makes this tricky. (default
+    # and the callback use List[T], but type is still Type[T]).
     UNSET = object()

-    def __init__(self, name: str, default: Any=None, type: Optional[type]=
-        None, help: Optional[str]=None, metavar: Optional[str]=None,
-        multiple: bool=False, file_name: Optional[str]=None, group_name:
-        Optional[str]=None, callback: Optional[Callable[[Any], None]]=None
-        ) ->None:
+    def __init__(
+        self,
+        name: str,
+        default: Any = None,
+        type: Optional[type] = None,
+        help: Optional[str] = None,
+        metavar: Optional[str] = None,
+        multiple: bool = False,
+        file_name: Optional[str] = None,
+        group_name: Optional[str] = None,
+        callback: Optional[Callable[[Any], None]] = None,
+    ) -> None:
         if default is None and multiple:
             default = []
         self.name = name
         if type is None:
-            raise ValueError('type must not be None')
+            raise ValueError("type must not be None")
         self.type = type
         self.help = help
         self.metavar = metavar
@@ -367,16 +559,124 @@ class _Option(object):
         self.group_name = group_name
         self.callback = callback
         self.default = default
-        self._value = _Option.UNSET
-    _DATETIME_FORMATS = ['%a %b %d %H:%M:%S %Y', '%Y-%m-%d %H:%M:%S',
-        '%Y-%m-%d %H:%M', '%Y-%m-%dT%H:%M', '%Y%m%d %H:%M:%S',
-        '%Y%m%d %H:%M', '%Y-%m-%d', '%Y%m%d', '%H:%M:%S', '%H:%M']
-    _TIMEDELTA_ABBREV_DICT = {'h': 'hours', 'm': 'minutes', 'min':
-        'minutes', 's': 'seconds', 'sec': 'seconds', 'ms': 'milliseconds',
-        'us': 'microseconds', 'd': 'days', 'w': 'weeks'}
-    _FLOAT_PATTERN = '[-+]?(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:[eE][-+]?\\d+)?'
-    _TIMEDELTA_PATTERN = re.compile('\\s*(%s)\\s*(\\w*)\\s*' %
-        _FLOAT_PATTERN, re.IGNORECASE)
+        self._value = _Option.UNSET  # type: Any
+
+    def value(self) -> Any:
+        return self.default if self._value is _Option.UNSET else self._value
+
+    def parse(self, value: str) -> Any:
+        _parse = {
+            datetime.datetime: self._parse_datetime,
+            datetime.timedelta: self._parse_timedelta,
+            bool: self._parse_bool,
+            basestring_type: self._parse_string,
+        }.get(
+            self.type, self.type
+        )  # type: Callable[[str], Any]
+        if self.multiple:
+            self._value = []
+            for part in value.split(","):
+                if issubclass(self.type, numbers.Integral):
+                    # allow ranges of the form X:Y (inclusive at both ends)
+                    lo_str, _, hi_str = part.partition(":")
+                    lo = _parse(lo_str)
+                    hi = _parse(hi_str) if hi_str else lo
+                    self._value.extend(range(lo, hi + 1))
+                else:
+                    self._value.append(_parse(part))
+        else:
+            self._value = _parse(value)
+        if self.callback is not None:
+            self.callback(self._value)
+        return self.value()
+
+    def set(self, value: Any) -> None:
+        if self.multiple:
+            if not isinstance(value, list):
+                raise Error(
+                    "Option %r is required to be a list of %s"
+                    % (self.name, self.type.__name__)
+                )
+            for item in value:
+                if item is not None and not isinstance(item, self.type):
+                    raise Error(
+                        "Option %r is required to be a list of %s"
+                        % (self.name, self.type.__name__)
+                    )
+        else:
+            if value is not None and not isinstance(value, self.type):
+                raise Error(
+                    "Option %r is required to be a %s (%s given)"
+                    % (self.name, self.type.__name__, type(value))
+                )
+        self._value = value
+        if self.callback is not None:
+            self.callback(self._value)
+
+    # Supported date/time formats in our options
+    _DATETIME_FORMATS = [
+        "%a %b %d %H:%M:%S %Y",
+        "%Y-%m-%d %H:%M:%S",
+        "%Y-%m-%d %H:%M",
+        "%Y-%m-%dT%H:%M",
+        "%Y%m%d %H:%M:%S",
+        "%Y%m%d %H:%M",
+        "%Y-%m-%d",
+        "%Y%m%d",
+        "%H:%M:%S",
+        "%H:%M",
+    ]
+
+    def _parse_datetime(self, value: str) -> datetime.datetime:
+        for format in self._DATETIME_FORMATS:
+            try:
+                return datetime.datetime.strptime(value, format)
+            except ValueError:
+                pass
+        raise Error("Unrecognized date/time format: %r" % value)
+
+    _TIMEDELTA_ABBREV_DICT = {
+        "h": "hours",
+        "m": "minutes",
+        "min": "minutes",
+        "s": "seconds",
+        "sec": "seconds",
+        "ms": "milliseconds",
+        "us": "microseconds",
+        "d": "days",
+        "w": "weeks",
+    }
+
+    _FLOAT_PATTERN = r"[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?"
+
+    _TIMEDELTA_PATTERN = re.compile(
+        r"\s*(%s)\s*(\w*)\s*" % _FLOAT_PATTERN, re.IGNORECASE
+    )
+
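+    # Grammar examples: "45s" -> 45 seconds, "1h 30m" -> 1:30:00, and a
+    # unitless "10" is read as seconds. Since the "\w*" unit group is
+    # greedy, a unit must be separated from a following number by
+    # whitespace ("1h30m" would capture "h30m" as the unit and fail).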
+    def _parse_timedelta(self, value: str) -> datetime.timedelta:
+        try:
+            sum = datetime.timedelta()
+            start = 0
+            while start < len(value):
+                m = self._TIMEDELTA_PATTERN.match(value, start)
+                if not m:
+                    raise Exception()
+                num = float(m.group(1))
+                units = m.group(2) or "seconds"
+                units = self._TIMEDELTA_ABBREV_DICT.get(units, units)
+                # This line confuses mypy when setup.py sets python_version=3.6
+                # https://github.com/python/mypy/issues/9676
+                sum += datetime.timedelta(**{units: num})  # type: ignore
+                start = m.end()
+            return sum
+        except Exception:
+            raise
+
+    def _parse_bool(self, value: str) -> bool:
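+        # Only "false", "0", and "f" (case-insensitive) are false; any
+        # other string, including "no" and "off", parses as True.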
+        return value.lower() not in ("false", "0", "f")
+
+    def _parse_string(self, value: str) -> str:
+        return _unicode(value)


 options = OptionParser()
@@ -386,48 +686,65 @@ All defined options are available as attributes on this object.
 """


-def define(name: str, default: Any=None, type: Optional[type]=None, help:
-    Optional[str]=None, metavar: Optional[str]=None, multiple: bool=False,
-    group: Optional[str]=None, callback: Optional[Callable[[Any], None]]=None
-    ) ->None:
+def define(
+    name: str,
+    default: Any = None,
+    type: Optional[type] = None,
+    help: Optional[str] = None,
+    metavar: Optional[str] = None,
+    multiple: bool = False,
+    group: Optional[str] = None,
+    callback: Optional[Callable[[Any], None]] = None,
+) -> None:
     """Defines an option in the global namespace.

     See `OptionParser.define`.
     """
-    pass
-
-
-def parse_command_line(args: Optional[List[str]]=None, final: bool=True
-    ) ->List[str]:
+    return options.define(
+        name,
+        default=default,
+        type=type,
+        help=help,
+        metavar=metavar,
+        multiple=multiple,
+        group=group,
+        callback=callback,
+    )
+
+
+def parse_command_line(
+    args: Optional[List[str]] = None, final: bool = True
+) -> List[str]:
     """Parses global options from the command line.

     See `OptionParser.parse_command_line`.
     """
-    pass
+    return options.parse_command_line(args, final=final)


-def parse_config_file(path: str, final: bool=True) ->None:
+def parse_config_file(path: str, final: bool = True) -> None:
     """Parses global options from a config file.

     See `OptionParser.parse_config_file`.
     """
-    pass
+    return options.parse_config_file(path, final=final)


-def print_help(file: Optional[TextIO]=None) ->None:
+def print_help(file: Optional[TextIO] = None) -> None:
     """Prints all the command line options to stderr (or another file).

     See `OptionParser.print_help`.
     """
-    pass
+    return options.print_help(file)


-def add_parse_callback(callback: Callable[[], None]) ->None:
+def add_parse_callback(callback: Callable[[], None]) -> None:
     """Adds a parse callback, to be invoked when option parsing is done.

     See `OptionParser.add_parse_callback`
     """
-    pass
+    options.add_parse_callback(callback)


+# Default options
 define_logging_options(options)
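
The module-level helpers above just delegate to the global ``options`` object,
so typical usage stays short. A minimal sketch (the file name and option names
are illustrative):

    # myserver.py -- illustrative only
    import datetime

    from tornado.options import define, options, parse_command_line

    define("port", default=8888, type=int, help="listen port")
    define("poll_interval", type=datetime.timedelta,
           default=datetime.timedelta(seconds=5))

    if __name__ == "__main__":
        # e.g. python myserver.py --port=80 --poll_interval="1h 30m"
        parse_command_line()
        print(options.port, options.poll_interval)
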
diff --git a/tornado/platform/asyncio.py b/tornado/platform/asyncio.py
index e67cf2ca..79e60848 100644
--- a/tornado/platform/asyncio.py
+++ b/tornado/platform/asyncio.py
@@ -21,6 +21,7 @@ the same event loop.
    of Tornado (but may reduce performance of other ``asyncio``-based libraries
    in the same process).
 """
+
 import asyncio
 import atexit
 import concurrent.futures
@@ -34,21 +35,238 @@ import typing
 import warnings
 from tornado.gen import convert_yielded
 from tornado.ioloop import IOLoop, _Selectable
-from typing import Any, Callable, Dict, List, Optional, Protocol, Set, Tuple, TypeVar, Union
+
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    List,
+    Optional,
+    Protocol,
+    Set,
+    Tuple,
+    TypeVar,
+    Union,
+)


 class _HasFileno(Protocol):
-    pass
+    def fileno(self) -> int:
+        pass


 _FileDescriptorLike = Union[int, _HasFileno]
-_T = TypeVar('_T')
-_selector_loops: Set['SelectorThread'] = set()
+
+_T = TypeVar("_T")
+
+
+# Collection of selector thread event loops to shut down on exit.
+_selector_loops: Set["SelectorThread"] = set()
+
+
+def _atexit_callback() -> None:
+    for loop in _selector_loops:
+        with loop._select_cond:
+            loop._closing_selector = True
+            loop._select_cond.notify()
+        try:
+            loop._waker_w.send(b"a")
+        except BlockingIOError:
+            pass
+        if loop._thread is not None:
+            # If we don't join our (daemon) thread here, we may get a deadlock
+            # during interpreter shutdown. The cause is unclear: the deadlock
+            # happens every time in CI (both Travis and AppVeyor) but has
+            # never been reproduced locally.
+            loop._thread.join()
+    _selector_loops.clear()
+
+
 atexit.register(_atexit_callback)


 class BaseAsyncIOLoop(IOLoop):
-    pass
+    def initialize(  # type: ignore
+        self, asyncio_loop: asyncio.AbstractEventLoop, **kwargs: Any
+    ) -> None:
+        # asyncio_loop is always the real underlying asyncio event loop.
+        # It is used in ioloop.py to maintain the asyncio-to-IOLoop mappings.
+        self.asyncio_loop = asyncio_loop
+        # selector_loop is an event loop that implements the add_reader family of
+        # methods. Usually the same as asyncio_loop but differs on platforms such
+        # as windows where the default event loop does not implement these methods.
+        self.selector_loop = asyncio_loop
+        if hasattr(asyncio, "ProactorEventLoop") and isinstance(
+            asyncio_loop, asyncio.ProactorEventLoop
+        ):
+            # Ignore this line for mypy because the abstract method checker
+            # doesn't understand dynamic proxies.
+            self.selector_loop = AddThreadSelectorEventLoop(asyncio_loop)  # type: ignore
+        # Maps fd to (fileobj, handler function) pair (as in IOLoop.add_handler)
+        self.handlers: Dict[int, Tuple[Union[int, _Selectable], Callable]] = {}
+        # Set of fds listening for reads/writes
+        self.readers: Set[int] = set()
+        self.writers: Set[int] = set()
+        self.closing = False
+        # If an asyncio loop was closed through an asyncio interface
+        # instead of IOLoop.close(), we'd never hear about it and may
+        # have left a dangling reference in our map. In case an
+        # application (or, more likely, a test suite) creates and
+        # destroys a lot of event loops in this way, check here to
+        # ensure that we don't have a lot of dead loops building up in
+        # the map.
+        #
+        # TODO(bdarnell): consider making self.asyncio_loop a weakref
+        # for AsyncIOMainLoop and make _ioloop_for_asyncio a
+        # WeakKeyDictionary.
+        for loop in IOLoop._ioloop_for_asyncio.copy():
+            if loop.is_closed():
+                try:
+                    del IOLoop._ioloop_for_asyncio[loop]
+                except KeyError:
+                    pass
+
+        # Make sure we don't already have an IOLoop for this asyncio loop
+        existing_loop = IOLoop._ioloop_for_asyncio.setdefault(asyncio_loop, self)
+        if existing_loop is not self:
+            raise RuntimeError(
+                f"IOLoop {existing_loop} already associated with asyncio loop {asyncio_loop}"
+            )
+
+        super().initialize(**kwargs)
+
+    def close(self, all_fds: bool = False) -> None:
+        self.closing = True
+        for fd in list(self.handlers):
+            fileobj, handler_func = self.handlers[fd]
+            self.remove_handler(fd)
+            if all_fds:
+                self.close_fd(fileobj)
+        # Remove the mapping before closing the asyncio loop. If this
+        # happened in the other order, we could race against another
+        # initialize() call which would see the closed asyncio loop,
+        # assume it was closed from the asyncio side, and do this
+        # cleanup for us, leading to a KeyError.
+        del IOLoop._ioloop_for_asyncio[self.asyncio_loop]
+        if self.selector_loop is not self.asyncio_loop:
+            self.selector_loop.close()
+        self.asyncio_loop.close()
+
+    def add_handler(
+        self, fd: Union[int, _Selectable], handler: Callable[..., None], events: int
+    ) -> None:
+        fd, fileobj = self.split_fd(fd)
+        if fd in self.handlers:
+            raise ValueError("fd %s added twice" % fd)
+        self.handlers[fd] = (fileobj, handler)
+        if events & IOLoop.READ:
+            self.selector_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
+            self.readers.add(fd)
+        if events & IOLoop.WRITE:
+            self.selector_loop.add_writer(fd, self._handle_events, fd, IOLoop.WRITE)
+            self.writers.add(fd)
+
+    def update_handler(self, fd: Union[int, _Selectable], events: int) -> None:
+        fd, fileobj = self.split_fd(fd)
+        if events & IOLoop.READ:
+            if fd not in self.readers:
+                self.selector_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
+                self.readers.add(fd)
+        else:
+            if fd in self.readers:
+                self.selector_loop.remove_reader(fd)
+                self.readers.remove(fd)
+        if events & IOLoop.WRITE:
+            if fd not in self.writers:
+                self.selector_loop.add_writer(fd, self._handle_events, fd, IOLoop.WRITE)
+                self.writers.add(fd)
+        else:
+            if fd in self.writers:
+                self.selector_loop.remove_writer(fd)
+                self.writers.remove(fd)
+
+    def remove_handler(self, fd: Union[int, _Selectable]) -> None:
+        fd, fileobj = self.split_fd(fd)
+        if fd not in self.handlers:
+            return
+        if fd in self.readers:
+            self.selector_loop.remove_reader(fd)
+            self.readers.remove(fd)
+        if fd in self.writers:
+            self.selector_loop.remove_writer(fd)
+            self.writers.remove(fd)
+        del self.handlers[fd]
+
+    def _handle_events(self, fd: int, events: int) -> None:
+        fileobj, handler_func = self.handlers[fd]
+        handler_func(fileobj, events)
+
+    def start(self) -> None:
+        self.asyncio_loop.run_forever()
+
+    def stop(self) -> None:
+        self.asyncio_loop.stop()
+
+    def call_at(
+        self, when: float, callback: Callable, *args: Any, **kwargs: Any
+    ) -> object:
+        # asyncio.call_at supports *args but not **kwargs, so bind them here.
+        # We do not synchronize self.time and asyncio_loop.time, so
+        # convert from absolute to relative.
+        return self.asyncio_loop.call_later(
+            max(0, when - self.time()),
+            self._run_callback,
+            functools.partial(callback, *args, **kwargs),
+        )
+
+    def remove_timeout(self, timeout: object) -> None:
+        timeout.cancel()  # type: ignore
+
+    def add_callback(self, callback: Callable, *args: Any, **kwargs: Any) -> None:
+        try:
+            if asyncio.get_running_loop() is self.asyncio_loop:
+                call_soon = self.asyncio_loop.call_soon
+            else:
+                call_soon = self.asyncio_loop.call_soon_threadsafe
+        except RuntimeError:
+            call_soon = self.asyncio_loop.call_soon_threadsafe
+
+        try:
+            call_soon(self._run_callback, functools.partial(callback, *args, **kwargs))
+        except RuntimeError:
+            # "Event loop is closed". Swallow the exception for
+            # consistency with PollIOLoop (and logical consistency
+            # with the fact that we can't guarantee that an
+            # add_callback that completes without error will
+            # eventually execute).
+            pass
+        except AttributeError:
+            # ProactorEventLoop may raise this instead of RuntimeError
+            # if call_soon_threadsafe races with a call to close().
+            # Swallow it too for consistency.
+            pass
+
+    def add_callback_from_signal(
+        self, callback: Callable, *args: Any, **kwargs: Any
+    ) -> None:
+        warnings.warn("add_callback_from_signal is deprecated", DeprecationWarning)
+        try:
+            self.asyncio_loop.call_soon_threadsafe(
+                self._run_callback, functools.partial(callback, *args, **kwargs)
+            )
+        except RuntimeError:
+            pass
+
+    def run_in_executor(
+        self,
+        executor: Optional[concurrent.futures.Executor],
+        func: Callable[..., _T],
+        *args: Any,
+    ) -> "asyncio.Future[_T]":
+        return self.asyncio_loop.run_in_executor(executor, func, *args)
+
+    def set_default_executor(self, executor: concurrent.futures.Executor) -> None:
+        return self.asyncio_loop.set_default_executor(executor)


 class AsyncIOMainLoop(BaseAsyncIOLoop):
@@ -66,6 +284,14 @@ class AsyncIOMainLoop(BaseAsyncIOLoop):
        Closing an `AsyncIOMainLoop` now closes the underlying asyncio loop.
     """

+    def initialize(self, **kwargs: Any) -> None:  # type: ignore
+        super().initialize(asyncio.get_event_loop(), **kwargs)
+
+    def _make_current(self) -> None:
+        # AsyncIOMainLoop already refers to the current asyncio loop so
+        # nothing to do here.
+        pass
+

 class AsyncIOLoop(BaseAsyncIOLoop):
     """``AsyncIOLoop`` is an `.IOLoop` that runs on an ``asyncio`` event loop.
@@ -93,8 +319,41 @@ class AsyncIOLoop(BaseAsyncIOLoop):
        to refer to this class directly.
     """

-
-def to_tornado_future(asyncio_future: asyncio.Future) ->asyncio.Future:
+    def initialize(self, **kwargs: Any) -> None:  # type: ignore
+        self.is_current = False
+        loop = None
+        if "asyncio_loop" not in kwargs:
+            kwargs["asyncio_loop"] = loop = asyncio.new_event_loop()
+        try:
+            super().initialize(**kwargs)
+        except Exception:
+            # If initialize() does not succeed (taking ownership of the loop),
+            # we have to close it.
+            if loop is not None:
+                loop.close()
+            raise
+
+    def close(self, all_fds: bool = False) -> None:
+        if self.is_current:
+            self._clear_current()
+        super().close(all_fds=all_fds)
+
+    def _make_current(self) -> None:
+        if not self.is_current:
+            try:
+                self.old_asyncio = asyncio.get_event_loop()
+            except (RuntimeError, AssertionError):
+                self.old_asyncio = None  # type: ignore
+            self.is_current = True
+        asyncio.set_event_loop(self.asyncio_loop)
+
+    def _clear_current_hook(self) -> None:
+        if self.is_current:
+            asyncio.set_event_loop(self.old_asyncio)
+            self.is_current = False
+
+
+def to_tornado_future(asyncio_future: asyncio.Future) -> asyncio.Future:
     """Convert an `asyncio.Future` to a `tornado.concurrent.Future`.

     .. versionadded:: 4.1
@@ -103,10 +362,10 @@ def to_tornado_future(asyncio_future: asyncio.Future) ->asyncio.Future:
        Tornado ``Futures`` have been merged with `asyncio.Future`,
        so this method is now a no-op.
     """
-    pass
+    return asyncio_future


-def to_asyncio_future(tornado_future: asyncio.Future) ->asyncio.Future:
+def to_asyncio_future(tornado_future: asyncio.Future) -> asyncio.Future:
     """Convert a Tornado yieldable object to an `asyncio.Future`.

     .. versionadded:: 4.1
@@ -119,17 +378,18 @@ def to_asyncio_future(tornado_future: asyncio.Future) ->asyncio.Future:
        Tornado ``Futures`` have been merged with `asyncio.Future`,
        so this method is now equivalent to `tornado.gen.convert_yielded`.
     """
-    pass
+    return convert_yielded(tornado_future)


-if sys.platform == 'win32' and hasattr(asyncio,
-    'WindowsSelectorEventLoopPolicy'):
-    _BasePolicy = asyncio.WindowsSelectorEventLoopPolicy
+if sys.platform == "win32" and hasattr(asyncio, "WindowsSelectorEventLoopPolicy"):
+    # "Any thread" and "selector" should be orthogonal, but there's not a clean
+    # interface for composing policies so pick the right base.
+    _BasePolicy = asyncio.WindowsSelectorEventLoopPolicy  # type: ignore
 else:
     _BasePolicy = asyncio.DefaultEventLoopPolicy


-class AnyThreadEventLoopPolicy(_BasePolicy):
+class AnyThreadEventLoopPolicy(_BasePolicy):  # type: ignore
     """Event loop policy that allows loop creation on any thread.

     The default `asyncio` event loop policy only automatically creates
@@ -156,11 +416,23 @@ class AnyThreadEventLoopPolicy(_BasePolicy):
         need event loops.
     """

-    def __init__(self) ->None:
+    def __init__(self) -> None:
         super().__init__()
         warnings.warn(
-            'AnyThreadEventLoopPolicy is deprecated, use asyncio.run or asyncio.new_event_loop instead'
-            , DeprecationWarning, stacklevel=2)
+            "AnyThreadEventLoopPolicy is deprecated, use asyncio.run "
+            "or asyncio.new_event_loop instead",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+    def get_event_loop(self) -> asyncio.AbstractEventLoop:
+        try:
+            return super().get_event_loop()
+        except RuntimeError:
+            # "There is no current event loop in thread %r"
+            loop = self.new_event_loop()
+            self.set_event_loop(loop)
+            return loop


 class SelectorThread:
@@ -173,29 +445,218 @@ class SelectorThread:
     Typically used via ``AddThreadSelectorEventLoop``,
     but can be attached to a running asyncio loop.
     """
+
     _closed = False

-    def __init__(self, real_loop: asyncio.AbstractEventLoop) ->None:
+    def __init__(self, real_loop: asyncio.AbstractEventLoop) -> None:
         self._real_loop = real_loop
+
         self._select_cond = threading.Condition()
-        self._select_args: Optional[Tuple[List[_FileDescriptorLike], List[
-            _FileDescriptorLike]]] = None
+        self._select_args: Optional[
+            Tuple[List[_FileDescriptorLike], List[_FileDescriptorLike]]
+        ] = None
         self._closing_selector = False
         self._thread: Optional[threading.Thread] = None
         self._thread_manager_handle = self._thread_manager()

-        async def thread_manager_anext() ->None:
+        async def thread_manager_anext() -> None:
+            # The anext builtin wasn't added until Python 3.10; we just need
+            # to advance this generator one step.
             await self._thread_manager_handle.__anext__()
-        self._real_loop.call_soon(lambda : self._real_loop.create_task(
-            thread_manager_anext()))
+
+        # When the loop starts, start the thread. Not sooner: if the event
+        # loop were closed without ever starting, we would have no chance
+        # to clean the thread up.
+        self._real_loop.call_soon(
+            lambda: self._real_loop.create_task(thread_manager_anext())
+        )
+
         self._readers: Dict[_FileDescriptorLike, Callable] = {}
         self._writers: Dict[_FileDescriptorLike, Callable] = {}
+
+        # Writing to _waker_w will wake up the selector thread, which
+        # watches for _waker_r to be readable.
         self._waker_r, self._waker_w = socket.socketpair()
         self._waker_r.setblocking(False)
         self._waker_w.setblocking(False)
         _selector_loops.add(self)
         self.add_reader(self._waker_r, self._consume_waker)

+    def close(self) -> None:
+        if self._closed:
+            return
+        with self._select_cond:
+            self._closing_selector = True
+            self._select_cond.notify()
+        self._wake_selector()
+        if self._thread is not None:
+            self._thread.join()
+        _selector_loops.discard(self)
+        self.remove_reader(self._waker_r)
+        self._waker_r.close()
+        self._waker_w.close()
+        self._closed = True
+
+    async def _thread_manager(self) -> typing.AsyncGenerator[None, None]:
+        # Create a thread to run the select system call. We manage this thread
+        # manually so we can trigger a clean shutdown from an atexit hook. Note
+        # that due to the order of operations at shutdown, only daemon threads
+        # can be shut down in this way (non-daemon threads would require the
+        # introduction of a new hook: https://bugs.python.org/issue41962)
+        self._thread = threading.Thread(
+            name="Tornado selector",
+            daemon=True,
+            target=self._run_select,
+        )
+        self._thread.start()
+        self._start_select()
+        try:
+            # The presence of this yield statement means that this coroutine
+            # is actually an asynchronous generator, which has a special
+            # shutdown protocol. We wait at this yield point until the
+            # event loop's shutdown_asyncgens method is called, at which point
+            # we will get a GeneratorExit exception and can shut down the
+            # selector thread.
+            yield
+        except GeneratorExit:
+            self.close()
+            raise
+
+    def _wake_selector(self) -> None:
+        if self._closed:
+            return
+        try:
+            self._waker_w.send(b"a")
+        except BlockingIOError:
+            pass
+
+    def _consume_waker(self) -> None:
+        try:
+            self._waker_r.recv(1024)
+        except BlockingIOError:
+            pass
+
+    def _start_select(self) -> None:
+        # Capture reader and writer sets here in the event loop
+        # thread to avoid any problems with concurrent
+        # modification while the select loop uses them.
+        with self._select_cond:
+            assert self._select_args is None
+            self._select_args = (list(self._readers.keys()), list(self._writers.keys()))
+            self._select_cond.notify()
+
+    def _run_select(self) -> None:
+        while True:
+            with self._select_cond:
+                while self._select_args is None and not self._closing_selector:
+                    self._select_cond.wait()
+                if self._closing_selector:
+                    return
+                assert self._select_args is not None
+                to_read, to_write = self._select_args
+                self._select_args = None
+
+            # We use the simpler interface of the select module instead of
+            # the more stateful interface in the selectors module because
+            # this class is only intended for use on windows, where
+            # select.select is the only option. The selector interface
+            # does not have well-documented thread-safety semantics that
+            # we can rely on so ensuring proper synchronization would be
+            # tricky.
+            try:
+                # On windows, selecting on a socket for write will not
+                # return the socket when there is an error (but selecting
+                # for reads works). Also select for errors when selecting
+                # for writes, and merge the results.
+                #
+                # This pattern is also used in
+                # https://github.com/python/cpython/blob/v3.8.0/Lib/selectors.py#L312-L317
+                rs, ws, xs = select.select(to_read, to_write, to_write)
+                ws = ws + xs
+            except OSError as e:
+                # After remove_reader or remove_writer is called, the file
+                # descriptor may subsequently be closed on the event loop
+                # thread. It's possible that this select thread hasn't
+                # gotten into the select system call by the time that
+                # happens, in which case (at least on macOS) select may
+                # raise a "bad file descriptor" error. If we get that
+                # error, poll the waker alone to check whether we're also
+                # being woken up. If we are, just return to the
+                # event loop and we'll get the updated set of file
+                # descriptors on the next iteration. Otherwise, raise the
+                # original error.
+                if e.errno == getattr(errno, "WSAENOTSOCK", errno.EBADF):
+                    rs, _, _ = select.select([self._waker_r.fileno()], [], [], 0)
+                    if rs:
+                        ws = []
+                    else:
+                        raise
+                else:
+                    raise
+
+            try:
+                self._real_loop.call_soon_threadsafe(self._handle_select, rs, ws)
+            except RuntimeError:
+                # "Event loop is closed". Swallow the exception for
+                # consistency with PollIOLoop (and logical consistency
+                # with the fact that we can't guarantee that an
+                # add_callback that completes without error will
+                # eventually execute).
+                pass
+            except AttributeError:
+                # ProactorEventLoop may raise this instead of RuntimeError
+                # if call_soon_threadsafe races with a call to close().
+                # Swallow it too for consistency.
+                pass
+
+    def _handle_select(
+        self, rs: List[_FileDescriptorLike], ws: List[_FileDescriptorLike]
+    ) -> None:
+        for r in rs:
+            self._handle_event(r, self._readers)
+        for w in ws:
+            self._handle_event(w, self._writers)
+        self._start_select()
+
+    def _handle_event(
+        self,
+        fd: _FileDescriptorLike,
+        cb_map: Dict[_FileDescriptorLike, Callable],
+    ) -> None:
+        try:
+            callback = cb_map[fd]
+        except KeyError:
+            return
+        callback()
+
+    def add_reader(
+        self, fd: _FileDescriptorLike, callback: Callable[..., None], *args: Any
+    ) -> None:
+        self._readers[fd] = functools.partial(callback, *args)
+        self._wake_selector()
+
+    def add_writer(
+        self, fd: _FileDescriptorLike, callback: Callable[..., None], *args: Any
+    ) -> None:
+        self._writers[fd] = functools.partial(callback, *args)
+        self._wake_selector()
+
+    def remove_reader(self, fd: _FileDescriptorLike) -> bool:
+        try:
+            del self._readers[fd]
+        except KeyError:
+            return False
+        self._wake_selector()
+        return True
+
+    def remove_writer(self, fd: _FileDescriptorLike) -> bool:
+        try:
+            del self._writers[fd]
+        except KeyError:
+            return False
+        self._wake_selector()
+        return True
+

 class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop):
     """Wrap an event loop to add implementations of the ``add_reader`` method family.
@@ -214,14 +675,44 @@ class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop):
     Closing the ``AddThreadSelectorEventLoop`` also closes the wrapped event loop.

     """
-    MY_ATTRIBUTES = {'_real_loop', '_selector', 'add_reader', 'add_writer',
-        'close', 'remove_reader', 'remove_writer'}

-    def __getattribute__(self, name: str) ->Any:
+    # This class is a __getattribute__-based proxy. All attributes other than those
+    # in this set are proxied through to the underlying loop.
+    MY_ATTRIBUTES = {
+        "_real_loop",
+        "_selector",
+        "add_reader",
+        "add_writer",
+        "close",
+        "remove_reader",
+        "remove_writer",
+    }
+
+    def __getattribute__(self, name: str) -> Any:
         if name in AddThreadSelectorEventLoop.MY_ATTRIBUTES:
             return super().__getattribute__(name)
         return getattr(self._real_loop, name)

-    def __init__(self, real_loop: asyncio.AbstractEventLoop) ->None:
+    def __init__(self, real_loop: asyncio.AbstractEventLoop) -> None:
         self._real_loop = real_loop
         self._selector = SelectorThread(real_loop)
+
+    def close(self) -> None:
+        self._selector.close()
+        self._real_loop.close()
+
+    def add_reader(
+        self, fd: "_FileDescriptorLike", callback: Callable[..., None], *args: Any
+    ) -> None:
+        return self._selector.add_reader(fd, callback, *args)
+
+    def add_writer(
+        self, fd: "_FileDescriptorLike", callback: Callable[..., None], *args: Any
+    ) -> None:
+        return self._selector.add_writer(fd, callback, *args)
+
+    def remove_reader(self, fd: "_FileDescriptorLike") -> bool:
+        return self._selector.remove_reader(fd)
+
+    def remove_writer(self, fd: "_FileDescriptorLike") -> bool:
+        return self._selector.remove_writer(fd)
diff --git a/tornado/platform/caresresolver.py b/tornado/platform/caresresolver.py
index 379ad0a2..1ba45c9a 100644
--- a/tornado/platform/caresresolver.py
+++ b/tornado/platform/caresresolver.py
@@ -1,12 +1,15 @@
-import pycares
+import pycares  # type: ignore
 import socket
+
 from tornado.concurrent import Future
 from tornado import gen
 from tornado.ioloop import IOLoop
 from tornado.netutil import Resolver, is_valid_ip
+
 import typing
+
 if typing.TYPE_CHECKING:
-    from typing import Generator, Any, List, Tuple, Dict
+    from typing import Generator, Any, List, Tuple, Dict  # noqa: F401


 class CaresResolver(Resolver):
@@ -29,3 +32,63 @@ class CaresResolver(Resolver):
        This class is deprecated and will be removed in Tornado 7.0. Use the default
        thread-based resolver instead.
     """
+
+    def initialize(self) -> None:
+        self.io_loop = IOLoop.current()
+        self.channel = pycares.Channel(sock_state_cb=self._sock_state_cb)
+        self.fds = {}  # type: Dict[int, int]
+
+    def _sock_state_cb(self, fd: int, readable: bool, writable: bool) -> None:
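+        # pycares reports which events it wants on each fd; mirror that in
+        # the IOLoop handler registrations below.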
+        state = (IOLoop.READ if readable else 0) | (IOLoop.WRITE if writable else 0)
+        if not state:
+            self.io_loop.remove_handler(fd)
+            del self.fds[fd]
+        elif fd in self.fds:
+            self.io_loop.update_handler(fd, state)
+            self.fds[fd] = state
+        else:
+            self.io_loop.add_handler(fd, self._handle_events, state)
+            self.fds[fd] = state
+
+    def _handle_events(self, fd: int, events: int) -> None:
+        read_fd = pycares.ARES_SOCKET_BAD
+        write_fd = pycares.ARES_SOCKET_BAD
+        if events & IOLoop.READ:
+            read_fd = fd
+        if events & IOLoop.WRITE:
+            write_fd = fd
+        self.channel.process_fd(read_fd, write_fd)
+
+    @gen.coroutine
+    def resolve(
+        self, host: str, port: int, family: int = 0
+    ) -> "Generator[Any, Any, List[Tuple[int, Any]]]":
+        if is_valid_ip(host):
+            addresses = [host]
+        else:
+            # gethostbyname doesn't take callback as a kwarg
+            fut = Future()  # type: Future[Tuple[Any, Any]]
+            self.channel.gethostbyname(
+                host, family, lambda result, error: fut.set_result((result, error))
+            )
+            result, error = yield fut
+            if error:
+                raise IOError(
+                    "C-Ares returned error %s: %s while resolving %s"
+                    % (error, pycares.errno.strerror(error), host)
+                )
+            addresses = result.addresses
+        addrinfo = []
+        for address in addresses:
+            if "." in address:
+                address_family = socket.AF_INET
+            elif ":" in address:
+                address_family = socket.AF_INET6
+            else:
+                address_family = socket.AF_UNSPEC
+            if family != socket.AF_UNSPEC and family != address_family:
+                raise IOError(
+                    "Requested socket family %d but got %d" % (family, address_family)
+                )
+            addrinfo.append((typing.cast(int, address_family), (address, port)))
+        return addrinfo
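
A sketch of wiring this resolver in; it should be configured before anything
else creates a ``Resolver`` (``example.com`` is a placeholder, and ``pycares``
must be installed):

    import asyncio
    import socket

    from tornado.netutil import Resolver

    Resolver.configure("tornado.platform.caresresolver.CaresResolver")

    async def main() -> None:
        resolver = Resolver()  # constructs a CaresResolver now
        addrinfo = await resolver.resolve("example.com", 80, socket.AF_INET)
        print(addrinfo)

    asyncio.run(main())
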
diff --git a/tornado/platform/twisted.py b/tornado/platform/twisted.py
index 4546b24f..153fe436 100644
--- a/tornado/platform/twisted.py
+++ b/tornado/platform/twisted.py
@@ -1,22 +1,39 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
 """Bridges between the Twisted package and Tornado.
 """
+
 import socket
 import sys
-import twisted.internet.abstract
-import twisted.internet.asyncioreactor
-from twisted.internet.defer import Deferred
-from twisted.python import failure
-import twisted.names.cache
-import twisted.names.client
-import twisted.names.hosts
-import twisted.names.resolve
+
+import twisted.internet.abstract  # type: ignore
+import twisted.internet.asyncioreactor  # type: ignore
+from twisted.internet.defer import Deferred  # type: ignore
+from twisted.python import failure  # type: ignore
+import twisted.names.cache  # type: ignore
+import twisted.names.client  # type: ignore
+import twisted.names.hosts  # type: ignore
+import twisted.names.resolve  # type: ignore
+
+
 from tornado.concurrent import Future, future_set_exc_info
 from tornado.escape import utf8
 from tornado import gen
 from tornado.netutil import Resolver
+
 import typing
+
 if typing.TYPE_CHECKING:
-    from typing import Generator, Any, List, Tuple
+    from typing import Generator, Any, List, Tuple  # noqa: F401


 class TwistedResolver(Resolver):
@@ -41,8 +58,57 @@ class TwistedResolver(Resolver):
        thread-based resolver instead.
     """

+    def initialize(self) -> None:
+        # partial copy of twisted.names.client.createResolver, which doesn't
+        # allow for a reactor to be passed in.
+        self.reactor = twisted.internet.asyncioreactor.AsyncioSelectorReactor()
+
+        host_resolver = twisted.names.hosts.Resolver("/etc/hosts")
+        cache_resolver = twisted.names.cache.CacheResolver(reactor=self.reactor)
+        real_resolver = twisted.names.client.Resolver(
+            "/etc/resolv.conf", reactor=self.reactor
+        )
+        self.resolver = twisted.names.resolve.ResolverChain(
+            [host_resolver, cache_resolver, real_resolver]
+        )

-def install() ->None:
+    @gen.coroutine
+    def resolve(
+        self, host: str, port: int, family: int = 0
+    ) -> "Generator[Any, Any, List[Tuple[int, Any]]]":
+        # getHostByName doesn't accept IP addresses, so if the input
+        # looks like an IP address just return it immediately.
+        if twisted.internet.abstract.isIPAddress(host):
+            resolved = host
+            resolved_family = socket.AF_INET
+        elif twisted.internet.abstract.isIPv6Address(host):
+            resolved = host
+            resolved_family = socket.AF_INET6
+        else:
+            deferred = self.resolver.getHostByName(utf8(host))
+            fut = Future()  # type: Future[Any]
+            deferred.addBoth(fut.set_result)
+            resolved = yield fut
+            if isinstance(resolved, failure.Failure):
+                try:
+                    resolved.raiseException()
+                except twisted.names.error.DomainError as e:
+                    raise IOError(e)
+            elif twisted.internet.abstract.isIPAddress(resolved):
+                resolved_family = socket.AF_INET
+            elif twisted.internet.abstract.isIPv6Address(resolved):
+                resolved_family = socket.AF_INET6
+            else:
+                resolved_family = socket.AF_UNSPEC
+        if family != socket.AF_UNSPEC and family != resolved_family:
+            raise Exception(
+                "Requested socket family %d but got %d" % (family, resolved_family)
+            )
+        result = [(typing.cast(int, resolved_family), (resolved, port))]
+        return result
+
+
+def install() -> None:
     """Install ``AsyncioSelectorReactor`` as the default Twisted reactor.

     .. deprecated:: 5.1
@@ -61,7 +127,24 @@ def install() ->None:
        ``asyncio`` reactor instead.

     """
-    pass
+    from twisted.internet.asyncioreactor import install
+
+    install()
+
+
+if hasattr(gen.convert_yielded, "register"):
+
+    @gen.convert_yielded.register(Deferred)  # type: ignore
+    def _(d: Deferred) -> Future:
+        f = Future()  # type: Future[Any]

+        def errback(failure: failure.Failure) -> None:
+            try:
+                failure.raiseException()
+                # Should never happen, but just in case
+                raise Exception("errback called without error")
+            except:
+                future_set_exc_info(f, sys.exc_info())

-if hasattr(gen.convert_yielded, 'register'):
+        d.addCallbacks(f.set_result, errback)
+        return f
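
With the handler above registered, a Twisted ``Deferred`` can be awaited from
coroutine code via ``convert_yielded``. A minimal sketch (requires Twisted;
importing ``tornado.platform.twisted`` performs the registration):

    import asyncio

    import tornado.platform.twisted  # noqa: F401 (registers the handler)
    from tornado.gen import convert_yielded
    from twisted.internet.defer import Deferred

    async def main() -> None:
        d = Deferred()
        f = convert_yielded(d)  # wrapped in a Future by the handler above
        d.callback(42)  # on failure, the errback path sets the exception
        assert await f == 42

    asyncio.run(main())
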
diff --git a/tornado/process.py b/tornado/process.py
index 3afe7dc4..12e3eb64 100644
--- a/tornado/process.py
+++ b/tornado/process.py
@@ -1,6 +1,22 @@
+#
+# Copyright 2011 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """Utilities for working with multiple processes, including both forking
 the server into multiple processes and managing subprocesses.
 """
+
 import asyncio
 import os
 import multiprocessing
@@ -8,28 +24,65 @@ import signal
 import subprocess
 import sys
 import time
+
 from binascii import hexlify
-from tornado.concurrent import Future, future_set_result_unless_cancelled, future_set_exception_unless_cancelled
+
+from tornado.concurrent import (
+    Future,
+    future_set_result_unless_cancelled,
+    future_set_exception_unless_cancelled,
+)
 from tornado import ioloop
 from tornado.iostream import PipeIOStream
 from tornado.log import gen_log
+
 import typing
 from typing import Optional, Any, Callable
+
 if typing.TYPE_CHECKING:
-    from typing import List
+    from typing import List  # noqa: F401
+
+# Re-export this exception for convenience.
 CalledProcessError = subprocess.CalledProcessError


-def cpu_count() ->int:
+def cpu_count() -> int:
     """Returns the number of processors on this machine."""
-    pass
+    if multiprocessing is None:
+        return 1
+    try:
+        return multiprocessing.cpu_count()
+    except NotImplementedError:
+        pass
+    try:
+        return os.sysconf("SC_NPROCESSORS_CONF")  # type: ignore
+    except (AttributeError, ValueError):
+        pass
+    gen_log.error("Could not detect number of processors; assuming 1")
+    return 1
+
+
+def _reseed_random() -> None:
+    if "random" not in sys.modules:
+        return
+    import random
+
+    # If os.urandom is available, this method does the same thing as
+    # random.seed (at least as of python 2.6).  If os.urandom is not
+    # available, we mix in the pid in addition to a timestamp.
+    try:
+        seed = int(hexlify(os.urandom(16)), 16)
+    except NotImplementedError:
+        seed = int(time.time() * 1000) ^ os.getpid()
+    random.seed(seed)


 _task_id = None


-def fork_processes(num_processes: Optional[int], max_restarts: Optional[int
-    ]=None) ->int:
+def fork_processes(
+    num_processes: Optional[int], max_restarts: Optional[int] = None
+) -> int:
     """Starts multiple worker processes.

     If ``num_processes`` is None or <= 0, we detect the number of cores
@@ -57,15 +110,79 @@ def fork_processes(num_processes: Optional[int], max_restarts: Optional[int

     Availability: Unix
     """
-    pass
-
-
-def task_id() ->Optional[int]:
+    if sys.platform == "win32":
+        # The exact form of this condition matters to mypy; it understands
+        # if but not assert in this context.
+        raise Exception("fork not available on windows")
+    if max_restarts is None:
+        max_restarts = 100
+
+    global _task_id
+    assert _task_id is None
+    if num_processes is None or num_processes <= 0:
+        num_processes = cpu_count()
+    gen_log.info("Starting %d processes", num_processes)
+    children = {}
+
+    def start_child(i: int) -> Optional[int]:
+        pid = os.fork()
+        if pid == 0:
+            # child process
+            _reseed_random()
+            global _task_id
+            _task_id = i
+            return i
+        else:
+            children[pid] = i
+            return None
+
+    for i in range(num_processes):
+        id = start_child(i)
+        if id is not None:
+            return id
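+    # In the parent: supervise the children, restarting any that exit
+    # abnormally, up to max_restarts restarts in total.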
+    num_restarts = 0
+    while children:
+        pid, status = os.wait()
+        if pid not in children:
+            continue
+        id = children.pop(pid)
+        if os.WIFSIGNALED(status):
+            gen_log.warning(
+                "child %d (pid %d) killed by signal %d, restarting",
+                id,
+                pid,
+                os.WTERMSIG(status),
+            )
+        elif os.WEXITSTATUS(status) != 0:
+            gen_log.warning(
+                "child %d (pid %d) exited with status %d, restarting",
+                id,
+                pid,
+                os.WEXITSTATUS(status),
+            )
+        else:
+            gen_log.info("child %d (pid %d) exited normally", id, pid)
+            continue
+        num_restarts += 1
+        if num_restarts > max_restarts:
+            raise RuntimeError("Too many child restarts, giving up")
+        new_id = start_child(id)
+        if new_id is not None:
+            return new_id
+    # All child processes exited cleanly, so exit the master process
+    # instead of just returning to right after the call to
+    # fork_processes (which will probably just start up another IOLoop
+    # unless the caller checks the return value).
+    sys.exit(0)
+
+
+def task_id() -> Optional[int]:
     """Returns the current task id, if any.

     Returns None if this process was not created by `fork_processes`.
     """
-    pass
+    global _task_id
+    return _task_id


 class Subprocess(object):
@@ -89,29 +206,33 @@ class Subprocess(object):
        The ``io_loop`` argument (deprecated since version 4.1) has been removed.

     """
+
     STREAM = object()
+
     _initialized = False
-    _waiting = {}
+    _waiting = {}  # type: ignore

-    def __init__(self, *args: Any, **kwargs: Any) ->None:
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
         self.io_loop = ioloop.IOLoop.current()
-        pipe_fds = []
-        to_close = []
-        if kwargs.get('stdin') is Subprocess.STREAM:
+        # All FDs we create should be closed on error; those in to_close
+        # should be closed in the parent process on success.
+        pipe_fds = []  # type: List[int]
+        to_close = []  # type: List[int]
+        if kwargs.get("stdin") is Subprocess.STREAM:
             in_r, in_w = os.pipe()
-            kwargs['stdin'] = in_r
+            kwargs["stdin"] = in_r
             pipe_fds.extend((in_r, in_w))
             to_close.append(in_r)
             self.stdin = PipeIOStream(in_w)
-        if kwargs.get('stdout') is Subprocess.STREAM:
+        if kwargs.get("stdout") is Subprocess.STREAM:
             out_r, out_w = os.pipe()
-            kwargs['stdout'] = out_w
+            kwargs["stdout"] = out_w
             pipe_fds.extend((out_r, out_w))
             to_close.append(out_w)
             self.stdout = PipeIOStream(out_r)
-        if kwargs.get('stderr') is Subprocess.STREAM:
+        if kwargs.get("stderr") is Subprocess.STREAM:
             err_r, err_w = os.pipe()
-            kwargs['stderr'] = err_w
+            kwargs["stderr"] = err_w
             pipe_fds.extend((err_r, err_w))
             to_close.append(err_w)
             self.stderr = PipeIOStream(err_r)
@@ -124,13 +245,13 @@ class Subprocess(object):
         for fd in to_close:
             os.close(fd)
         self.pid = self.proc.pid
-        for attr in ['stdin', 'stdout', 'stderr']:
-            if not hasattr(self, attr):
+        for attr in ["stdin", "stdout", "stderr"]:
+            if not hasattr(self, attr):  # don't clobber streams set above
                 setattr(self, attr, getattr(self.proc, attr))
-        self._exit_callback = None
-        self.returncode = None
+        self._exit_callback = None  # type: Optional[Callable[[int], None]]
+        self.returncode = None  # type: Optional[int]

-    def set_exit_callback(self, callback: Callable[[int], None]) ->None:
+    def set_exit_callback(self, callback: Callable[[int], None]) -> None:
         """Runs ``callback`` when this process exits.

         The callback takes one argument, the return code of the process.
@@ -147,9 +268,12 @@ class Subprocess(object):

         Availability: Unix
         """
-        pass
+        self._exit_callback = callback
+        Subprocess.initialize()
+        Subprocess._waiting[self.pid] = self
+        Subprocess._try_cleanup_process(self.pid)

-    def wait_for_exit(self, raise_error: bool=True) ->'Future[int]':
+    def wait_for_exit(self, raise_error: bool = True) -> "Future[int]":
         """Returns a `.Future` which resolves when the process exits.

         Usage::
@@ -167,10 +291,22 @@ class Subprocess(object):

         Availability: Unix
         """
-        pass
+        future = Future()  # type: Future[int]
+
+        def callback(ret: int) -> None:
+            if ret != 0 and raise_error:
+                # Unfortunately we don't have the original args any more.
+                future_set_exception_unless_cancelled(
+                    future, CalledProcessError(ret, "unknown")
+                )
+            else:
+                future_set_result_unless_cancelled(future, ret)
+
+        self.set_exit_callback(callback)
+        return future

     @classmethod
-    def initialize(cls) ->None:
+    def initialize(cls) -> None:
         """Initializes the ``SIGCHLD`` handler.

         The signal handler is run on an `.IOLoop` to avoid locking issues.
@@ -184,9 +320,52 @@ class Subprocess(object):

         Availability: Unix
         """
-        pass
+        if cls._initialized:
+            return
+        loop = asyncio.get_event_loop()
+        loop.add_signal_handler(signal.SIGCHLD, cls._cleanup)
+        cls._initialized = True

     @classmethod
-    def uninitialize(cls) ->None:
+    def uninitialize(cls) -> None:
         """Removes the ``SIGCHLD`` handler."""
-        pass
+        if not cls._initialized:
+            return
+        loop = asyncio.get_event_loop()
+        loop.remove_signal_handler(signal.SIGCHLD)
+        cls._initialized = False
+
+    @classmethod
+    def _cleanup(cls) -> None:
+        for pid in list(cls._waiting.keys()):  # make a copy
+            cls._try_cleanup_process(pid)
+
+    @classmethod
+    def _try_cleanup_process(cls, pid: int) -> None:
+        try:
+            ret_pid, status = os.waitpid(pid, os.WNOHANG)  # type: ignore
+        except ChildProcessError:
+            return
+        if ret_pid == 0:
+            return
+        assert ret_pid == pid
+        subproc = cls._waiting.pop(pid)
+        subproc.io_loop.add_callback(subproc._set_returncode, status)
+
+    def _set_returncode(self, status: int) -> None:
+        if sys.platform == "win32":
+            self.returncode = -1
+        else:
+            if os.WIFSIGNALED(status):
+                self.returncode = -os.WTERMSIG(status)
+            else:
+                assert os.WIFEXITED(status)
+                self.returncode = os.WEXITSTATUS(status)
+        # We've taken over wait() duty from the subprocess.Popen
+        # object. If we don't inform it of the process's return code,
+        # it will log a warning at destruction in python 3.6+.
+        self.proc.returncode = self.returncode
+        if self._exit_callback:
+            callback = self._exit_callback
+            self._exit_callback = None
+            callback(self.returncode)
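
Putting the pieces together, a short sketch of ``Subprocess`` with ``STREAM``
pipes (Unix only; the command is illustrative):

    import asyncio

    from tornado.process import Subprocess

    async def main() -> None:
        proc = Subprocess(["echo", "hello"], stdout=Subprocess.STREAM)
        output = await proc.stdout.read_until_close()
        ret = await proc.wait_for_exit()
        print(output, ret)  # b'hello\n' 0

    asyncio.run(main())
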
diff --git a/tornado/queues.py b/tornado/queues.py
index fde4734a..1358d0ec 100644
--- a/tornado/queues.py
+++ b/tornado/queues.py
@@ -1,3 +1,17 @@
+# Copyright 2015 The Tornado Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """Asynchronous queues for coroutines. These classes are very similar
 to those provided in the standard library's `asyncio package
 <https://docs.python.org/3/library/asyncio-queue.html>`_.
@@ -10,36 +24,57 @@ to those provided in the standard library's `asyncio package
    before calling any queue methods.

 """
+
 import collections
 import datetime
 import heapq
+
 from tornado import gen, ioloop
 from tornado.concurrent import Future, future_set_result_unless_cancelled
 from tornado.locks import Event
+
 from typing import Union, TypeVar, Generic, Awaitable, Optional
 import typing
+
 if typing.TYPE_CHECKING:
-    from typing import Deque, Tuple, Any
-_T = TypeVar('_T')
-__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty']
+    from typing import Deque, Tuple, Any  # noqa: F401
+
+_T = TypeVar("_T")
+
+__all__ = ["Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty"]


 class QueueEmpty(Exception):
     """Raised by `.Queue.get_nowait` when the queue has no items."""
+
     pass


 class QueueFull(Exception):
     """Raised by `.Queue.put_nowait` when a queue is at its maximum size."""
+
     pass


-class _QueueIterator(Generic[_T]):
+def _set_timeout(
+    future: Future, timeout: Union[None, float, datetime.timedelta]
+) -> None:
+    if timeout:

-    def __init__(self, q: 'Queue[_T]') ->None:
+        def on_timeout() -> None:
+            if not future.done():
+                future.set_exception(gen.TimeoutError())
+
+        io_loop = ioloop.IOLoop.current()
+        timeout_handle = io_loop.add_timeout(timeout, on_timeout)
+        future.add_done_callback(lambda _: io_loop.remove_timeout(timeout_handle))
+
+
+class _QueueIterator(Generic[_T]):
+    def __init__(self, q: "Queue[_T]") -> None:
         self.q = q

-    def __anext__(self) ->Awaitable[_T]:
+    def __anext__(self) -> Awaitable[_T]:
         return self.q.get()


@@ -110,32 +145,47 @@ class Queue(Generic[_T]):
        Added ``async for`` support in Python 3.5.

     """
-    _queue = None

-    def __init__(self, maxsize: int=0) ->None:
+    # Exact type depends on subclass. Could be another generic
+    # parameter and use protocols to be more precise here.
+    _queue = None  # type: Any
+
+    def __init__(self, maxsize: int = 0) -> None:
         if maxsize is None:
             raise TypeError("maxsize can't be None")
+
         if maxsize < 0:
             raise ValueError("maxsize can't be negative")
+
         self._maxsize = maxsize
         self._init()
-        self._getters = collections.deque([])
-        self._putters = collections.deque([])
+        self._getters = collections.deque([])  # type: Deque[Future[_T]]
+        self._putters = collections.deque([])  # type: Deque[Tuple[_T, Future[None]]]
         self._unfinished_tasks = 0
         self._finished = Event()
         self._finished.set()

     @property
-    def maxsize(self) ->int:
+    def maxsize(self) -> int:
         """Number of items allowed in the queue."""
-        pass
+        return self._maxsize

-    def qsize(self) ->int:
+    def qsize(self) -> int:
         """Number of items in the queue."""
-        pass
+        return len(self._queue)
+
+    def empty(self) -> bool:
+        return not self._queue
+
+    def full(self) -> bool:
+        if self.maxsize == 0:
+            return False
+        else:
+            return self.qsize() >= self.maxsize

-    def put(self, item: _T, timeout: Optional[Union[float, datetime.
-        timedelta]]=None) ->'Future[None]':
+    def put(
+        self, item: _T, timeout: Optional[Union[float, datetime.timedelta]] = None
+    ) -> "Future[None]":
         """Put an item into the queue, perhaps waiting until there is room.

         Returns a Future, which raises `tornado.util.TimeoutError` after a
@@ -146,17 +196,35 @@ class Queue(Generic[_T]):
         `datetime.timedelta` object for a deadline relative to the
         current time.
         """
-        pass
-
-    def put_nowait(self, item: _T) ->None:
+        future = Future()  # type: Future[None]
+        try:
+            self.put_nowait(item)
+        except QueueFull:
+            self._putters.append((item, future))
+            _set_timeout(future, timeout)
+        else:
+            future.set_result(None)
+        return future
+
+    def put_nowait(self, item: _T) -> None:
         """Put an item into the queue without blocking.

         If no free slot is immediately available, raise `QueueFull`.
         """
-        pass
-
-    def get(self, timeout: Optional[Union[float, datetime.timedelta]]=None
-        ) ->Awaitable[_T]:
+        self._consume_expired()
+        if self._getters:
+            assert self.empty(), "queue non-empty, why are getters waiting?"
+            getter = self._getters.popleft()
+            self.__put_internal(item)
+            future_set_result_unless_cancelled(getter, self._get())
+        elif self.full():
+            raise QueueFull
+        else:
+            self.__put_internal(item)
+
+    def get(
+        self, timeout: Optional[Union[float, datetime.timedelta]] = None
+    ) -> Awaitable[_T]:
         """Remove and return an item from the queue.

         Returns an awaitable which resolves once an item is available, or raises
@@ -177,17 +245,33 @@ class Queue(Generic[_T]):
            with other timeouts in Tornado).

         """
-        pass
-
-    def get_nowait(self) ->_T:
+        future = Future()  # type: Future[_T]
+        try:
+            future.set_result(self.get_nowait())
+        except QueueEmpty:
+            self._getters.append(future)
+            _set_timeout(future, timeout)
+        return future
+
+    def get_nowait(self) -> _T:
         """Remove and return an item from the queue without blocking.

         Return an item if one is immediately available, else raise
         `QueueEmpty`.
         """
-        pass
-
-    def task_done(self) ->None:
+        self._consume_expired()
+        if self._putters:
+            assert self.full(), "queue not full, why are putters waiting?"
+            item, putter = self._putters.popleft()
+            self.__put_internal(item)
+            future_set_result_unless_cancelled(putter, None)
+            return self._get()
+        elif self.qsize():
+            return self._get()
+        else:
+            raise QueueEmpty
+
+    def task_done(self) -> None:
         """Indicate that a formerly enqueued task is complete.

         Used by queue consumers. For each `.get` used to fetch a task, a
@@ -199,26 +283,67 @@ class Queue(Generic[_T]):

         Raises `ValueError` if called more times than `.put`.
         """
-        pass
-
-    def join(self, timeout: Optional[Union[float, datetime.timedelta]]=None
-        ) ->Awaitable[None]:
+        if self._unfinished_tasks <= 0:
+            raise ValueError("task_done() called too many times")
+        self._unfinished_tasks -= 1
+        if self._unfinished_tasks == 0:
+            self._finished.set()
+
+    def join(
+        self, timeout: Optional[Union[float, datetime.timedelta]] = None
+    ) -> Awaitable[None]:
         """Block until all items in the queue are processed.

         Returns an awaitable, which raises `tornado.util.TimeoutError` after a
         timeout.
         """
-        pass
+        return self._finished.wait(timeout)

-    def __aiter__(self) ->_QueueIterator[_T]:
+    def __aiter__(self) -> _QueueIterator[_T]:
         return _QueueIterator(self)

-    def __repr__(self) ->str:
-        return '<%s at %s %s>' % (type(self).__name__, hex(id(self)), self.
-            _format())
+    # These three are overridable in subclasses.
+    def _init(self) -> None:
+        self._queue = collections.deque()
+
+    def _get(self) -> _T:
+        return self._queue.popleft()
+
+    def _put(self, item: _T) -> None:
+        self._queue.append(item)
+
+    # End of the overridable methods.
+
+    def __put_internal(self, item: _T) -> None:
+        self._unfinished_tasks += 1
+        self._finished.clear()
+        self._put(item)
+
+    def _consume_expired(self) -> None:
+        # Remove timed-out waiters.
+        while self._putters and self._putters[0][1].done():
+            self._putters.popleft()

-    def __str__(self) ->str:
-        return '<%s %s>' % (type(self).__name__, self._format())
+        while self._getters and self._getters[0].done():
+            self._getters.popleft()
+
+    def __repr__(self) -> str:
+        return "<%s at %s %s>" % (type(self).__name__, hex(id(self)), self._format())
+
+    def __str__(self) -> str:
+        return "<%s %s>" % (type(self).__name__, self._format())
+
+    def _format(self) -> str:
+        result = "maxsize=%r" % (self.maxsize,)
+        if getattr(self, "_queue", None):
+            result += " queue=%r" % self._queue
+        if self._getters:
+            result += " getters[%s]" % len(self._getters)
+        if self._putters:
+            result += " putters[%s]" % len(self._putters)
+        if self._unfinished_tasks:
+            result += " tasks=%s" % self._unfinished_tasks
+        return result


 class PriorityQueue(Queue):
@@ -250,6 +375,15 @@ class PriorityQueue(Queue):
         (10, 'low-priority item')
     """

+    def _init(self) -> None:
+        self._queue = []
+
+    def _put(self, item: _T) -> None:
+        heapq.heappush(self._queue, item)
+
+    def _get(self) -> _T:  # type: ignore[type-var]
+        return heapq.heappop(self._queue)
+

 class LifoQueue(Queue):
     """A `.Queue` that retrieves the most recently put items first.
@@ -277,3 +411,12 @@ class LifoQueue(Queue):
         2
         3
     """
+
+    def _init(self) -> None:
+        self._queue = []
+
+    def _put(self, item: _T) -> None:
+        self._queue.append(item)
+
+    def _get(self) -> _T:  # type: ignore[type-var]
+        return self._queue.pop()
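
Editor's note: the restored `put`/`get`/`task_done`/`join` methods above form the
usual producer/consumer workflow. A minimal sketch of how this reconstructed
API fits together (assuming Tornado 5+ where the IOLoop runs on asyncio; the
`consumer` name and the printed output are illustrative only):

    import asyncio
    from tornado.queues import Queue

    async def main() -> None:
        q = Queue(maxsize=2)  # type: Queue[int]

        async def consumer() -> None:
            async for item in q:  # uses _QueueIterator via __aiter__
                try:
                    print("got", item)
                finally:
                    q.task_done()  # pairs with each get()

        asyncio.create_task(consumer())
        for i in range(5):
            await q.put(i)  # waits while the queue is full
        await q.join()  # resolves once task_done() has balanced every put()

    asyncio.run(main())
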
diff --git a/tornado/routing.py b/tornado/routing.py
index 111cbaaa..a145d719 100644
--- a/tornado/routing.py
+++ b/tornado/routing.py
@@ -1,3 +1,17 @@
+# Copyright 2015 The Tornado Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """Flexible routing implementation.

 Tornado routes HTTP requests to appropriate handlers using `Router`
@@ -160,21 +174,25 @@ For more information on application-level routing see docs for `~.web.Applicatio
 .. versionadded:: 4.5

 """
+
 import re
 from functools import partial
+
 from tornado import httputil
 from tornado.httpserver import _CallableAdapter
 from tornado.escape import url_escape, url_unescape, utf8
 from tornado.log import app_log
 from tornado.util import basestring_type, import_object, re_unescape, unicode_type
+
 from typing import Any, Union, Optional, Awaitable, List, Dict, Pattern, Tuple, overload


 class Router(httputil.HTTPServerConnectionDelegate):
     """Abstract router interface."""

-    def find_handler(self, request: httputil.HTTPServerRequest, **kwargs: Any
-        ) ->Optional[httputil.HTTPMessageDelegate]:
+    def find_handler(
+        self, request: httputil.HTTPServerRequest, **kwargs: Any
+    ) -> Optional[httputil.HTTPMessageDelegate]:
         """Must be implemented to return an appropriate instance of `~.httputil.HTTPMessageDelegate`
         that can serve the request.
         Routing implementations may pass additional kwargs to extend the routing logic.
@@ -184,7 +202,12 @@ class Router(httputil.HTTPServerConnectionDelegate):
         :returns: an instance of `~.httputil.HTTPMessageDelegate` that will be used to
             process the request.
         """
-        pass
+        raise NotImplementedError()
+
+    def start_request(
+        self, server_conn: object, request_conn: httputil.HTTPConnection
+    ) -> httputil.HTTPMessageDelegate:
+        return _RoutingDelegate(self, server_conn, request_conn)


 class ReversibleRouter(Router):
@@ -192,7 +215,7 @@ class ReversibleRouter(Router):
     and support reversing them to original urls.
     """

-    def reverse_url(self, name: str, *args: Any) ->Optional[str]:
+    def reverse_url(self, name: str, *args: Any) -> Optional[str]:
         """Returns url string for a given route name and arguments
         or ``None`` if no match is found.

@@ -200,34 +223,84 @@ class ReversibleRouter(Router):
         :arg args: url parameters.
         :returns: parametrized url string for a given route name (or ``None``).
         """
-        pass
+        raise NotImplementedError()


 class _RoutingDelegate(httputil.HTTPMessageDelegate):
-
-    def __init__(self, router: Router, server_conn: object, request_conn:
-        httputil.HTTPConnection) ->None:
+    def __init__(
+        self, router: Router, server_conn: object, request_conn: httputil.HTTPConnection
+    ) -> None:
         self.server_conn = server_conn
         self.request_conn = request_conn
-        self.delegate = None
-        self.router = router
+        self.delegate = None  # type: Optional[httputil.HTTPMessageDelegate]
+        self.router = router  # type: Router
+
+    def headers_received(
+        self,
+        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
+        headers: httputil.HTTPHeaders,
+    ) -> Optional[Awaitable[None]]:
+        assert isinstance(start_line, httputil.RequestStartLine)
+        request = httputil.HTTPServerRequest(
+            connection=self.request_conn,
+            server_connection=self.server_conn,
+            start_line=start_line,
+            headers=headers,
+        )
+
+        self.delegate = self.router.find_handler(request)
+        if self.delegate is None:
+            app_log.debug(
+                "Delegate for %s %s request not found",
+                start_line.method,
+                start_line.path,
+            )
+            self.delegate = _DefaultMessageDelegate(self.request_conn)
+
+        return self.delegate.headers_received(start_line, headers)
+
+    def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
+        assert self.delegate is not None
+        return self.delegate.data_received(chunk)
+
+    def finish(self) -> None:
+        assert self.delegate is not None
+        self.delegate.finish()
+
+    def on_connection_close(self) -> None:
+        assert self.delegate is not None
+        self.delegate.on_connection_close()


 class _DefaultMessageDelegate(httputil.HTTPMessageDelegate):
-
-    def __init__(self, connection: httputil.HTTPConnection) ->None:
+    def __init__(self, connection: httputil.HTTPConnection) -> None:
         self.connection = connection

+    def finish(self) -> None:
+        self.connection.write_headers(
+            httputil.ResponseStartLine("HTTP/1.1", 404, "Not Found"),
+            httputil.HTTPHeaders(),
+        )
+        self.connection.finish()
+

-_RuleList = List[Union['Rule', List[Any], Tuple[Union[str, 'Matcher'], Any],
-    Tuple[Union[str, 'Matcher'], Any, Dict[str, Any]], Tuple[Union[str,
-    'Matcher'], Any, Dict[str, Any], str]]]
+# _RuleList can either contain pre-constructed Rules or a sequence of
+# arguments to be passed to the Rule constructor.
+_RuleList = List[
+    Union[
+        "Rule",
+        List[Any],  # Can't do detailed typechecking of lists.
+        Tuple[Union[str, "Matcher"], Any],
+        Tuple[Union[str, "Matcher"], Any, Dict[str, Any]],
+        Tuple[Union[str, "Matcher"], Any, Dict[str, Any], str],
+    ]
+]


 class RuleRouter(Router):
     """Rule-based router implementation."""

-    def __init__(self, rules: Optional[_RuleList]=None) ->None:
+    def __init__(self, rules: Optional[_RuleList] = None) -> None:
         """Constructs a router from an ordered list of rules::

             RuleRouter([
@@ -254,29 +327,55 @@ class RuleRouter(Router):
         :arg rules: a list of `Rule` instances or tuples of `Rule`
             constructor arguments.
         """
-        self.rules = []
+        self.rules = []  # type: List[Rule]
         if rules:
             self.add_rules(rules)

-    def add_rules(self, rules: _RuleList) ->None:
+    def add_rules(self, rules: _RuleList) -> None:
         """Appends new rules to the router.

         :arg rules: a list of Rule instances (or tuples of arguments, which are
             passed to Rule constructor).
         """
-        pass
+        for rule in rules:
+            if isinstance(rule, (tuple, list)):
+                assert len(rule) in (2, 3, 4)
+                if isinstance(rule[0], basestring_type):
+                    rule = Rule(PathMatches(rule[0]), *rule[1:])
+                else:
+                    rule = Rule(*rule)

-    def process_rule(self, rule: 'Rule') ->'Rule':
+            self.rules.append(self.process_rule(rule))
+
+    def process_rule(self, rule: "Rule") -> "Rule":
         """Override this method for additional preprocessing of each rule.

         :arg Rule rule: a rule to be processed.
         :returns: the same or modified Rule instance.
         """
-        pass
+        return rule
+
+    def find_handler(
+        self, request: httputil.HTTPServerRequest, **kwargs: Any
+    ) -> Optional[httputil.HTTPMessageDelegate]:
+        for rule in self.rules:
+            target_params = rule.matcher.match(request)
+            if target_params is not None:
+                if rule.target_kwargs:
+                    target_params["target_kwargs"] = rule.target_kwargs

-    def get_target_delegate(self, target: Any, request: httputil.
-        HTTPServerRequest, **target_params: Any) ->Optional[httputil.
-        HTTPMessageDelegate]:
+                delegate = self.get_target_delegate(
+                    rule.target, request, **target_params
+                )
+
+                if delegate is not None:
+                    return delegate
+
+        return None
+
+    def get_target_delegate(
+        self, target: Any, request: httputil.HTTPServerRequest, **target_params: Any
+    ) -> Optional[httputil.HTTPMessageDelegate]:
         """Returns an instance of `~.httputil.HTTPMessageDelegate` for a
         Rule's target. This method is called by `~.find_handler` and can be
         extended to provide additional target types.
@@ -286,7 +385,20 @@ class RuleRouter(Router):
         :arg target_params: additional parameters that can be useful
             for `~.httputil.HTTPMessageDelegate` creation.
         """
-        pass
+        if isinstance(target, Router):
+            return target.find_handler(request, **target_params)
+
+        elif isinstance(target, httputil.HTTPServerConnectionDelegate):
+            assert request.connection is not None
+            return target.start_request(request.server_connection, request.connection)
+
+        elif callable(target):
+            assert request.connection is not None
+            return _CallableAdapter(
+                partial(target, **target_params), request.connection
+            )
+
+        return None


 class ReversibleRuleRouter(ReversibleRouter, RuleRouter):
@@ -297,16 +409,45 @@ class ReversibleRuleRouter(ReversibleRouter, RuleRouter):
     in a rule's matcher (see `Matcher.reverse`).
     """

-    def __init__(self, rules: Optional[_RuleList]=None) ->None:
-        self.named_rules = {}
+    def __init__(self, rules: Optional[_RuleList] = None) -> None:
+        self.named_rules = {}  # type: Dict[str, Any]
         super().__init__(rules)

+    def process_rule(self, rule: "Rule") -> "Rule":
+        rule = super().process_rule(rule)
+
+        if rule.name:
+            if rule.name in self.named_rules:
+                app_log.warning(
+                    "Multiple handlers named %s; replacing previous value", rule.name
+                )
+            self.named_rules[rule.name] = rule
+
+        return rule
+
+    def reverse_url(self, name: str, *args: Any) -> Optional[str]:
+        if name in self.named_rules:
+            return self.named_rules[name].matcher.reverse(*args)
+
+        for rule in self.rules:
+            if isinstance(rule.target, ReversibleRouter):
+                reversed_url = rule.target.reverse_url(name, *args)
+                if reversed_url is not None:
+                    return reversed_url
+
+        return None
+

 class Rule(object):
     """A routing rule."""

-    def __init__(self, matcher: 'Matcher', target: Any, target_kwargs:
-        Optional[Dict[str, Any]]=None, name: Optional[str]=None) ->None:
+    def __init__(
+        self,
+        matcher: "Matcher",
+        target: Any,
+        target_kwargs: Optional[Dict[str, Any]] = None,
+        name: Optional[str] = None,
+    ) -> None:
         """Constructs a Rule instance.

         :arg Matcher matcher: a `Matcher` instance used for determining
@@ -324,22 +465,32 @@ class Rule(object):
             in `ReversibleRouter.reverse_url` implementation.
         """
         if isinstance(target, str):
+            # import the Module and instantiate the class
+            # Must be a fully qualified name (module.ClassName)
             target = import_object(target)
-        self.matcher = matcher
+
+        self.matcher = matcher  # type: Matcher
         self.target = target
         self.target_kwargs = target_kwargs if target_kwargs else {}
         self.name = name

-    def __repr__(self) ->str:
-        return '%s(%r, %s, kwargs=%r, name=%r)' % (self.__class__.__name__,
-            self.matcher, self.target, self.target_kwargs, self.name)
+    def reverse(self, *args: Any) -> Optional[str]:
+        return self.matcher.reverse(*args)
+
+    def __repr__(self) -> str:
+        return "%s(%r, %s, kwargs=%r, name=%r)" % (
+            self.__class__.__name__,
+            self.matcher,
+            self.target,
+            self.target_kwargs,
+            self.name,
+        )


 class Matcher(object):
     """Represents a matcher for request features."""

-    def match(self, request: httputil.HTTPServerRequest) ->Optional[Dict[
-        str, Any]]:
+    def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]:
         """Matches current instance against the request.

         :arg httputil.HTTPServerRequest request: current HTTP request
@@ -349,60 +500,150 @@ class Matcher(object):
             An empty dict is a valid (and common) return value to indicate a match
             when the argument-passing features are not used.
             ``None`` must be returned to indicate that there is no match."""
-        pass
+        raise NotImplementedError()

-    def reverse(self, *args: Any) ->Optional[str]:
+    def reverse(self, *args: Any) -> Optional[str]:
         """Reconstructs full url from matcher instance and additional arguments."""
-        pass
+        return None


 class AnyMatches(Matcher):
     """Matches any request."""

+    def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]:
+        return {}
+

 class HostMatches(Matcher):
     """Matches requests from hosts specified by ``host_pattern`` regex."""

-    def __init__(self, host_pattern: Union[str, Pattern]) ->None:
+    def __init__(self, host_pattern: Union[str, Pattern]) -> None:
         if isinstance(host_pattern, basestring_type):
-            if not host_pattern.endswith('$'):
-                host_pattern += '$'
+            if not host_pattern.endswith("$"):
+                host_pattern += "$"
             self.host_pattern = re.compile(host_pattern)
         else:
             self.host_pattern = host_pattern

+    def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]:
+        if self.host_pattern.match(request.host_name):
+            return {}
+
+        return None
+

 class DefaultHostMatches(Matcher):
     """Matches requests from host that is equal to application's default_host.
     Always returns no match if ``X-Real-Ip`` header is present.
     """

-    def __init__(self, application: Any, host_pattern: Pattern) ->None:
+    def __init__(self, application: Any, host_pattern: Pattern) -> None:
         self.application = application
         self.host_pattern = host_pattern

+    def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]:
+        # Look for default host if not behind load balancer (for debugging)
+        if "X-Real-Ip" not in request.headers:
+            if self.host_pattern.match(self.application.default_host):
+                return {}
+        return None
+

 class PathMatches(Matcher):
     """Matches requests with paths specified by ``path_pattern`` regex."""

-    def __init__(self, path_pattern: Union[str, Pattern]) ->None:
+    def __init__(self, path_pattern: Union[str, Pattern]) -> None:
         if isinstance(path_pattern, basestring_type):
-            if not path_pattern.endswith('$'):
-                path_pattern += '$'
+            if not path_pattern.endswith("$"):
+                path_pattern += "$"
             self.regex = re.compile(path_pattern)
         else:
             self.regex = path_pattern
-        assert len(self.regex.groupindex) in (0, self.regex.groups
-            ), 'groups in url regexes must either be all named or all positional: %r' % self.regex.pattern
+
+        assert len(self.regex.groupindex) in (0, self.regex.groups), (
+            "groups in url regexes must either be all named or all "
+            "positional: %r" % self.regex.pattern
+        )
+
         self._path, self._group_count = self._find_groups()

-    def _find_groups(self) ->Tuple[Optional[str], Optional[int]]:
+    def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]:
+        match = self.regex.match(request.path)
+        if match is None:
+            return None
+        if not self.regex.groups:
+            return {}
+
+        path_args = []  # type: List[bytes]
+        path_kwargs = {}  # type: Dict[str, bytes]
+
+        # Pass matched groups to the handler.  Since
+        # match.groups() includes both named and
+        # unnamed groups, we want to use either groups
+        # or groupdict but not both.
+        if self.regex.groupindex:
+            path_kwargs = dict(
+                (str(k), _unquote_or_none(v)) for (k, v) in match.groupdict().items()
+            )
+        else:
+            path_args = [_unquote_or_none(s) for s in match.groups()]
+
+        return dict(path_args=path_args, path_kwargs=path_kwargs)
+
+    def reverse(self, *args: Any) -> Optional[str]:
+        if self._path is None:
+            raise ValueError("Cannot reverse url regex " + self.regex.pattern)
+        assert len(args) == self._group_count, (
+            "required number of arguments not found"
+        )
+        if not len(args):
+            return self._path
+        converted_args = []
+        for a in args:
+            if not isinstance(a, (unicode_type, bytes)):
+                a = str(a)
+            converted_args.append(url_escape(utf8(a), plus=False))
+        return self._path % tuple(converted_args)
+
+    def _find_groups(self) -> Tuple[Optional[str], Optional[int]]:
         """Returns a tuple (reverse string, group count) for a url.

         For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
         would return ('/%s/%s/', 2).
         """
-        pass
+        pattern = self.regex.pattern
+        if pattern.startswith("^"):
+            pattern = pattern[1:]
+        if pattern.endswith("$"):
+            pattern = pattern[:-1]
+
+        if self.regex.groups != pattern.count("("):
+            # The pattern is too complicated for our simplistic matching,
+            # so we can't support reversing it.
+            return None, None
+
+        pieces = []
+        for fragment in pattern.split("("):
+            if ")" in fragment:
+                paren_loc = fragment.index(")")
+                if paren_loc >= 0:
+                    try:
+                        unescaped_fragment = re_unescape(fragment[paren_loc + 1 :])
+                    except ValueError:
+                        # If we can't unescape part of it, we can't
+                        # reverse this url.
+                        return (None, None)
+                    pieces.append("%s" + unescaped_fragment)
+            else:
+                try:
+                    unescaped_fragment = re_unescape(fragment)
+                except ValueError:
+                    # If we can't unescape part of it, we can't
+                    # reverse this url.
+                    return (None, None)
+                pieces.append(unescaped_fragment)
+
+        return "".join(pieces), self.regex.groups


 class URLSpec(Rule):
@@ -413,8 +654,13 @@ class URLSpec(Rule):
        backwards compatibility.
     """

-    def __init__(self, pattern: Union[str, Pattern], handler: Any, kwargs:
-        Optional[Dict[str, Any]]=None, name: Optional[str]=None) ->None:
+    def __init__(
+        self,
+        pattern: Union[str, Pattern],
+        handler: Any,
+        kwargs: Optional[Dict[str, Any]] = None,
+        name: Optional[str] = None,
+    ) -> None:
         """Parameters:

         * ``pattern``: Regular expression to be matched. Any capturing
@@ -434,20 +680,38 @@ class URLSpec(Rule):
         """
         matcher = PathMatches(pattern)
         super().__init__(matcher, handler, kwargs, name)
+
         self.regex = matcher.regex
         self.handler_class = self.target
         self.kwargs = kwargs

-    def __repr__(self) ->str:
-        return '%s(%r, %s, kwargs=%r, name=%r)' % (self.__class__.__name__,
-            self.regex.pattern, self.handler_class, self.kwargs, self.name)
+    def __repr__(self) -> str:
+        return "%s(%r, %s, kwargs=%r, name=%r)" % (
+            self.__class__.__name__,
+            self.regex.pattern,
+            self.handler_class,
+            self.kwargs,
+            self.name,
+        )
+
+
+@overload
+def _unquote_or_none(s: str) -> bytes:
+    pass


-def _unquote_or_none(s: Optional[str]) ->Optional[bytes]:
+@overload  # noqa: F811
+def _unquote_or_none(s: None) -> None:
+    pass
+
+
+def _unquote_or_none(s: Optional[str]) -> Optional[bytes]:  # noqa: F811
     """None-safe wrapper around url_unescape to handle unmatched optional
     groups correctly.

     Note that args are passed as bytes so the handler can decide what
     encoding to use.
     """
-    pass
+    if s is None:
+        return s
+    return url_unescape(s, encoding=None, plus=False)
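
Editor's note: the matching and reversing logic restored above can be exercised
without a running server. A minimal sketch (the `FakeRequest` stand-in is
illustrative; `PathMatches.match` only reads `request.path`):

    from tornado.routing import PathMatches

    matcher = PathMatches(r"/post/([0-9]+)")

    class FakeRequest:
        path = "/post/42"

    # match() hands captured groups to the handler as bytes
    # (see _unquote_or_none above).
    assert matcher.match(FakeRequest()) == {
        "path_args": [b"42"],
        "path_kwargs": {},
    }

    # reverse() fills in the "/post/%s" template computed by _find_groups().
    assert matcher.reverse(42) == "/post/42"
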
diff --git a/tornado/simple_httpclient.py b/tornado/simple_httpclient.py
index 5ce89b18..5b2d4dcd 100644
--- a/tornado/simple_httpclient.py
+++ b/tornado/simple_httpclient.py
@@ -1,13 +1,26 @@
 from tornado.escape import _unicode
 from tornado import gen, version
-from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main, _RequestProxy, HTTPRequest
+from tornado.httpclient import (
+    HTTPResponse,
+    HTTPError,
+    AsyncHTTPClient,
+    main,
+    _RequestProxy,
+    HTTPRequest,
+)
 from tornado import httputil
 from tornado.http1connection import HTTP1Connection, HTTP1ConnectionParameters
 from tornado.ioloop import IOLoop
 from tornado.iostream import StreamClosedError, IOStream
-from tornado.netutil import Resolver, OverrideResolver, _client_ssl_defaults, is_valid_ip
+from tornado.netutil import (
+    Resolver,
+    OverrideResolver,
+    _client_ssl_defaults,
+    is_valid_ip,
+)
 from tornado.log import gen_log
 from tornado.tcpclient import TCPClient
+
 import base64
 import collections
 import copy
@@ -19,11 +32,13 @@ import sys
 import time
 from io import BytesIO
 import urllib.parse
+
 from typing import Dict, Any, Callable, Optional, Type, Union
 from types import TracebackType
 import typing
+
 if typing.TYPE_CHECKING:
-    from typing import Deque, Tuple, List
+    from typing import Deque, Tuple, List  # noqa: F401


 class HTTPTimeoutError(HTTPError):
@@ -35,11 +50,11 @@ class HTTPTimeoutError(HTTPError):
     .. versionadded:: 5.1
     """

-    def __init__(self, message: str) ->None:
+    def __init__(self, message: str) -> None:
         super().__init__(599, message=message)

-    def __str__(self) ->str:
-        return self.message or 'Timeout'
+    def __str__(self) -> str:
+        return self.message or "Timeout"


 class HTTPStreamClosedError(HTTPError):
@@ -54,11 +69,11 @@ class HTTPStreamClosedError(HTTPError):
     .. versionadded:: 5.1
     """

-    def __init__(self, message: str) ->None:
+    def __init__(self, message: str) -> None:
         super().__init__(599, message=message)

-    def __str__(self) ->str:
-        return self.message or 'Stream closed'
+    def __str__(self) -> str:
+        return self.message or "Stream closed"


 class SimpleAsyncHTTPClient(AsyncHTTPClient):
@@ -100,7 +115,119 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
         Added the ``max_body_size`` argument.
     """

-    def _on_timeout(self, key: object, info: Optional[str]=None) ->None:
+    def initialize(  # type: ignore
+        self,
+        max_clients: int = 10,
+        hostname_mapping: Optional[Dict[str, str]] = None,
+        max_buffer_size: int = 104857600,
+        resolver: Optional[Resolver] = None,
+        defaults: Optional[Dict[str, Any]] = None,
+        max_header_size: Optional[int] = None,
+        max_body_size: Optional[int] = None,
+    ) -> None:
+        super().initialize(defaults=defaults)
+        self.max_clients = max_clients
+        self.queue = (
+            collections.deque()
+        )  # type: Deque[Tuple[object, HTTPRequest, Callable[[HTTPResponse], None]]]
+        self.active = (
+            {}
+        )  # type: Dict[object, Tuple[HTTPRequest, Callable[[HTTPResponse], None]]]
+        self.waiting = (
+            {}
+        )  # type: Dict[object, Tuple[HTTPRequest, Callable[[HTTPResponse], None], object]]
+        self.max_buffer_size = max_buffer_size
+        self.max_header_size = max_header_size
+        self.max_body_size = max_body_size
+        # TCPClient could create a Resolver for us, but we have to do it
+        # ourselves to support hostname_mapping.
+        if resolver:
+            self.resolver = resolver
+            self.own_resolver = False
+        else:
+            self.resolver = Resolver()
+            self.own_resolver = True
+        if hostname_mapping is not None:
+            self.resolver = OverrideResolver(
+                resolver=self.resolver, mapping=hostname_mapping
+            )
+        self.tcp_client = TCPClient(resolver=self.resolver)
+
+    def close(self) -> None:
+        super().close()
+        if self.own_resolver:
+            self.resolver.close()
+        self.tcp_client.close()
+
+    def fetch_impl(
+        self, request: HTTPRequest, callback: Callable[[HTTPResponse], None]
+    ) -> None:
+        key = object()
+        self.queue.append((key, request, callback))
+        assert request.connect_timeout is not None
+        assert request.request_timeout is not None
+        timeout_handle = None
+        if len(self.active) >= self.max_clients:
+            timeout = (
+                min(request.connect_timeout, request.request_timeout)
+                or request.connect_timeout
+                or request.request_timeout
+            )  # min but skip zero
+            if timeout:
+                timeout_handle = self.io_loop.add_timeout(
+                    self.io_loop.time() + timeout,
+                    functools.partial(self._on_timeout, key, "in request queue"),
+                )
+        self.waiting[key] = (request, callback, timeout_handle)
+        self._process_queue()
+        if self.queue:
+            gen_log.debug(
+                "max_clients limit reached, request queued. "
+                "%d active, %d queued requests." % (len(self.active), len(self.queue))
+            )
+
+    def _process_queue(self) -> None:
+        while self.queue and len(self.active) < self.max_clients:
+            key, request, callback = self.queue.popleft()
+            if key not in self.waiting:
+                continue
+            self._remove_timeout(key)
+            self.active[key] = (request, callback)
+            release_callback = functools.partial(self._release_fetch, key)
+            self._handle_request(request, release_callback, callback)
+
+    def _connection_class(self) -> type:
+        return _HTTPConnection
+
+    def _handle_request(
+        self,
+        request: HTTPRequest,
+        release_callback: Callable[[], None],
+        final_callback: Callable[[HTTPResponse], None],
+    ) -> None:
+        self._connection_class()(
+            self,
+            request,
+            release_callback,
+            final_callback,
+            self.max_buffer_size,
+            self.tcp_client,
+            self.max_header_size,
+            self.max_body_size,
+        )
+
+    def _release_fetch(self, key: object) -> None:
+        del self.active[key]
+        self._process_queue()
+
+    def _remove_timeout(self, key: object) -> None:
+        if key in self.waiting:
+            request, callback, timeout_handle = self.waiting[key]
+            if timeout_handle is not None:
+                self.io_loop.remove_timeout(timeout_handle)
+            del self.waiting[key]
+
+    def _on_timeout(self, key: object, info: Optional[str] = None) -> None:
         """Timeout callback of request.

         Construct a timeout HTTPResponse when a timeout occurs.
@@ -108,17 +235,36 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
         :arg object key: A simple object to mark the request.
         :arg string info: More detailed timeout information.
         """
-        pass
+        request, callback, timeout_handle = self.waiting[key]
+        self.queue.remove((key, request, callback))

+        error_message = "Timeout {0}".format(info) if info else "Timeout"
+        timeout_response = HTTPResponse(
+            request,
+            599,
+            error=HTTPTimeoutError(error_message),
+            request_time=self.io_loop.time() - request.start_time,
+        )
+        self.io_loop.add_callback(callback, timeout_response)
+        del self.waiting[key]

-class _HTTPConnection(httputil.HTTPMessageDelegate):
-    _SUPPORTED_METHODS = set(['GET', 'HEAD', 'POST', 'PUT', 'DELETE',
-        'PATCH', 'OPTIONS'])

-    def __init__(self, client: Optional[SimpleAsyncHTTPClient], request:
-        HTTPRequest, release_callback: Callable[[], None], final_callback:
-        Callable[[HTTPResponse], None], max_buffer_size: int, tcp_client:
-        TCPClient, max_header_size: int, max_body_size: int) ->None:
+class _HTTPConnection(httputil.HTTPMessageDelegate):
+    _SUPPORTED_METHODS = set(
+        ["GET", "HEAD", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"]
+    )
+
+    def __init__(
+        self,
+        client: Optional[SimpleAsyncHTTPClient],
+        request: HTTPRequest,
+        release_callback: Callable[[], None],
+        final_callback: Callable[[HTTPResponse], None],
+        max_buffer_size: int,
+        tcp_client: TCPClient,
+        max_header_size: int,
+        max_body_size: int,
+    ) -> None:
         self.io_loop = IOLoop.current()
         self.start_time = self.io_loop.time()
         self.start_wall_time = time.time()
@@ -130,25 +276,429 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
         self.tcp_client = tcp_client
         self.max_header_size = max_header_size
         self.max_body_size = max_body_size
-        self.code = None
-        self.headers = None
-        self.chunks = []
+        self.code = None  # type: Optional[int]
+        self.headers = None  # type: Optional[httputil.HTTPHeaders]
+        self.chunks = []  # type: List[bytes]
         self._decompressor = None
-        self._timeout = None
+        # Timeout handle returned by IOLoop.add_timeout
+        self._timeout = None  # type: object
         self._sockaddr = None
-        IOLoop.current().add_future(gen.convert_yielded(self.run()), lambda
-            f: f.result())
-
-    def _on_timeout(self, info: Optional[str]=None) ->None:
+        IOLoop.current().add_future(
+            gen.convert_yielded(self.run()), lambda f: f.result()
+        )
+
+    async def run(self) -> None:
+        try:
+            self.parsed = urllib.parse.urlsplit(_unicode(self.request.url))
+            if self.parsed.scheme not in ("http", "https"):
+                raise ValueError("Unsupported url scheme: %s" % self.request.url)
+            # urlsplit results have hostname and port attributes, but they
+            # didn't support ipv6 literals until python 2.7.
+            netloc = self.parsed.netloc
+            if "@" in netloc:
+                userpass, _, netloc = netloc.rpartition("@")
+            host, port = httputil.split_host_and_port(netloc)
+            if port is None:
+                port = 443 if self.parsed.scheme == "https" else 80
+            if re.match(r"^\[.*\]$", host):
+                # raw ipv6 addresses in urls are enclosed in brackets
+                host = host[1:-1]
+            self.parsed_hostname = host  # save final host for _on_connect
+
+            if self.request.allow_ipv6 is False:
+                af = socket.AF_INET
+            else:
+                af = socket.AF_UNSPEC
+
+            ssl_options = self._get_ssl_options(self.parsed.scheme)
+
+            source_ip = None
+            if self.request.network_interface:
+                if is_valid_ip(self.request.network_interface):
+                    source_ip = self.request.network_interface
+                else:
+                    raise ValueError(
+                        "Unrecognized IPv4 or IPv6 address for network_interface, got %r"
+                        % (self.request.network_interface,)
+                    )
+
+            if self.request.connect_timeout and self.request.request_timeout:
+                timeout = min(
+                    self.request.connect_timeout, self.request.request_timeout
+                )
+            elif self.request.connect_timeout:
+                timeout = self.request.connect_timeout
+            elif self.request.request_timeout:
+                timeout = self.request.request_timeout
+            else:
+                timeout = 0
+            if timeout:
+                self._timeout = self.io_loop.add_timeout(
+                    self.start_time + timeout,
+                    functools.partial(self._on_timeout, "while connecting"),
+                )
+            stream = await self.tcp_client.connect(
+                host,
+                port,
+                af=af,
+                ssl_options=ssl_options,
+                max_buffer_size=self.max_buffer_size,
+                source_ip=source_ip,
+            )
+
+            if self.final_callback is None:
+                # final_callback is cleared if we've hit our timeout.
+                stream.close()
+                return
+            self.stream = stream
+            self.stream.set_close_callback(self.on_connection_close)
+            self._remove_timeout()
+            if self.final_callback is None:
+                return
+            if self.request.request_timeout:
+                self._timeout = self.io_loop.add_timeout(
+                    self.start_time + self.request.request_timeout,
+                    functools.partial(self._on_timeout, "during request"),
+                )
+            if (
+                self.request.method not in self._SUPPORTED_METHODS
+                and not self.request.allow_nonstandard_methods
+            ):
+                raise KeyError("unknown method %s" % self.request.method)
+            for key in (
+                "proxy_host",
+                "proxy_port",
+                "proxy_username",
+                "proxy_password",
+                "proxy_auth_mode",
+            ):
+                if getattr(self.request, key, None):
+                    raise NotImplementedError("%s not supported" % key)
+            if "Connection" not in self.request.headers:
+                self.request.headers["Connection"] = "close"
+            if "Host" not in self.request.headers:
+                if "@" in self.parsed.netloc:
+                    self.request.headers["Host"] = self.parsed.netloc.rpartition("@")[
+                        -1
+                    ]
+                else:
+                    self.request.headers["Host"] = self.parsed.netloc
+            username, password = None, None
+            if self.parsed.username is not None:
+                username, password = self.parsed.username, self.parsed.password
+            elif self.request.auth_username is not None:
+                username = self.request.auth_username
+                password = self.request.auth_password or ""
+            if username is not None:
+                assert password is not None
+                if self.request.auth_mode not in (None, "basic"):
+                    raise ValueError("unsupported auth_mode %s", self.request.auth_mode)
+                self.request.headers["Authorization"] = "Basic " + _unicode(
+                    base64.b64encode(
+                        httputil.encode_username_password(username, password)
+                    )
+                )
+            if self.request.user_agent:
+                self.request.headers["User-Agent"] = self.request.user_agent
+            elif self.request.headers.get("User-Agent") is None:
+                self.request.headers["User-Agent"] = "Tornado/{}".format(version)
+            if not self.request.allow_nonstandard_methods:
+                # Some HTTP methods nearly always have bodies while others
+                # almost never do. Fail in this case unless the user has
+                # opted out of sanity checks with allow_nonstandard_methods.
+                body_expected = self.request.method in ("POST", "PATCH", "PUT")
+                body_present = (
+                    self.request.body is not None
+                    or self.request.body_producer is not None
+                )
+                if (body_expected and not body_present) or (
+                    body_present and not body_expected
+                ):
+                    raise ValueError(
+                        "Body must %sbe None for method %s (unless "
+                        "allow_nonstandard_methods is true)"
+                        % ("not " if body_expected else "", self.request.method)
+                    )
+            if self.request.expect_100_continue:
+                self.request.headers["Expect"] = "100-continue"
+            if self.request.body is not None:
+                # When body_producer is used the caller is responsible for
+                # setting Content-Length (or else chunked encoding will be used).
+                self.request.headers["Content-Length"] = str(len(self.request.body))
+            if (
+                self.request.method == "POST"
+                and "Content-Type" not in self.request.headers
+            ):
+                self.request.headers["Content-Type"] = (
+                    "application/x-www-form-urlencoded"
+                )
+            if self.request.decompress_response:
+                self.request.headers["Accept-Encoding"] = "gzip"
+            req_path = (self.parsed.path or "/") + (
+                ("?" + self.parsed.query) if self.parsed.query else ""
+            )
+            self.connection = self._create_connection(stream)
+            start_line = httputil.RequestStartLine(self.request.method, req_path, "")
+            self.connection.write_headers(start_line, self.request.headers)
+            if self.request.expect_100_continue:
+                await self.connection.read_response(self)
+            else:
+                await self._write_body(True)
+        except Exception:
+            if not self._handle_exception(*sys.exc_info()):
+                raise
+
+    def _get_ssl_options(
+        self, scheme: str
+    ) -> Union[None, Dict[str, Any], ssl.SSLContext]:
+        if scheme == "https":
+            if self.request.ssl_options is not None:
+                return self.request.ssl_options
+            # If we are using the defaults, don't construct a
+            # new SSLContext.
+            if (
+                self.request.validate_cert
+                and self.request.ca_certs is None
+                and self.request.client_cert is None
+                and self.request.client_key is None
+            ):
+                return _client_ssl_defaults
+            ssl_ctx = ssl.create_default_context(
+                ssl.Purpose.SERVER_AUTH, cafile=self.request.ca_certs
+            )
+            if not self.request.validate_cert:
+                ssl_ctx.check_hostname = False
+                ssl_ctx.verify_mode = ssl.CERT_NONE
+            if self.request.client_cert is not None:
+                ssl_ctx.load_cert_chain(
+                    self.request.client_cert, self.request.client_key
+                )
+            if hasattr(ssl, "OP_NO_COMPRESSION"):
+                # See netutil.ssl_options_to_context
+                ssl_ctx.options |= ssl.OP_NO_COMPRESSION
+            return ssl_ctx
+        return None
+
+    def _on_timeout(self, info: Optional[str] = None) -> None:
         """Timeout callback of _HTTPConnection instance.

         Raise an `HTTPTimeoutError` when a timeout occurs.

         :arg string info: More detailed timeout information.
         """
-        pass
-
-
-if __name__ == '__main__':
+        self._timeout = None
+        error_message = "Timeout {0}".format(info) if info else "Timeout"
+        if self.final_callback is not None:
+            self._handle_exception(
+                HTTPTimeoutError, HTTPTimeoutError(error_message), None
+            )
+
+    def _remove_timeout(self) -> None:
+        if self._timeout is not None:
+            self.io_loop.remove_timeout(self._timeout)
+            self._timeout = None
+
+    def _create_connection(self, stream: IOStream) -> HTTP1Connection:
+        stream.set_nodelay(True)
+        connection = HTTP1Connection(
+            stream,
+            True,
+            HTTP1ConnectionParameters(
+                no_keep_alive=True,
+                max_header_size=self.max_header_size,
+                max_body_size=self.max_body_size,
+                decompress=bool(self.request.decompress_response),
+            ),
+            self._sockaddr,
+        )
+        return connection
+
+    async def _write_body(self, start_read: bool) -> None:
+        if self.request.body is not None:
+            self.connection.write(self.request.body)
+        elif self.request.body_producer is not None:
+            fut = self.request.body_producer(self.connection.write)
+            if fut is not None:
+                await fut
+        self.connection.finish()
+        if start_read:
+            try:
+                await self.connection.read_response(self)
+            except StreamClosedError:
+                if not self._handle_exception(*sys.exc_info()):
+                    raise
+
+    def _release(self) -> None:
+        if self.release_callback is not None:
+            release_callback = self.release_callback
+            self.release_callback = None  # type: ignore
+            release_callback()
+
+    def _run_callback(self, response: HTTPResponse) -> None:
+        self._release()
+        if self.final_callback is not None:
+            final_callback = self.final_callback
+            self.final_callback = None  # type: ignore
+            self.io_loop.add_callback(final_callback, response)
+
+    def _handle_exception(
+        self,
+        typ: "Optional[Type[BaseException]]",
+        value: Optional[BaseException],
+        tb: Optional[TracebackType],
+    ) -> bool:
+        if self.final_callback is not None:
+            self._remove_timeout()
+            if isinstance(value, StreamClosedError):
+                if value.real_error is None:
+                    value = HTTPStreamClosedError("Stream closed")
+                else:
+                    value = value.real_error
+            self._run_callback(
+                HTTPResponse(
+                    self.request,
+                    599,
+                    error=value,
+                    request_time=self.io_loop.time() - self.start_time,
+                    start_time=self.start_wall_time,
+                )
+            )
+
+            if hasattr(self, "stream"):
+                # TODO: this may cause a StreamClosedError to be raised
+                # by the connection's Future.  Should we cancel the
+                # connection more gracefully?
+                self.stream.close()
+            return True
+        else:
+            # If our callback has already been called, we are probably
+            # catching an exception that is not caused by us but rather
+            # some child of our callback. Rather than drop it on the floor,
+            # pass it along, unless it's just the stream being closed.
+            return isinstance(value, StreamClosedError)
+
+    def on_connection_close(self) -> None:
+        if self.final_callback is not None:
+            message = "Connection closed"
+            if self.stream.error:
+                raise self.stream.error
+            try:
+                raise HTTPStreamClosedError(message)
+            except HTTPStreamClosedError:
+                self._handle_exception(*sys.exc_info())
+
+    async def headers_received(
+        self,
+        first_line: Union[httputil.ResponseStartLine, httputil.RequestStartLine],
+        headers: httputil.HTTPHeaders,
+    ) -> None:
+        assert isinstance(first_line, httputil.ResponseStartLine)
+        if self.request.expect_100_continue and first_line.code == 100:
+            await self._write_body(False)
+            return
+        self.code = first_line.code
+        self.reason = first_line.reason
+        self.headers = headers
+
+        if self._should_follow_redirect():
+            return
+
+        if self.request.header_callback is not None:
+            # Reassemble the start line.
+            self.request.header_callback("%s %s %s\r\n" % first_line)
+            for k, v in self.headers.get_all():
+                self.request.header_callback("%s: %s\r\n" % (k, v))
+            self.request.header_callback("\r\n")
+
+    def _should_follow_redirect(self) -> bool:
+        if self.request.follow_redirects:
+            assert self.request.max_redirects is not None
+            return (
+                self.code in (301, 302, 303, 307, 308)
+                and self.request.max_redirects > 0
+                and self.headers is not None
+                and self.headers.get("Location") is not None
+            )
+        return False
+
+    def finish(self) -> None:
+        assert self.code is not None
+        data = b"".join(self.chunks)
+        self._remove_timeout()
+        original_request = getattr(self.request, "original_request", self.request)
+        if self._should_follow_redirect():
+            assert isinstance(self.request, _RequestProxy)
+            assert self.headers is not None
+            new_request = copy.copy(self.request.request)
+            new_request.url = urllib.parse.urljoin(
+                self.request.url, self.headers["Location"]
+            )
+            assert self.request.max_redirects is not None
+            new_request.max_redirects = self.request.max_redirects - 1
+            del new_request.headers["Host"]
+            # https://tools.ietf.org/html/rfc7231#section-6.4
+            #
+            # The original HTTP spec said that after a 301 or 302
+            # redirect, the request method should be preserved.
+            # However, browsers implemented this by changing the
+            # method to GET, and the behavior stuck. 303 redirects
+            # always specified this POST-to-GET behavior, arguably
+            # for *all* methods, but libcurl < 7.70 only does this
+            # for POST, while libcurl >= 7.70 does it for other methods.
+            if (self.code == 303 and self.request.method != "HEAD") or (
+                self.code in (301, 302) and self.request.method == "POST"
+            ):
+                new_request.method = "GET"
+                new_request.body = None  # type: ignore
+                for h in [
+                    "Content-Length",
+                    "Content-Type",
+                    "Content-Encoding",
+                    "Transfer-Encoding",
+                ]:
+                    try:
+                        del self.request.headers[h]
+                    except KeyError:
+                        pass
+            new_request.original_request = original_request  # type: ignore
+            final_callback = self.final_callback
+            self.final_callback = None  # type: ignore
+            self._release()
+            assert self.client is not None
+            fut = self.client.fetch(new_request, raise_error=False)
+            fut.add_done_callback(lambda f: final_callback(f.result()))
+            self._on_end_request()
+            return
+        if self.request.streaming_callback:
+            buffer = BytesIO()
+        else:
+            buffer = BytesIO(data)  # TODO: don't require one big string?
+        response = HTTPResponse(
+            original_request,
+            self.code,
+            reason=getattr(self, "reason", None),
+            headers=self.headers,
+            request_time=self.io_loop.time() - self.start_time,
+            start_time=self.start_wall_time,
+            buffer=buffer,
+            effective_url=self.request.url,
+        )
+        self._run_callback(response)
+        self._on_end_request()
+
+    def _on_end_request(self) -> None:
+        self.stream.close()
+
+    def data_received(self, chunk: bytes) -> None:
+        if self._should_follow_redirect():
+            # We're going to follow a redirect so just discard the body.
+            return
+        if self.request.streaming_callback is not None:
+            self.request.streaming_callback(chunk)
+        else:
+            self.chunks.append(chunk)
+
+
+if __name__ == "__main__":
     AsyncHTTPClient.configure(SimpleAsyncHTTPClient)
     main()
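
Editor's note: with the queueing and timeout machinery above restored, the
client behaves as documented when more than ``max_clients`` fetches are in
flight. A minimal sketch (the URL is a placeholder; assumes Tornado 5+ on
asyncio):

    import asyncio
    from tornado.httpclient import AsyncHTTPClient

    AsyncHTTPClient.configure(
        "tornado.simple_httpclient.SimpleAsyncHTTPClient", max_clients=2
    )

    async def main() -> None:
        client = AsyncHTTPClient()
        # With max_clients=2, the third fetch waits in self.queue and can
        # fail with "Timeout in request queue" (see _on_timeout above).
        results = await asyncio.gather(
            *(client.fetch("http://example.com/", request_timeout=5)
              for _ in range(3)),
            return_exceptions=True,
        )
        for r in results:
            print(getattr(r, "code", r))

    asyncio.run(main())
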
diff --git a/tornado/tcpclient.py b/tornado/tcpclient.py
index 4e4fcdf6..0a829062 100644
--- a/tornado/tcpclient.py
+++ b/tornado/tcpclient.py
@@ -1,20 +1,40 @@
+#
+# Copyright 2014 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """A non-blocking TCP connection factory.
 """
+
 import functools
 import socket
 import numbers
 import datetime
 import ssl
 import typing
+
 from tornado.concurrent import Future, future_add_done_callback
 from tornado.ioloop import IOLoop
 from tornado.iostream import IOStream
 from tornado import gen
 from tornado.netutil import Resolver
 from tornado.gen import TimeoutError
+
 from typing import Any, Union, Dict, Tuple, List, Callable, Iterator, Optional
+
 if typing.TYPE_CHECKING:
-    from typing import Set
+    from typing import Set  # noqa: F401
+
 _INITIAL_CONNECT_TIMEOUT = 0.3


@@ -36,21 +56,33 @@ class _Connector(object):

     """

-    def __init__(self, addrinfo: List[Tuple], connect: Callable[[socket.
-        AddressFamily, Tuple], Tuple[IOStream, 'Future[IOStream]']]) ->None:
+    def __init__(
+        self,
+        addrinfo: List[Tuple],
+        connect: Callable[
+            [socket.AddressFamily, Tuple], Tuple[IOStream, "Future[IOStream]"]
+        ],
+    ) -> None:
         self.io_loop = IOLoop.current()
         self.connect = connect
-        self.future = Future()
-        self.timeout = None
-        self.connect_timeout = None
-        self.last_error = None
+
+        self.future = (
+            Future()
+        )  # type: Future[Tuple[socket.AddressFamily, Any, IOStream]]
+        self.timeout = None  # type: Optional[object]
+        self.connect_timeout = None  # type: Optional[object]
+        self.last_error = None  # type: Optional[Exception]
         self.remaining = len(addrinfo)
         self.primary_addrs, self.secondary_addrs = self.split(addrinfo)
-        self.streams = set()
+        self.streams = set()  # type: Set[IOStream]

     @staticmethod
-    def split(addrinfo: List[Tuple]) ->Tuple[List[Tuple[socket.
-        AddressFamily, Tuple]], List[Tuple[socket.AddressFamily, Tuple]]]:
+    def split(
+        addrinfo: List[Tuple],
+    ) -> Tuple[
+        List[Tuple[socket.AddressFamily, Tuple]],
+        List[Tuple[socket.AddressFamily, Tuple]],
+    ]:
         """Partition the ``addrinfo`` list by address family.

         Returns two lists.  The first list contains the first entry from
@@ -59,7 +91,112 @@ class _Connector(object):
         be AF_INET and the other AF_INET6, although non-standard resolvers
         may return additional families).
         """
-        pass
+        primary = []
+        secondary = []
+        primary_af = addrinfo[0][0]
+        for af, addr in addrinfo:
+            if af == primary_af:
+                primary.append((af, addr))
+            else:
+                secondary.append((af, addr))
+        return primary, secondary
+
+    def start(
+        self,
+        timeout: float = _INITIAL_CONNECT_TIMEOUT,
+        connect_timeout: Optional[Union[float, datetime.timedelta]] = None,
+    ) -> "Future[Tuple[socket.AddressFamily, Any, IOStream]]":
+        self.try_connect(iter(self.primary_addrs))
+        self.set_timeout(timeout)
+        if connect_timeout is not None:
+            self.set_connect_timeout(connect_timeout)
+        return self.future
+
+    def try_connect(self, addrs: Iterator[Tuple[socket.AddressFamily, Tuple]]) -> None:
+        try:
+            af, addr = next(addrs)
+        except StopIteration:
+            # We've reached the end of our queue, but the other queue
+            # might still be working.  Send a final error on the future
+            # only when both queues are finished.
+            if self.remaining == 0 and not self.future.done():
+                self.future.set_exception(
+                    self.last_error or IOError("connection failed")
+                )
+            return
+        stream, future = self.connect(af, addr)
+        self.streams.add(stream)
+        future_add_done_callback(
+            future, functools.partial(self.on_connect_done, addrs, af, addr)
+        )
+
+    def on_connect_done(
+        self,
+        addrs: Iterator[Tuple[socket.AddressFamily, Tuple]],
+        af: socket.AddressFamily,
+        addr: Tuple,
+        future: "Future[IOStream]",
+    ) -> None:
+        self.remaining -= 1
+        try:
+            stream = future.result()
+        except Exception as e:
+            if self.future.done():
+                return
+            # Error: try again (but remember what happened so we have an
+            # error to raise in the end)
+            self.last_error = e
+            self.try_connect(addrs)
+            if self.timeout is not None:
+                # If the first attempt failed, don't wait for the
+                # timeout to try an address from the secondary queue.
+                self.io_loop.remove_timeout(self.timeout)
+                self.on_timeout()
+            return
+        self.clear_timeouts()
+        if self.future.done():
+            # This is a late arrival; just drop it.
+            stream.close()
+        else:
+            self.streams.discard(stream)
+            self.future.set_result((af, addr, stream))
+            self.close_streams()
+
+    def set_timeout(self, timeout: float) -> None:
+        self.timeout = self.io_loop.add_timeout(
+            self.io_loop.time() + timeout, self.on_timeout
+        )
+
+    def on_timeout(self) -> None:
+        self.timeout = None
+        if not self.future.done():
+            self.try_connect(iter(self.secondary_addrs))
+
+    def clear_timeout(self) -> None:
+        if self.timeout is not None:
+            self.io_loop.remove_timeout(self.timeout)
+
+    def set_connect_timeout(
+        self, connect_timeout: Union[float, datetime.timedelta]
+    ) -> None:
+        self.connect_timeout = self.io_loop.add_timeout(
+            connect_timeout, self.on_connect_timeout
+        )
+
+    def on_connect_timeout(self) -> None:
+        if not self.future.done():
+            self.future.set_exception(TimeoutError())
+        self.close_streams()
+
+    def clear_timeouts(self) -> None:
+        if self.timeout is not None:
+            self.io_loop.remove_timeout(self.timeout)
+        if self.connect_timeout is not None:
+            self.io_loop.remove_timeout(self.connect_timeout)
+
+    def close_streams(self) -> None:
+        for stream in self.streams:
+            stream.close()


 class TCPClient(object):
@@ -69,7 +206,7 @@ class TCPClient(object):
        The ``io_loop`` argument (deprecated since version 4.1) has been removed.
     """

-    def __init__(self, resolver: Optional[Resolver]=None) ->None:
+    def __init__(self, resolver: Optional[Resolver] = None) -> None:
         if resolver is not None:
             self.resolver = resolver
             self._own_resolver = False
@@ -77,11 +214,21 @@ class TCPClient(object):
             self.resolver = Resolver()
             self._own_resolver = True

-    async def connect(self, host: str, port: int, af: socket.AddressFamily=
-        socket.AF_UNSPEC, ssl_options: Optional[Union[Dict[str, Any], ssl.
-        SSLContext]]=None, max_buffer_size: Optional[int]=None, source_ip:
-        Optional[str]=None, source_port: Optional[int]=None, timeout:
-        Optional[Union[float, datetime.timedelta]]=None) ->IOStream:
+    def close(self) -> None:
+        if self._own_resolver:
+            self.resolver.close()
+
+    async def connect(
+        self,
+        host: str,
+        port: int,
+        af: socket.AddressFamily = socket.AF_UNSPEC,
+        ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
+        max_buffer_size: Optional[int] = None,
+        source_ip: Optional[str] = None,
+        source_port: Optional[int] = None,
+        timeout: Optional[Union[float, datetime.timedelta]] = None,
+    ) -> IOStream:
         """Connect to the given host and port.

         Asynchronously returns an `.IOStream` (or `.SSLIOStream` if
@@ -107,4 +254,79 @@ class TCPClient(object):
         .. versionchanged:: 5.0
            Added the ``timeout`` argument.
         """
-        pass
+        if timeout is not None:
+            if isinstance(timeout, numbers.Real):
+                timeout = IOLoop.current().time() + timeout
+            elif isinstance(timeout, datetime.timedelta):
+                timeout = IOLoop.current().time() + timeout.total_seconds()
+            else:
+                raise TypeError("Unsupported timeout %r" % timeout)
+        if timeout is not None:
+            addrinfo = await gen.with_timeout(
+                timeout, self.resolver.resolve(host, port, af)
+            )
+        else:
+            addrinfo = await self.resolver.resolve(host, port, af)
+        connector = _Connector(
+            addrinfo,
+            functools.partial(
+                self._create_stream,
+                max_buffer_size,
+                source_ip=source_ip,
+                source_port=source_port,
+            ),
+        )
+        af, addr, stream = await connector.start(connect_timeout=timeout)
+        # TODO: For better performance we could cache the (af, addr)
+        # information here and re-use it on subsequent connections to
+        # the same host. (http://tools.ietf.org/html/rfc6555#section-4.2)
+        if ssl_options is not None:
+            if timeout is not None:
+                stream = await gen.with_timeout(
+                    timeout,
+                    stream.start_tls(
+                        False, ssl_options=ssl_options, server_hostname=host
+                    ),
+                )
+            else:
+                stream = await stream.start_tls(
+                    False, ssl_options=ssl_options, server_hostname=host
+                )
+        return stream
+
+    def _create_stream(
+        self,
+        max_buffer_size: int,
+        af: socket.AddressFamily,
+        addr: Tuple,
+        source_ip: Optional[str] = None,
+        source_port: Optional[int] = None,
+    ) -> Tuple[IOStream, "Future[IOStream]"]:
+        # Always connect in plaintext; we'll convert to ssl if necessary
+        # after one connection has completed.
+        source_port_bind = source_port if isinstance(source_port, int) else 0
+        source_ip_bind = source_ip
+        if source_port_bind and not source_ip:
+            # The user requested a specific port but no source IP; bind
+            # to the default loopback address for the requested address
+            # family:
+            # - 127.0.0.1 for IPv4
+            # - ::1 for IPv6
+            source_ip_bind = "::1" if af == socket.AF_INET6 else "127.0.0.1"
+        socket_obj = socket.socket(af)
+        if source_port_bind or source_ip_bind:
+            # The user asked to bind to a specific source IP and/or port.
+            try:
+                socket_obj.bind((source_ip_bind, source_port_bind))
+            except socket.error:
+                socket_obj.close()
+                # Fail loudly if unable to use the IP/port.
+                raise
+        try:
+            stream = IOStream(socket_obj, max_buffer_size=max_buffer_size)
+        except socket.error as e:
+            fu = Future()  # type: Future[IOStream]
+            fu.set_exception(e)
+            return stream, fu
+        else:
+            return stream, stream.connect(addr)
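
The ``_Connector`` above implements the RFC 6555 "Happy Eyeballs" strategy:
``split`` partitions the resolved addresses by family, the primary family is
tried first, and the secondary family is attempted after
``_INITIAL_CONNECT_TIMEOUT`` (0.3s) or as soon as a primary attempt fails.
Callers normally reach it through ``TCPClient.connect``; a minimal sketch
(host and port are placeholders)::

    from tornado.ioloop import IOLoop
    from tornado.tcpclient import TCPClient

    async def echo_once():
        # Resolves the host, races address families per RFC 6555, and
        # returns the first stream whose connect succeeds.
        stream = await TCPClient().connect("localhost", 9888, timeout=10)
        await stream.write(b"hello\n")
        print(await stream.read_until(b"\n"))
        stream.close()

    IOLoop.current().run_sync(echo_once)
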
diff --git a/tornado/tcpserver.py b/tornado/tcpserver.py
index be5dca18..02c0ca0c 100644
--- a/tornado/tcpserver.py
+++ b/tornado/tcpserver.py
@@ -1,23 +1,47 @@
+#
+# Copyright 2011 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """A non-blocking, single-threaded TCP server."""
+
 import errno
 import os
 import socket
 import ssl
+
 from tornado import gen
 from tornado.log import app_log
 from tornado.ioloop import IOLoop
 from tornado.iostream import IOStream, SSLIOStream
-from tornado.netutil import bind_sockets, add_accept_handler, ssl_wrap_socket, _DEFAULT_BACKLOG
+from tornado.netutil import (
+    bind_sockets,
+    add_accept_handler,
+    ssl_wrap_socket,
+    _DEFAULT_BACKLOG,
+)
 from tornado import process
 from tornado.util import errno_from_exception
+
 import typing
 from typing import Union, Dict, Any, Iterable, Optional, Awaitable
+
 if typing.TYPE_CHECKING:
-    from typing import Callable, List
+    from typing import Callable, List  # noqa: F401


 class TCPServer(object):
-    """A non-blocking, single-threaded TCP server.
+    r"""A non-blocking, single-threaded TCP server.

     To use `TCPServer`, define a subclass which overrides the `handle_stream`
     method. For example, a simple echo server could be defined like this::
@@ -29,7 +53,7 @@ class TCPServer(object):
           async def handle_stream(self, stream, address):
               while True:
                   try:
-                      data = await stream.read_until(b"\\n") await
-                      stream.write(data)
+                      data = await stream.read_until(b"\n")
+                      await stream.write(data)
                   except StreamClosedError:
                       break
@@ -97,31 +121,50 @@ class TCPServer(object):
        The ``io_loop`` argument has been removed.
     """

-    def __init__(self, ssl_options: Optional[Union[Dict[str, Any], ssl.
-        SSLContext]]=None, max_buffer_size: Optional[int]=None,
-        read_chunk_size: Optional[int]=None) ->None:
+    def __init__(
+        self,
+        ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
+        max_buffer_size: Optional[int] = None,
+        read_chunk_size: Optional[int] = None,
+    ) -> None:
         self.ssl_options = ssl_options
-        self._sockets = {}
-        self._handlers = {}
-        self._pending_sockets = []
+        self._sockets = {}  # type: Dict[int, socket.socket]
+        self._handlers = {}  # type: Dict[int, Callable[[], None]]
+        self._pending_sockets = []  # type: List[socket.socket]
         self._started = False
         self._stopped = False
         self.max_buffer_size = max_buffer_size
         self.read_chunk_size = read_chunk_size
+
+        # Verify the SSL options. Otherwise we don't get errors until clients
+        # connect. This doesn't verify that the keys are legitimate, but
+        # the SSL module doesn't do that until there is a connected socket,
+        # which seems like too much work.
         if self.ssl_options is not None and isinstance(self.ssl_options, dict):
-            if 'certfile' not in self.ssl_options:
+            # Only certfile is required: it can contain both keys
+            if "certfile" not in self.ssl_options:
                 raise KeyError('missing key "certfile" in ssl_options')
-            if not os.path.exists(self.ssl_options['certfile']):
-                raise ValueError('certfile "%s" does not exist' % self.
-                    ssl_options['certfile'])
-            if 'keyfile' in self.ssl_options and not os.path.exists(self.
-                ssl_options['keyfile']):
-                raise ValueError('keyfile "%s" does not exist' % self.
-                    ssl_options['keyfile'])
-
-    def listen(self, port: int, address: Optional[str]=None, family: socket
-        .AddressFamily=socket.AF_UNSPEC, backlog: int=_DEFAULT_BACKLOG,
-        flags: Optional[int]=None, reuse_port: bool=False) ->None:
+
+            if not os.path.exists(self.ssl_options["certfile"]):
+                raise ValueError(
+                    'certfile "%s" does not exist' % self.ssl_options["certfile"]
+                )
+            if "keyfile" in self.ssl_options and not os.path.exists(
+                self.ssl_options["keyfile"]
+            ):
+                raise ValueError(
+                    'keyfile "%s" does not exist' % self.ssl_options["keyfile"]
+                )
+
+    def listen(
+        self,
+        port: int,
+        address: Optional[str] = None,
+        family: socket.AddressFamily = socket.AF_UNSPEC,
+        backlog: int = _DEFAULT_BACKLOG,
+        flags: Optional[int] = None,
+        reuse_port: bool = False,
+    ) -> None:
         """Starts accepting connections on the given port.

         This method may be called more than once to listen on multiple ports.
@@ -137,9 +180,17 @@ class TCPServer(object):
            Added ``family``, ``backlog``, ``flags``, and ``reuse_port``
            arguments to match `tornado.netutil.bind_sockets`.
         """
-        pass
-
-    def add_sockets(self, sockets: Iterable[socket.socket]) ->None:
+        sockets = bind_sockets(
+            port,
+            address=address,
+            family=family,
+            backlog=backlog,
+            flags=flags,
+            reuse_port=reuse_port,
+        )
+        self.add_sockets(sockets)
+
+    def add_sockets(self, sockets: Iterable[socket.socket]) -> None:
         """Makes this server start accepting connections on the given sockets.

         The ``sockets`` parameter is a list of socket objects such as
@@ -148,15 +199,25 @@ class TCPServer(object):
         method and `tornado.process.fork_processes` to provide greater
         control over the initialization of a multi-process server.
         """
-        pass
+        for sock in sockets:
+            self._sockets[sock.fileno()] = sock
+            self._handlers[sock.fileno()] = add_accept_handler(
+                sock, self._handle_connection
+            )

-    def add_socket(self, socket: socket.socket) ->None:
+    def add_socket(self, socket: socket.socket) -> None:
         """Singular version of `add_sockets`.  Takes a single socket object."""
-        pass
-
-    def bind(self, port: int, address: Optional[str]=None, family: socket.
-        AddressFamily=socket.AF_UNSPEC, backlog: int=_DEFAULT_BACKLOG,
-        flags: Optional[int]=None, reuse_port: bool=False) ->None:
+        self.add_sockets([socket])
+
+    def bind(
+        self,
+        port: int,
+        address: Optional[str] = None,
+        family: socket.AddressFamily = socket.AF_UNSPEC,
+        backlog: int = _DEFAULT_BACKLOG,
+        flags: Optional[int] = None,
+        reuse_port: bool = False,
+    ) -> None:
         """Binds this server to the given port on the given address.

         To start the server, call `start`. If you want to run this server in a
@@ -187,10 +248,22 @@ class TCPServer(object):
            Use either ``listen()`` or ``add_sockets()`` instead of ``bind()``
            and ``start()``.
         """
-        pass
-
-    def start(self, num_processes: Optional[int]=1, max_restarts: Optional[
-        int]=None) ->None:
+        sockets = bind_sockets(
+            port,
+            address=address,
+            family=family,
+            backlog=backlog,
+            flags=flags,
+            reuse_port=reuse_port,
+        )
+        if self._started:
+            self.add_sockets(sockets)
+        else:
+            self._pending_sockets.extend(sockets)
+
+    def start(
+        self, num_processes: Optional[int] = 1, max_restarts: Optional[int] = None
+    ) -> None:
         """Starts this server in the `.IOLoop`.

         By default, we run the server in this process and do not fork any
@@ -222,18 +295,32 @@ class TCPServer(object):
            Use either ``listen()`` or ``add_sockets()`` instead of ``bind()``
            and ``start()``.
         """
-        pass
+        assert not self._started
+        self._started = True
+        if num_processes != 1:
+            process.fork_processes(num_processes, max_restarts)
+        sockets = self._pending_sockets
+        self._pending_sockets = []
+        self.add_sockets(sockets)

-    def stop(self) ->None:
+    def stop(self) -> None:
         """Stops listening for new connections.

         Requests currently in progress may still continue after the
         server is stopped.
         """
-        pass
-
-    def handle_stream(self, stream: IOStream, address: tuple) ->Optional[
-        Awaitable[None]]:
+        if self._stopped:
+            return
+        self._stopped = True
+        for fd, sock in self._sockets.items():
+            assert sock.fileno() == fd
+            # Unregister socket from IOLoop
+            self._handlers.pop(fd)()
+            sock.close()
+
+    def handle_stream(
+        self, stream: IOStream, address: tuple
+    ) -> Optional[Awaitable[None]]:
         """Override to handle a new `.IOStream` from an incoming connection.

         This method may be a coroutine; if so any exceptions it raises
@@ -248,4 +335,56 @@ class TCPServer(object):
         .. versionchanged:: 4.2
            Added the option for this method to be a coroutine.
         """
-        pass
+        raise NotImplementedError()
+
+    def _handle_connection(self, connection: socket.socket, address: Any) -> None:
+        if self.ssl_options is not None:
+            assert ssl, "Python 2.6+ and OpenSSL required for SSL"
+            try:
+                connection = ssl_wrap_socket(
+                    connection,
+                    self.ssl_options,
+                    server_side=True,
+                    do_handshake_on_connect=False,
+                )
+            except ssl.SSLError as err:
+                if err.args[0] == ssl.SSL_ERROR_EOF:
+                    return connection.close()
+                else:
+                    raise
+            except socket.error as err:
+                # If the connection is closed immediately after it is created
+                # (as in a port scan), we can get one of several errors.
+                # wrap_socket makes an internal call to getpeername,
+                # which may return either EINVAL (Mac OS X) or ENOTCONN
+                # (Linux).  If it returns ENOTCONN, this error is
+                # silently swallowed by the ssl module, so we need to
+                # catch another error later on (AttributeError in
+                # SSLIOStream._do_ssl_handshake).
+                # To test this behavior, try nmap with the -sT flag.
+                # https://github.com/tornadoweb/tornado/pull/750
+                if errno_from_exception(err) in (errno.ECONNABORTED, errno.EINVAL):
+                    return connection.close()
+                else:
+                    raise
+        try:
+            if self.ssl_options is not None:
+                stream = SSLIOStream(
+                    connection,
+                    max_buffer_size=self.max_buffer_size,
+                    read_chunk_size=self.read_chunk_size,
+                )  # type: IOStream
+            else:
+                stream = IOStream(
+                    connection,
+                    max_buffer_size=self.max_buffer_size,
+                    read_chunk_size=self.read_chunk_size,
+                )
+
+            future = self.handle_stream(stream, address)
+            if future is not None:
+                IOLoop.current().add_future(
+                    gen.convert_yielded(future), lambda f: f.result()
+                )
+        except Exception:
+            app_log.error("Error in connection callback", exc_info=True)
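
As the constructor validates above, a dict-style ``ssl_options`` must name an
existing ``certfile`` (``keyfile`` is optional when the certificate file
contains both key and certificate). A sketch of a TLS-enabled echo server,
with placeholder certificate paths::

    from tornado.ioloop import IOLoop
    from tornado.iostream import StreamClosedError
    from tornado.tcpserver import TCPServer

    class EchoServer(TCPServer):
        async def handle_stream(self, stream, address):
            while True:
                try:
                    data = await stream.read_until(b"\n")
                    await stream.write(data)
                except StreamClosedError:
                    break

    server = EchoServer(ssl_options={
        "certfile": "/path/to/server.crt",  # must exist (checked in __init__)
        "keyfile": "/path/to/server.key",   # optional if certfile holds both
    })
    server.listen(9888)
    IOLoop.current().start()
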
diff --git a/tornado/template.py b/tornado/template.py
index 1f082087..d53e977c 100644
--- a/tornado/template.py
+++ b/tornado/template.py
@@ -1,3 +1,18 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """A simple template system that compiles templates to Python code.

 Basic usage looks like::
@@ -180,6 +195,7 @@ To include a literal ``{{``, ``{%``, or ``{#`` in the output, escape them as
     (or until the next ``{% whitespace %}`` directive). See
     `filter_whitespace` for available options. New in Tornado 4.3.
 """
+
 import datetime
 from io import StringIO
 import linecache
@@ -187,14 +203,18 @@ import os.path
 import posixpath
 import re
 import threading
+
 from tornado import escape
 from tornado.log import app_log
 from tornado.util import ObjectDict, exec_in, unicode_type
+
 from typing import Any, Union, Callable, List, Dict, Iterable, Optional, TextIO
 import typing
+
 if typing.TYPE_CHECKING:
-    from typing import Tuple, ContextManager
-_DEFAULT_AUTOESCAPE = 'xhtml_escape'
+    from typing import Tuple, ContextManager  # noqa: F401
+
+_DEFAULT_AUTOESCAPE = "xhtml_escape"


 class _UnsetMarker:
@@ -204,7 +224,7 @@ class _UnsetMarker:
 _UNSET = _UnsetMarker()


-def filter_whitespace(mode: str, text: str) ->str:
+def filter_whitespace(mode: str, text: str) -> str:
     """Transform whitespace in ``text`` according to ``mode``.

     Available modes are:
@@ -217,7 +237,16 @@ def filter_whitespace(mode: str, text: str) ->str:

     .. versionadded:: 4.3
     """
-    pass
+    if mode == "all":
+        return text
+    elif mode == "single":
+        text = re.sub(r"([\t ]+)", " ", text)
+        text = re.sub(r"(\s*\n\s*)", "\n", text)
+        return text
+    elif mode == "oneline":
+        return re.sub(r"(\s+)", " ", text)
+    else:
+        raise Exception("invalid whitespace mode %s" % mode)


 class Template(object):
@@ -227,11 +256,18 @@ class Template(object):
     the template from variables with generate().
     """

-    def __init__(self, template_string: Union[str, bytes], name: str=
-        '<string>', loader: Optional['BaseLoader']=None,
-        compress_whitespace: Union[bool, _UnsetMarker]=_UNSET, autoescape:
-        Optional[Union[str, _UnsetMarker]]=_UNSET, whitespace: Optional[str
-        ]=None) ->None:
+    # note that the constructor's signature is not extracted with
+    # autodoc because _UNSET looks like garbage.  When changing
+    # this signature update website/sphinx/template.rst too.
+    def __init__(
+        self,
+        template_string: Union[str, bytes],
+        name: str = "<string>",
+        loader: Optional["BaseLoader"] = None,
+        compress_whitespace: Union[bool, _UnsetMarker] = _UNSET,
+        autoescape: Optional[Union[str, _UnsetMarker]] = _UNSET,
+        whitespace: Optional[str] = None,
+    ) -> None:
         """Construct a Template.

         :arg str template_string: the contents of the template file.
@@ -251,44 +287,106 @@ class Template(object):
            Added ``whitespace`` parameter; deprecated ``compress_whitespace``.
         """
         self.name = escape.native_str(name)
+
         if compress_whitespace is not _UNSET:
+            # Convert deprecated compress_whitespace (bool) to whitespace (str).
             if whitespace is not None:
-                raise Exception(
-                    'cannot set both whitespace and compress_whitespace')
-            whitespace = 'single' if compress_whitespace else 'all'
+                raise Exception("cannot set both whitespace and compress_whitespace")
+            whitespace = "single" if compress_whitespace else "all"
         if whitespace is None:
             if loader and loader.whitespace:
                 whitespace = loader.whitespace
-            elif name.endswith('.html') or name.endswith('.js'):
-                whitespace = 'single'
             else:
-                whitespace = 'all'
+                # Whitespace defaults by filename.
+                if name.endswith(".html") or name.endswith(".js"):
+                    whitespace = "single"
+                else:
+                    whitespace = "all"
+        # Validate the whitespace setting.
         assert whitespace is not None
-        filter_whitespace(whitespace, '')
+        filter_whitespace(whitespace, "")
+
         if not isinstance(autoescape, _UnsetMarker):
-            self.autoescape = autoescape
+            self.autoescape = autoescape  # type: Optional[str]
         elif loader:
             self.autoescape = loader.autoescape
         else:
             self.autoescape = _DEFAULT_AUTOESCAPE
+
         self.namespace = loader.namespace if loader else {}
-        reader = _TemplateReader(name, escape.native_str(template_string),
-            whitespace)
+        reader = _TemplateReader(name, escape.native_str(template_string), whitespace)
         self.file = _File(self, _parse(reader, self))
         self.code = self._generate_python(loader)
         self.loader = loader
         try:
-            self.compiled = compile(escape.to_unicode(self.code), 
-                '%s.generated.py' % self.name.replace('.', '_'), 'exec',
-                dont_inherit=True)
+            # Under python2.5, the fake filename used here must match
+            # the module name used in __name__ below.
+            # The dont_inherit flag prevents template.py's future imports
+            # from being applied to the generated code.
+            self.compiled = compile(
+                escape.to_unicode(self.code),
+                "%s.generated.py" % self.name.replace(".", "_"),
+                "exec",
+                dont_inherit=True,
+            )
         except Exception:
             formatted_code = _format_code(self.code).rstrip()
-            app_log.error('%s code:\n%s', self.name, formatted_code)
+            app_log.error("%s code:\n%s", self.name, formatted_code)
             raise

-    def generate(self, **kwargs: Any) ->bytes:
+    def generate(self, **kwargs: Any) -> bytes:
         """Generate this template with the given arguments."""
-        pass
+        namespace = {
+            "escape": escape.xhtml_escape,
+            "xhtml_escape": escape.xhtml_escape,
+            "url_escape": escape.url_escape,
+            "json_encode": escape.json_encode,
+            "squeeze": escape.squeeze,
+            "linkify": escape.linkify,
+            "datetime": datetime,
+            "_tt_utf8": escape.utf8,  # for internal use
+            "_tt_string_types": (unicode_type, bytes),
+            # __name__ and __loader__ allow the traceback mechanism to find
+            # the generated source code.
+            "__name__": self.name.replace(".", "_"),
+            "__loader__": ObjectDict(get_source=lambda name: self.code),
+        }
+        namespace.update(self.namespace)
+        namespace.update(kwargs)
+        exec_in(self.compiled, namespace)
+        execute = typing.cast(Callable[[], bytes], namespace["_tt_execute"])
+        # Clear the traceback module's cache of source data now that
+        # we've generated a new template (mainly for this module's
+        # unittests, where different tests reuse the same name).
+        linecache.clearcache()
+        return execute()
+
+    def _generate_python(self, loader: Optional["BaseLoader"]) -> str:
+        buffer = StringIO()
+        try:
+            # named_blocks maps from names to _NamedBlock objects
+            named_blocks = {}  # type: Dict[str, _NamedBlock]
+            ancestors = self._get_ancestors(loader)
+            ancestors.reverse()
+            for ancestor in ancestors:
+                ancestor.find_named_blocks(loader, named_blocks)
+            writer = _CodeWriter(buffer, named_blocks, loader, ancestors[0].template)
+            ancestors[0].generate(writer)
+            return buffer.getvalue()
+        finally:
+            buffer.close()
+
+    def _get_ancestors(self, loader: Optional["BaseLoader"]) -> List["_File"]:
+        ancestors = [self.file]
+        for chunk in self.file.body.chunks:
+            if isinstance(chunk, _ExtendsBlock):
+                if not loader:
+                    raise ParseError(
+                        "{% extends %} block found, but no template loader"
+                    )
+                template = loader.load(chunk.name, self.name)
+                ancestors.extend(template._get_ancestors(loader))
+        return ancestors


 class BaseLoader(object):
@@ -299,8 +397,12 @@ class BaseLoader(object):
     templates after they are loaded the first time.
     """

-    def __init__(self, autoescape: str=_DEFAULT_AUTOESCAPE, namespace:
-        Optional[Dict[str, Any]]=None, whitespace: Optional[str]=None) ->None:
+    def __init__(
+        self,
+        autoescape: str = _DEFAULT_AUTOESCAPE,
+        namespace: Optional[Dict[str, Any]] = None,
+        whitespace: Optional[str] = None,
+    ) -> None:
         """Construct a template loader.

         :arg str autoescape: The name of a function in the template
@@ -319,131 +421,281 @@ class BaseLoader(object):
         self.autoescape = autoescape
         self.namespace = namespace or {}
         self.whitespace = whitespace
-        self.templates = {}
+        self.templates = {}  # type: Dict[str, Template]
+        # self.lock protects self.templates.  It's a reentrant lock
+        # because templates may load other templates via `include` or
+        # `extends`.  Note that thanks to the GIL this code would be safe
+        # even without the lock, but could lead to wasted work as multiple
+        # threads tried to compile the same template simultaneously.
         self.lock = threading.RLock()

-    def reset(self) ->None:
+    def reset(self) -> None:
         """Resets the cache of compiled templates."""
-        pass
+        with self.lock:
+            self.templates = {}

-    def resolve_path(self, name: str, parent_path: Optional[str]=None) ->str:
+    def resolve_path(self, name: str, parent_path: Optional[str] = None) -> str:
         """Converts a possibly-relative path to absolute (used internally)."""
-        pass
+        raise NotImplementedError()

-    def load(self, name: str, parent_path: Optional[str]=None) ->Template:
+    def load(self, name: str, parent_path: Optional[str] = None) -> Template:
         """Loads a template."""
-        pass
+        name = self.resolve_path(name, parent_path=parent_path)
+        with self.lock:
+            if name not in self.templates:
+                self.templates[name] = self._create_template(name)
+            return self.templates[name]
+
+    def _create_template(self, name: str) -> Template:
+        raise NotImplementedError()


 class Loader(BaseLoader):
     """A template loader that loads from a single root directory."""

-    def __init__(self, root_directory: str, **kwargs: Any) ->None:
+    def __init__(self, root_directory: str, **kwargs: Any) -> None:
         super().__init__(**kwargs)
         self.root = os.path.abspath(root_directory)

+    def resolve_path(self, name: str, parent_path: Optional[str] = None) -> str:
+        if (
+            parent_path
+            and not parent_path.startswith("<")
+            and not parent_path.startswith("/")
+            and not name.startswith("/")
+        ):
+            current_path = os.path.join(self.root, parent_path)
+            file_dir = os.path.dirname(os.path.abspath(current_path))
+            relative_path = os.path.abspath(os.path.join(file_dir, name))
+            if relative_path.startswith(self.root):
+                name = relative_path[len(self.root) + 1 :]
+        return name
+
+    def _create_template(self, name: str) -> Template:
+        path = os.path.join(self.root, name)
+        with open(path, "rb") as f:
+            template = Template(f.read(), name=name, loader=self)
+            return template
+

 class DictLoader(BaseLoader):
     """A template loader that loads from a dictionary."""

-    def __init__(self, dict: Dict[str, str], **kwargs: Any) ->None:
+    def __init__(self, dict: Dict[str, str], **kwargs: Any) -> None:
         super().__init__(**kwargs)
         self.dict = dict

+    def resolve_path(self, name: str, parent_path: Optional[str] = None) -> str:
+        if (
+            parent_path
+            and not parent_path.startswith("<")
+            and not parent_path.startswith("/")
+            and not name.startswith("/")
+        ):
+            file_dir = posixpath.dirname(parent_path)
+            name = posixpath.normpath(posixpath.join(file_dir, name))
+        return name
+
+    def _create_template(self, name: str) -> Template:
+        return Template(self.dict[name], name=name, loader=self)
+

 class _Node(object):
-    pass
+    def each_child(self) -> Iterable["_Node"]:
+        return ()

+    def generate(self, writer: "_CodeWriter") -> None:
+        raise NotImplementedError()

-class _File(_Node):
+    def find_named_blocks(
+        self, loader: Optional[BaseLoader], named_blocks: Dict[str, "_NamedBlock"]
+    ) -> None:
+        for child in self.each_child():
+            child.find_named_blocks(loader, named_blocks)

-    def __init__(self, template: Template, body: '_ChunkList') ->None:
+
+class _File(_Node):
+    def __init__(self, template: Template, body: "_ChunkList") -> None:
         self.template = template
         self.body = body
         self.line = 0

+    def generate(self, writer: "_CodeWriter") -> None:
+        writer.write_line("def _tt_execute():", self.line)
+        with writer.indent():
+            writer.write_line("_tt_buffer = []", self.line)
+            writer.write_line("_tt_append = _tt_buffer.append", self.line)
+            self.body.generate(writer)
+            writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line)
+
+    def each_child(self) -> Iterable["_Node"]:
+        return (self.body,)

-class _ChunkList(_Node):

-    def __init__(self, chunks: List[_Node]) ->None:
+class _ChunkList(_Node):
+    def __init__(self, chunks: List[_Node]) -> None:
         self.chunks = chunks

+    def generate(self, writer: "_CodeWriter") -> None:
+        for chunk in self.chunks:
+            chunk.generate(writer)

-class _NamedBlock(_Node):
+    def each_child(self) -> Iterable["_Node"]:
+        return self.chunks

-    def __init__(self, name: str, body: _Node, template: Template, line: int
-        ) ->None:
+
+class _NamedBlock(_Node):
+    def __init__(self, name: str, body: _Node, template: Template, line: int) -> None:
         self.name = name
         self.body = body
         self.template = template
         self.line = line

+    def each_child(self) -> Iterable["_Node"]:
+        return (self.body,)

-class _ExtendsBlock(_Node):
+    def generate(self, writer: "_CodeWriter") -> None:
+        block = writer.named_blocks[self.name]
+        with writer.include(block.template, self.line):
+            block.body.generate(writer)
+
+    def find_named_blocks(
+        self, loader: Optional[BaseLoader], named_blocks: Dict[str, "_NamedBlock"]
+    ) -> None:
+        named_blocks[self.name] = self
+        _Node.find_named_blocks(self, loader, named_blocks)

-    def __init__(self, name: str) ->None:
+
+class _ExtendsBlock(_Node):
+    def __init__(self, name: str) -> None:
         self.name = name


 class _IncludeBlock(_Node):
-
-    def __init__(self, name: str, reader: '_TemplateReader', line: int) ->None:
+    def __init__(self, name: str, reader: "_TemplateReader", line: int) -> None:
         self.name = name
         self.template_name = reader.name
         self.line = line

+    def find_named_blocks(
+        self, loader: Optional[BaseLoader], named_blocks: Dict[str, _NamedBlock]
+    ) -> None:
+        assert loader is not None
+        included = loader.load(self.name, self.template_name)
+        included.file.find_named_blocks(loader, named_blocks)

-class _ApplyBlock(_Node):
+    def generate(self, writer: "_CodeWriter") -> None:
+        assert writer.loader is not None
+        included = writer.loader.load(self.name, self.template_name)
+        with writer.include(included, self.line):
+            included.file.body.generate(writer)

-    def __init__(self, method: str, line: int, body: _Node) ->None:
+
+class _ApplyBlock(_Node):
+    def __init__(self, method: str, line: int, body: _Node) -> None:
         self.method = method
         self.line = line
         self.body = body

+    def each_child(self) -> Iterable["_Node"]:
+        return (self.body,)
+
+    def generate(self, writer: "_CodeWriter") -> None:
+        method_name = "_tt_apply%d" % writer.apply_counter
+        writer.apply_counter += 1
+        writer.write_line("def %s():" % method_name, self.line)
+        with writer.indent():
+            writer.write_line("_tt_buffer = []", self.line)
+            writer.write_line("_tt_append = _tt_buffer.append", self.line)
+            self.body.generate(writer)
+            writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line)
+        writer.write_line(
+            "_tt_append(_tt_utf8(%s(%s())))" % (self.method, method_name), self.line
+        )

-class _ControlBlock(_Node):

-    def __init__(self, statement: str, line: int, body: _Node) ->None:
+class _ControlBlock(_Node):
+    def __init__(self, statement: str, line: int, body: _Node) -> None:
         self.statement = statement
         self.line = line
         self.body = body

+    def each_child(self) -> Iterable[_Node]:
+        return (self.body,)

-class _IntermediateControlBlock(_Node):
+    def generate(self, writer: "_CodeWriter") -> None:
+        writer.write_line("%s:" % self.statement, self.line)
+        with writer.indent():
+            self.body.generate(writer)
+            # Just in case the body was empty
+            writer.write_line("pass", self.line)

-    def __init__(self, statement: str, line: int) ->None:
+
+class _IntermediateControlBlock(_Node):
+    def __init__(self, statement: str, line: int) -> None:
         self.statement = statement
         self.line = line

+    def generate(self, writer: "_CodeWriter") -> None:
+        # In case the previous block was empty
+        writer.write_line("pass", self.line)
+        writer.write_line("%s:" % self.statement, self.line, writer.indent_size() - 1)

-class _Statement(_Node):

-    def __init__(self, statement: str, line: int) ->None:
+class _Statement(_Node):
+    def __init__(self, statement: str, line: int) -> None:
         self.statement = statement
         self.line = line

+    def generate(self, writer: "_CodeWriter") -> None:
+        writer.write_line(self.statement, self.line)

-class _Expression(_Node):

-    def __init__(self, expression: str, line: int, raw: bool=False) ->None:
+class _Expression(_Node):
+    def __init__(self, expression: str, line: int, raw: bool = False) -> None:
         self.expression = expression
         self.line = line
         self.raw = raw

+    def generate(self, writer: "_CodeWriter") -> None:
+        writer.write_line("_tt_tmp = %s" % self.expression, self.line)
+        writer.write_line(
+            "if isinstance(_tt_tmp, _tt_string_types): _tt_tmp = _tt_utf8(_tt_tmp)",
+            self.line,
+        )
+        writer.write_line("else: _tt_tmp = _tt_utf8(str(_tt_tmp))", self.line)
+        if not self.raw and writer.current_template.autoescape is not None:
+            # In python3 functions like xhtml_escape return unicode,
+            # so we have to convert to utf8 again.
+            writer.write_line(
+                "_tt_tmp = _tt_utf8(%s(_tt_tmp))" % writer.current_template.autoescape,
+                self.line,
+            )
+        writer.write_line("_tt_append(_tt_tmp)", self.line)

-class _Module(_Expression):

-    def __init__(self, expression: str, line: int) ->None:
-        super().__init__('_tt_modules.' + expression, line, raw=True)
+class _Module(_Expression):
+    def __init__(self, expression: str, line: int) -> None:
+        super().__init__("_tt_modules." + expression, line, raw=True)


 class _Text(_Node):
-
-    def __init__(self, value: str, line: int, whitespace: str) ->None:
+    def __init__(self, value: str, line: int, whitespace: str) -> None:
         self.value = value
         self.line = line
         self.whitespace = whitespace

+    def generate(self, writer: "_CodeWriter") -> None:
+        value = self.value
+
+        # Compress whitespace if requested, with a crude heuristic to avoid
+        # altering preformatted whitespace.
+        if "<pre>" not in value:
+            value = filter_whitespace(self.whitespace, value)
+
+        if value:
+            writer.write_line("_tt_append(%r)" % escape.utf8(value), self.line)
+

 class ParseError(Exception):
     """Raised for template syntax errors.
@@ -455,42 +707,115 @@ class ParseError(Exception):
        Added ``filename`` and ``lineno`` attributes.
     """

-    def __init__(self, message: str, filename: Optional[str]=None, lineno:
-        int=0) ->None:
+    def __init__(
+        self, message: str, filename: Optional[str] = None, lineno: int = 0
+    ) -> None:
         self.message = message
+        # The names "filename" and "lineno" are chosen for consistency
+        # with python SyntaxError.
         self.filename = filename
         self.lineno = lineno

-    def __str__(self) ->str:
-        return '%s at %s:%d' % (self.message, self.filename, self.lineno)
+    def __str__(self) -> str:
+        return "%s at %s:%d" % (self.message, self.filename, self.lineno)


 class _CodeWriter(object):
-
-    def __init__(self, file: TextIO, named_blocks: Dict[str, _NamedBlock],
-        loader: Optional[BaseLoader], current_template: Template) ->None:
+    def __init__(
+        self,
+        file: TextIO,
+        named_blocks: Dict[str, _NamedBlock],
+        loader: Optional[BaseLoader],
+        current_template: Template,
+    ) -> None:
         self.file = file
         self.named_blocks = named_blocks
         self.loader = loader
         self.current_template = current_template
         self.apply_counter = 0
-        self.include_stack = []
+        self.include_stack = []  # type: List[Tuple[Template, int]]
         self._indent = 0

+    def indent_size(self) -> int:
+        return self._indent

-class _TemplateReader(object):
+    def indent(self) -> "ContextManager":
+        class Indenter(object):
+            def __enter__(_) -> "_CodeWriter":
+                self._indent += 1
+                return self
+
+            def __exit__(_, *args: Any) -> None:
+                assert self._indent > 0
+                self._indent -= 1
+
+        return Indenter()
+
+    def include(self, template: Template, line: int) -> "ContextManager":
+        self.include_stack.append((self.current_template, line))
+        self.current_template = template
+
+        class IncludeTemplate(object):
+            def __enter__(_) -> "_CodeWriter":
+                return self
+
+            def __exit__(_, *args: Any) -> None:
+                self.current_template = self.include_stack.pop()[0]
+
+        return IncludeTemplate()

-    def __init__(self, name: str, text: str, whitespace: str) ->None:
+    def write_line(
+        self, line: str, line_number: int, indent: Optional[int] = None
+    ) -> None:
+        if indent is None:
+            indent = self._indent
+        line_comment = "  # %s:%d" % (self.current_template.name, line_number)
+        if self.include_stack:
+            ancestors = [
+                "%s:%d" % (tmpl.name, lineno) for (tmpl, lineno) in self.include_stack
+            ]
+            line_comment += " (via %s)" % ", ".join(reversed(ancestors))
+        print("    " * indent + line + line_comment, file=self.file)
+
+
+class _TemplateReader(object):
+    def __init__(self, name: str, text: str, whitespace: str) -> None:
         self.name = name
         self.text = text
         self.whitespace = whitespace
         self.line = 1
         self.pos = 0

-    def __len__(self) ->int:
+    def find(self, needle: str, start: int = 0, end: Optional[int] = None) -> int:
+        assert start >= 0, start
+        pos = self.pos
+        start += pos
+        if end is None:
+            index = self.text.find(needle, start)
+        else:
+            end += pos
+            assert end >= start
+            index = self.text.find(needle, start, end)
+        if index != -1:
+            index -= pos
+        return index
+
+    def consume(self, count: Optional[int] = None) -> str:
+        if count is None:
+            count = len(self.text) - self.pos
+        newpos = self.pos + count
+        self.line += self.text.count("\n", self.pos, newpos)
+        s = self.text[self.pos : newpos]
+        self.pos = newpos
+        return s
+
+    def remaining(self) -> int:
+        return len(self.text) - self.pos
+
+    def __len__(self) -> int:
         return self.remaining()

-    def __getitem__(self, key: Union[int, slice]) ->str:
+    def __getitem__(self, key: Union[int, slice]) -> str:
         if isinstance(key, slice):
             size = len(self)
             start, stop, step = key.indices(size)
@@ -506,5 +831,217 @@ class _TemplateReader(object):
         else:
             return self.text[self.pos + key]

-    def __str__(self) ->str:
-        return self.text[self.pos:]
+    def __str__(self) -> str:
+        return self.text[self.pos :]
+
+    def raise_parse_error(self, msg: str) -> None:
+        raise ParseError(msg, self.name, self.line)
+
+
+def _format_code(code: str) -> str:
+    lines = code.splitlines()
+    format = "%%%dd  %%s\n" % len(repr(len(lines) + 1))
+    return "".join([format % (i + 1, line) for (i, line) in enumerate(lines)])
+
+
+def _parse(
+    reader: _TemplateReader,
+    template: Template,
+    in_block: Optional[str] = None,
+    in_loop: Optional[str] = None,
+) -> _ChunkList:
+    body = _ChunkList([])
+    while True:
+        # Find next template directive
+        curly = 0
+        while True:
+            curly = reader.find("{", curly)
+            if curly == -1 or curly + 1 == reader.remaining():
+                # EOF
+                if in_block:
+                    reader.raise_parse_error(
+                        "Missing {%% end %%} block for %s" % in_block
+                    )
+                body.chunks.append(
+                    _Text(reader.consume(), reader.line, reader.whitespace)
+                )
+                return body
+            # If the first curly brace is not the start of a special token,
+            # start searching from the character after it
+            if reader[curly + 1] not in ("{", "%", "#"):
+                curly += 1
+                continue
+            # When there are more than 2 curlies in a row, use the
+            # innermost ones.  This is useful when generating languages
+            # like latex where curlies are also meaningful
+            if (
+                curly + 2 < reader.remaining()
+                and reader[curly + 1] == "{"
+                and reader[curly + 2] == "{"
+            ):
+                curly += 1
+                continue
+            break
+
+        # Append any text before the special token
+        if curly > 0:
+            cons = reader.consume(curly)
+            body.chunks.append(_Text(cons, reader.line, reader.whitespace))
+
+        start_brace = reader.consume(2)
+        line = reader.line
+
+        # Template directives may be escaped as "{{!" or "{%!".
+        # In this case output the braces and consume the "!".
+        # This is especially useful in conjunction with jquery templates,
+        # which also use double braces.
+        if reader.remaining() and reader[0] == "!":
+            reader.consume(1)
+            body.chunks.append(_Text(start_brace, line, reader.whitespace))
+            continue
+
+        # Comment
+        if start_brace == "{#":
+            end = reader.find("#}")
+            if end == -1:
+                reader.raise_parse_error("Missing end comment #}")
+            contents = reader.consume(end).strip()
+            reader.consume(2)
+            continue
+
+        # Expression
+        if start_brace == "{{":
+            end = reader.find("}}")
+            if end == -1:
+                reader.raise_parse_error("Missing end expression }}")
+            contents = reader.consume(end).strip()
+            reader.consume(2)
+            if not contents:
+                reader.raise_parse_error("Empty expression")
+            body.chunks.append(_Expression(contents, line))
+            continue
+
+        # Block
+        assert start_brace == "{%", start_brace
+        end = reader.find("%}")
+        if end == -1:
+            reader.raise_parse_error("Missing end block %}")
+        contents = reader.consume(end).strip()
+        reader.consume(2)
+        if not contents:
+            reader.raise_parse_error("Empty block tag ({% %})")
+
+        operator, space, suffix = contents.partition(" ")
+        suffix = suffix.strip()
+
+        # Intermediate ("else", "elif", etc) blocks
+        intermediate_blocks = {
+            "else": set(["if", "for", "while", "try"]),
+            "elif": set(["if"]),
+            "except": set(["try"]),
+            "finally": set(["try"]),
+        }
+        allowed_parents = intermediate_blocks.get(operator)
+        if allowed_parents is not None:
+            if not in_block:
+                reader.raise_parse_error(
+                    "%s outside %s block" % (operator, allowed_parents)
+                )
+            if in_block not in allowed_parents:
+                reader.raise_parse_error(
+                    "%s block cannot be attached to %s block" % (operator, in_block)
+                )
+            body.chunks.append(_IntermediateControlBlock(contents, line))
+            continue
+
+        # End tag
+        elif operator == "end":
+            if not in_block:
+                reader.raise_parse_error("Extra {% end %} block")
+            return body
+
+        elif operator in (
+            "extends",
+            "include",
+            "set",
+            "import",
+            "from",
+            "comment",
+            "autoescape",
+            "whitespace",
+            "raw",
+            "module",
+        ):
+            if operator == "comment":
+                continue
+            if operator == "extends":
+                suffix = suffix.strip('"').strip("'")
+                if not suffix:
+                    reader.raise_parse_error("extends missing file path")
+                block = _ExtendsBlock(suffix)  # type: _Node
+            elif operator in ("import", "from"):
+                if not suffix:
+                    reader.raise_parse_error("import missing statement")
+                block = _Statement(contents, line)
+            elif operator == "include":
+                suffix = suffix.strip('"').strip("'")
+                if not suffix:
+                    reader.raise_parse_error("include missing file path")
+                block = _IncludeBlock(suffix, reader, line)
+            elif operator == "set":
+                if not suffix:
+                    reader.raise_parse_error("set missing statement")
+                block = _Statement(suffix, line)
+            elif operator == "autoescape":
+                fn = suffix.strip()  # type: Optional[str]
+                if fn == "None":
+                    fn = None
+                template.autoescape = fn
+                continue
+            elif operator == "whitespace":
+                mode = suffix.strip()
+                # Validate the selected mode
+                filter_whitespace(mode, "")
+                reader.whitespace = mode
+                continue
+            elif operator == "raw":
+                block = _Expression(suffix, line, raw=True)
+            elif operator == "module":
+                block = _Module(suffix, line)
+            body.chunks.append(block)
+            continue
+
+        elif operator in ("apply", "block", "try", "if", "for", "while"):
+            # parse inner body recursively
+            if operator in ("for", "while"):
+                block_body = _parse(reader, template, operator, operator)
+            elif operator == "apply":
+                # apply creates a nested function so syntactically it's not
+                # in the loop.
+                block_body = _parse(reader, template, operator, None)
+            else:
+                block_body = _parse(reader, template, operator, in_loop)
+
+            if operator == "apply":
+                if not suffix:
+                    reader.raise_parse_error("apply missing method name")
+                block = _ApplyBlock(suffix, line, block_body)
+            elif operator == "block":
+                if not suffix:
+                    reader.raise_parse_error("block missing name")
+                block = _NamedBlock(suffix, block_body, template, line)
+            else:
+                block = _ControlBlock(contents, line, block_body)
+            body.chunks.append(block)
+            continue
+
+        elif operator in ("break", "continue"):
+            if not in_loop:
+                reader.raise_parse_error(
+                    "%s outside %s block" % (operator, set(["for", "while"]))
+                )
+            body.chunks.append(_Statement(contents, line))
+            continue
+
+        else:
+            reader.raise_parse_error("unknown operator: %r" % operator)
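
The loader classes above cache compiled ``Template`` objects under
``self.lock``, and ``_get_ancestors`` walks ``{% extends %}`` chains so that
``{% block %}`` definitions in a child template override the parent's. A
short sketch using ``DictLoader`` (template names and contents are invented
for illustration)::

    from tornado.template import DictLoader

    loader = DictLoader({
        "base.html": "<title>{% block title %}default{% end %}</title>",
        "page.html": '{% extends "base.html" %}'
                     "{% block title %}{{ name }}{% end %}",
    })
    # load() compiles and caches the template; generate() executes the
    # compiled code and returns bytes.
    print(loader.load("page.html").generate(name="Tornado"))
    # b'<title>Tornado</title>'
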
diff --git a/tornado/testing.py b/tornado/testing.py
index 0dea2c63..4c33b3e2 100644
--- a/tornado/testing.py
+++ b/tornado/testing.py
@@ -8,6 +8,7 @@
 * `main()`: A simple test runner (wrapper around unittest.main()) with support
   for the tornado.autoreload module to rerun the tests when code changes.
 """
+
 import asyncio
 from collections.abc import Generator
 import functools
@@ -20,6 +21,7 @@ import socket
 import sys
 import unittest
 import warnings
+
 from tornado import gen
 from tornado.httpclient import AsyncHTTPClient, HTTPResponse
 from tornado.httpserver import HTTPServer
@@ -30,17 +32,23 @@ from tornado.process import Subprocess
 from tornado.log import app_log
 from tornado.util import raise_exc_info, basestring_type
 from tornado.web import Application
+
 import typing
 from typing import Tuple, Any, Callable, Type, Dict, Union, Optional, Coroutine
 from types import TracebackType
+
 if typing.TYPE_CHECKING:
-    _ExcInfoTuple = Tuple[Optional[Type[BaseException]], Optional[
-        BaseException], Optional[TracebackType]]
+    _ExcInfoTuple = Tuple[
+        Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]
+    ]
+
+
 _NON_OWNED_IOLOOPS = AsyncIOMainLoop


-def bind_unused_port(reuse_port: bool=False, address: str='127.0.0.1') ->Tuple[
-    socket.socket, int]:
+def bind_unused_port(
+    reuse_port: bool = False, address: str = "127.0.0.1"
+) -> Tuple[socket.socket, int]:
     """Binds a server socket to an available port on localhost.

     Returns a tuple (socket, port).
@@ -53,17 +61,27 @@ def bind_unused_port(reuse_port: bool=False, address: str='127.0.0.1') ->Tuple[
        Added optional ``address`` argument to
        override the default "127.0.0.1".
     """
-    pass
+    sock = netutil.bind_sockets(
+        0, address, family=socket.AF_INET, reuse_port=reuse_port
+    )[0]
+    port = sock.getsockname()[1]
+    return sock, port
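A short usage sketch for ``bind_unused_port`` (``app`` here is an assumed
``tornado.web.Application``):

    sock, port = bind_unused_port()
    server = HTTPServer(app)
    server.add_sockets([sock])
    # Tests can now fetch "http://127.0.0.1:%d/" % port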


-def get_async_test_timeout() ->float:
+def get_async_test_timeout() -> float:
     """Get the global timeout setting for async tests.

     Returns a float, the timeout in seconds.

     .. versionadded:: 3.1
     """
-    pass
+    env = os.environ.get("ASYNC_TEST_TIMEOUT")
+    if env is not None:
+        try:
+            return float(env)
+        except ValueError:
+            pass
+    return 5
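The timeout can be overridden per-run through the environment; a quick sketch
of the fallback behavior implemented above:

    os.environ["ASYNC_TEST_TIMEOUT"] = "10"
    assert get_async_test_timeout() == 10.0

    os.environ["ASYNC_TEST_TIMEOUT"] = "not-a-number"
    assert get_async_test_timeout() == 5  # unparseable values use the default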


 class AsyncTestCase(unittest.TestCase):
@@ -114,16 +132,80 @@ class AsyncTestCase(unittest.TestCase):
                 self.assertIn("FriendFeed", response.body)
     """

-    def __init__(self, methodName: str='runTest') ->None:
+    def __init__(self, methodName: str = "runTest") -> None:
         super().__init__(methodName)
         self.__stopped = False
         self.__running = False
-        self.__failure = None
-        self.__stop_args = None
-        self.__timeout = None
-        self._test_generator = None
-
-    def get_new_ioloop(self) ->IOLoop:
+        self.__failure = None  # type: Optional[_ExcInfoTuple]
+        self.__stop_args = None  # type: Any
+        self.__timeout = None  # type: Optional[object]
+
+        # Not used in this class itself, but used by @gen_test
+        self._test_generator = None  # type: Optional[Union[Generator, Coroutine]]
+
+    def setUp(self) -> None:
+        py_ver = sys.version_info
+        if ((3, 10, 0) <= py_ver < (3, 10, 9)) or ((3, 11, 0) <= py_ver <= (3, 11, 1)):
+            # Early releases in the Python 3.10 and 3.11 series had deprecation
+            # warnings that were later reverted; we must suppress them here.
+            setup_with_context_manager(self, warnings.catch_warnings())
+            warnings.filterwarnings(
+                "ignore",
+                message="There is no current event loop",
+                category=DeprecationWarning,
+                module=r"tornado\..*",
+            )
+        super().setUp()
+        if type(self).get_new_ioloop is not AsyncTestCase.get_new_ioloop:
+            warnings.warn("get_new_ioloop is deprecated", DeprecationWarning)
+        self.io_loop = self.get_new_ioloop()
+        asyncio.set_event_loop(self.io_loop.asyncio_loop)  # type: ignore[attr-defined]
+
+    def tearDown(self) -> None:
+        # Native coroutines tend to produce warnings if they're not
+        # allowed to run to completion. It's difficult to ensure that
+        # this always happens in tests, so cancel any tasks that are
+        # still pending by the time we get here.
+        asyncio_loop = self.io_loop.asyncio_loop  # type: ignore
+        tasks = asyncio.all_tasks(asyncio_loop)
+        # Tasks that are done may still appear here and may contain
+        # non-cancellation exceptions, so filter them out.
+        tasks = [t for t in tasks if not t.done()]  # type: ignore
+        for t in tasks:
+            t.cancel()
+        # Allow the tasks to run and finalize themselves (which means
+        # raising a CancelledError inside the coroutine). This may
+        # just transform the "task was destroyed but it is pending"
+        # warning into a "uncaught CancelledError" warning, but
+        # catching CancelledErrors in coroutines that may leak is
+        # simpler than ensuring that no coroutines leak.
+        if tasks:
+            done, pending = self.io_loop.run_sync(lambda: asyncio.wait(tasks))
+            assert not pending
+            # If any task failed with anything but a CancelledError, raise it.
+            for f in done:
+                try:
+                    f.result()
+                except asyncio.CancelledError:
+                    pass
+
+        # Clean up Subprocess, so it can be used again with a new ioloop.
+        Subprocess.uninitialize()
+        asyncio.set_event_loop(None)
+        if not isinstance(self.io_loop, _NON_OWNED_IOLOOPS):
+            # Try to clean up any file descriptors left open in the ioloop.
+            # This avoids leaks, especially when tests are run repeatedly
+            # in the same process with autoreload (because curl does not
+            # set FD_CLOEXEC on its file descriptors)
+            self.io_loop.close(all_fds=True)
+        super().tearDown()
+        # In case an exception escaped or the StackContext caught an exception
+        # when there wasn't a wait() to re-raise it, do so here.
+        # This is our last chance to raise an exception in a way that the
+        # unittest machinery understands.
+        self.__rethrow()
+
+    def get_new_ioloop(self) -> IOLoop:
         """Returns the `.IOLoop` to use for this test.

         By default, a new `.IOLoop` is created for each test.
@@ -137,9 +219,38 @@ class AsyncTestCase(unittest.TestCase):
         .. deprecated:: 6.3
            This method will be removed in Tornado 7.0.
         """
-        pass
-
-    def _callTestMethod(self, method: Callable) ->None:
+        return IOLoop(make_current=False)
+
+    def _handle_exception(
+        self, typ: Type[Exception], value: Exception, tb: TracebackType
+    ) -> bool:
+        if self.__failure is None:
+            self.__failure = (typ, value, tb)
+        else:
+            app_log.error(
+                "multiple unhandled exceptions in test", exc_info=(typ, value, tb)
+            )
+        self.stop()
+        return True
+
+    def __rethrow(self) -> None:
+        if self.__failure is not None:
+            failure = self.__failure
+            self.__failure = None
+            raise_exc_info(failure)
+
+    def run(
+        self, result: Optional[unittest.TestResult] = None
+    ) -> Optional[unittest.TestResult]:
+        ret = super().run(result)
+        # As a last resort, if an exception escaped super.run() and wasn't
+        # re-raised in tearDown, raise it here.  This will cause the
+        # unittest run to fail messily, but that's better than silently
+        # ignoring an error.
+        self.__rethrow()
+        return ret
+
+    def _callTestMethod(self, method: Callable) -> None:
         """Run the given test method, raising an error if it returns non-None.

         Failure to decorate asynchronous test methods with ``@gen_test`` can lead to tests
@@ -152,9 +263,18 @@ class AsyncTestCase(unittest.TestCase):
         present in all supported versions of Python (3.8+), and if it goes away in the future that's
         OK because we can just remove this override as noted above.
         """
-        pass
-
-    def stop(self, _arg: Any=None, **kwargs: Any) ->None:
+        # Calling super()._callTestMethod would hide the return value, even in python 3.8-3.10
+        # where the check isn't being done for us.
+        result = method()
+        if isinstance(result, Generator) or inspect.iscoroutine(result):
+            raise TypeError(
+                "Generator and coroutine test methods should be"
+                " decorated with tornado.testing.gen_test"
+            )
+        elif result is not None:
+            raise ValueError("Return value from test method ignored: %r" % result)
+
+    def stop(self, _arg: Any = None, **kwargs: Any) -> None:
         """Stops the `.IOLoop`, causing one pending (or future) call to `wait()`
         to return.

@@ -165,10 +285,18 @@ class AsyncTestCase(unittest.TestCase):

            `stop` and `wait` are deprecated; use ``@gen_test`` instead.
         """
-        pass
-
-    def wait(self, condition: Optional[Callable[..., bool]]=None, timeout:
-        Optional[float]=None) ->Any:
+        assert _arg is None or not kwargs
+        self.__stop_args = kwargs or _arg
+        if self.__running:
+            self.io_loop.stop()
+            self.__running = False
+        self.__stopped = True
+
+    def wait(
+        self,
+        condition: Optional[Callable[..., bool]] = None,
+        timeout: Optional[float] = None,
+    ) -> Any:
         """Runs the `.IOLoop` until stop is called or timeout has passed.

         In the event of a timeout, an exception will be thrown. The
@@ -186,7 +314,38 @@ class AsyncTestCase(unittest.TestCase):

            `stop` and `wait` are deprecated; use ``@gen_test`` instead.
         """
-        pass
+        if timeout is None:
+            timeout = get_async_test_timeout()
+
+        if not self.__stopped:
+            if timeout:
+
+                def timeout_func() -> None:
+                    try:
+                        raise self.failureException(
+                            "Async operation timed out after %s seconds" % timeout
+                        )
+                    except Exception:
+                        self.__failure = sys.exc_info()
+                    self.stop()
+
+                self.__timeout = self.io_loop.add_timeout(
+                    self.io_loop.time() + timeout, timeout_func
+                )
+            while True:
+                self.__running = True
+                self.io_loop.start()
+                if self.__failure is not None or condition is None or condition():
+                    break
+            if self.__timeout is not None:
+                self.io_loop.remove_timeout(self.__timeout)
+                self.__timeout = None
+        assert self.__stopped
+        self.__stopped = False
+        self.__rethrow()
+        result = self.__stop_args
+        self.__stop_args = None
+        return result
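A sketch of the deprecated ``stop``/``wait`` pattern that this implementation
supports (``@gen_test`` is preferred for new tests):

    class LegacyCallbackTest(AsyncTestCase):
        def test_add_callback(self):
            # stop() records its argument, which wait() then returns.
            self.io_loop.add_callback(self.stop, 42)
            self.assertEqual(self.wait(), 42)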


 class AsyncHTTPTestCase(AsyncTestCase):
@@ -222,14 +381,31 @@ class AsyncHTTPTestCase(AsyncTestCase):
     ``stop()`` and ``wait()`` yourself.
     """

-    def get_app(self) ->Application:
+    def setUp(self) -> None:
+        super().setUp()
+        sock, port = bind_unused_port()
+        self.__port = port
+
+        self.http_client = self.get_http_client()
+        self._app = self.get_app()
+        self.http_server = self.get_http_server()
+        self.http_server.add_sockets([sock])
+
+    def get_http_client(self) -> AsyncHTTPClient:
+        return AsyncHTTPClient()
+
+    def get_http_server(self) -> HTTPServer:
+        return HTTPServer(self._app, **self.get_httpserver_options())
+
+    def get_app(self) -> Application:
         """Should be overridden by subclasses to return a
         `tornado.web.Application` or other `.HTTPServer` callback.
         """
-        pass
+        raise NotImplementedError()

-    def fetch(self, path: str, raise_error: bool=False, **kwargs: Any
-        ) ->HTTPResponse:
+    def fetch(
+        self, path: str, raise_error: bool = False, **kwargs: Any
+    ) -> HTTPResponse:
         """Convenience method to synchronously fetch a URL.

         The given path will be appended to the local server's host and
@@ -263,24 +439,44 @@ class AsyncHTTPTestCase(AsyncTestCase):
            response codes.

         """
-        pass
-
-    def get_httpserver_options(self) ->Dict[str, Any]:
+        if path.lower().startswith(("http://", "https://")):
+            url = path
+        else:
+            url = self.get_url(path)
+        return self.io_loop.run_sync(
+            lambda: self.http_client.fetch(url, raise_error=raise_error, **kwargs),
+            timeout=get_async_test_timeout(),
+        )
+
+    def get_httpserver_options(self) -> Dict[str, Any]:
         """May be overridden by subclasses to return additional
         keyword arguments for the server.
         """
-        pass
+        return {}

-    def get_http_port(self) ->int:
+    def get_http_port(self) -> int:
         """Returns the port used by the server.

         A new port is chosen for each test.
         """
-        pass
+        return self.__port
+
+    def get_protocol(self) -> str:
+        return "http"

-    def get_url(self, path: str) ->str:
+    def get_url(self, path: str) -> str:
         """Returns an absolute url for the given path on the test server."""
-        pass
+        return "%s://127.0.0.1:%s%s" % (self.get_protocol(), self.get_http_port(), path)
+
+    def tearDown(self) -> None:
+        self.http_server.stop()
+        self.io_loop.run_sync(
+            self.http_server.close_all_connections, timeout=get_async_test_timeout()
+        )
+        self.http_client.close()
+        del self.http_server
+        del self._app
+        super().tearDown()
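Putting the pieces together, a minimal (hypothetical) subclass exercising
``get_app`` and ``fetch``:

    class HelloHandler(RequestHandler):
        def get(self):
            self.write("hello")

    class HelloTest(AsyncHTTPTestCase):
        def get_app(self):
            return Application([("/", HelloHandler)])

        def test_hello(self):
            response = self.fetch("/")
            self.assertEqual(response.body, b"hello")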


 class AsyncHTTPSTestCase(AsyncHTTPTestCase):
@@ -289,18 +485,55 @@ class AsyncHTTPSTestCase(AsyncHTTPTestCase):
     Interface is generally the same as `AsyncHTTPTestCase`.
     """

-    def get_ssl_options(self) ->Dict[str, Any]:
+    def get_http_client(self) -> AsyncHTTPClient:
+        return AsyncHTTPClient(force_instance=True, defaults=dict(validate_cert=False))
+
+    def get_httpserver_options(self) -> Dict[str, Any]:
+        return dict(ssl_options=self.get_ssl_options())
+
+    def get_ssl_options(self) -> Dict[str, Any]:
         """May be overridden by subclasses to select SSL options.

         By default includes a self-signed testing certificate.
         """
-        pass
+        return AsyncHTTPSTestCase.default_ssl_options()
+
+    @staticmethod
+    def default_ssl_options() -> Dict[str, Any]:
+        # Testing keys were generated with:
+        # openssl req -new -keyout tornado/test/test.key \
+        #     -out tornado/test/test.crt \
+        #     -nodes -days 3650 -x509 \
+        #     -subj "/CN=foo.example.com" -addext "subjectAltName = DNS:foo.example.com"
+        module_dir = os.path.dirname(__file__)
+        return dict(
+            certfile=os.path.join(module_dir, "test", "test.crt"),
+            keyfile=os.path.join(module_dir, "test", "test.key"),
+        )
+
+    def get_protocol(self) -> str:
+        return "https"
+
+
+@typing.overload
+def gen_test(
+    *, timeout: Optional[float] = None
+) -> Callable[[Callable[..., Union[Generator, "Coroutine"]]], Callable[..., None]]:
+    pass
+
+
+@typing.overload  # noqa: F811
+def gen_test(func: Callable[..., Union[Generator, "Coroutine"]]) -> Callable[..., None]:
+    pass


-def gen_test(func: Optional[Callable[..., Union[Generator, 'Coroutine']]]=
-    None, timeout: Optional[float]=None) ->Union[Callable[..., None],
-    Callable[[Callable[..., Union[Generator, 'Coroutine']]], Callable[...,
-    None]]]:
+def gen_test(  # noqa: F811
+    func: Optional[Callable[..., Union[Generator, "Coroutine"]]] = None,
+    timeout: Optional[float] = None,
+) -> Union[
+    Callable[..., None],
+    Callable[[Callable[..., Union[Generator, "Coroutine"]]], Callable[..., None]],
+]:
     """Testing equivalent of ``@gen.coroutine``, to be applied to test methods.

     ``@gen.coroutine`` cannot be used on tests because the `.IOLoop` is not
@@ -336,10 +569,74 @@ def gen_test(func: Optional[Callable[..., Union[Generator, 'Coroutine']]]=
        on functions with arguments.

     """
-    pass
-
-
-gen_test.__test__ = False
+    if timeout is None:
+        timeout = get_async_test_timeout()
+
+    def wrap(f: Callable[..., Union[Generator, "Coroutine"]]) -> Callable[..., None]:
+        # Stack up several decorators to allow us to access the generator
+        # object itself.  In the innermost wrapper, we capture the generator
+        # and save it in an attribute of self.  Next, we run the wrapped
+        # function through @gen.coroutine.  Finally, the coroutine is
+        # wrapped again to make it synchronous with run_sync.
+        #
+        # This is a good case study arguing for either some sort of
+        # extensibility in the gen decorators or cancellation support.
+        @functools.wraps(f)
+        def pre_coroutine(self, *args, **kwargs):
+            # type: (AsyncTestCase, *Any, **Any) -> Union[Generator, Coroutine]
+            # Type comments used to avoid pypy3 bug.
+            result = f(self, *args, **kwargs)
+            if isinstance(result, Generator) or inspect.iscoroutine(result):
+                self._test_generator = result
+            else:
+                self._test_generator = None
+            return result
+
+        if inspect.iscoroutinefunction(f):
+            coro = pre_coroutine
+        else:
+            coro = gen.coroutine(pre_coroutine)  # type: ignore[assignment]
+
+        @functools.wraps(coro)
+        def post_coroutine(self, *args, **kwargs):
+            # type: (AsyncTestCase, *Any, **Any) -> None
+            try:
+                return self.io_loop.run_sync(
+                    functools.partial(coro, self, *args, **kwargs), timeout=timeout
+                )
+            except TimeoutError as e:
+                # run_sync raises an error with an unhelpful traceback.
+                # If the underlying generator is still running, we can throw the
+                # exception back into it so the stack trace is replaced by the
+                # point where the test is stopped. The only reason the generator
+                # would not be running would be if it were cancelled, which means
+                # a native coroutine, so we can rely on the cr_running attribute.
+                if self._test_generator is not None and getattr(
+                    self._test_generator, "cr_running", True
+                ):
+                    self._test_generator.throw(e)
+                    # In case the test contains an overly broad except
+                    # clause, we may get back here.
+                # Coroutine was stopped or didn't raise a useful stack trace,
+                # so re-raise the original exception which is better than nothing.
+                raise
+
+        return post_coroutine
+
+    if func is not None:
+        # Used like:
+        #     @gen_test
+        #     def f(self):
+        #         pass
+        return wrap(func)
+    else:
+        # Used like @gen_test(timeout=10)
+        return wrap
+
+
+# Without this attribute, nosetests will try to run gen_test as a test
+# anywhere it is imported.
+gen_test.__test__ = False  # type: ignore
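Both decorator forms handled by the ``func is not None`` branch above, in a
short sketch:

    class SleepTest(AsyncTestCase):
        @gen_test
        def test_generator_style(self):
            yield gen.sleep(0.01)

        @gen_test(timeout=10)
        async def test_native_coroutine(self):
            await gen.sleep(0.01)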


 class ExpectLog(logging.Filter):
@@ -360,8 +657,13 @@ class ExpectLog(logging.Filter):
        Added the ``logged_stack`` attribute.
     """

-    def __init__(self, logger: Union[logging.Logger, basestring_type],
-        regex: str, required: bool=True, level: Optional[int]=None) ->None:
+    def __init__(
+        self,
+        logger: Union[logging.Logger, basestring_type],
+        regex: str,
+        required: bool = True,
+        level: Optional[int] = None,
+    ) -> None:
         """Constructs an ExpectLog context manager.

         :param logger: Logger object (or name of logger) to watch.  Pass an
@@ -392,40 +694,73 @@ class ExpectLog(logging.Filter):
         self.logger = logger
         self.regex = re.compile(regex)
         self.required = required
+        # matched and deprecated_level_matched are a counter for the respective event.
         self.matched = 0
         self.deprecated_level_matched = 0
         self.logged_stack = False
         self.level = level
-        self.orig_level = None
-
-    def __enter__(self) ->'ExpectLog':
-        if (self.level is not None and self.level < self.logger.
-            getEffectiveLevel()):
+        self.orig_level = None  # type: Optional[int]
+
+    def filter(self, record: logging.LogRecord) -> bool:
+        if record.exc_info:
+            self.logged_stack = True
+        message = record.getMessage()
+        if self.regex.match(message):
+            if self.level is None and record.levelno < logging.WARNING:
+                # We're inside the logging machinery here so generating a DeprecationWarning
+                # here won't be reported cleanly (if warnings-as-errors is enabled, the error
+                # just gets swallowed by the logging module), and even if it were it would
+                # have the wrong stack trace. Just remember this fact and report it in
+                # __exit__ instead.
+                self.deprecated_level_matched += 1
+            if self.level is not None and record.levelno != self.level:
+                app_log.warning(
+                    "Got expected log message %r at unexpected level (%s vs %s)"
+                    % (message, logging.getLevelName(self.level), record.levelname)
+                )
+                return True
+            self.matched += 1
+            return False
+        return True
+
+    def __enter__(self) -> "ExpectLog":
+        if self.level is not None and self.level < self.logger.getEffectiveLevel():
             self.orig_level = self.logger.level
             self.logger.setLevel(self.level)
         self.logger.addFilter(self)
         return self

-    def __exit__(self, typ: 'Optional[Type[BaseException]]', value:
-        Optional[BaseException], tb: Optional[TracebackType]) ->None:
+    def __exit__(
+        self,
+        typ: "Optional[Type[BaseException]]",
+        value: Optional[BaseException],
+        tb: Optional[TracebackType],
+    ) -> None:
         if self.orig_level is not None:
             self.logger.setLevel(self.orig_level)
         self.logger.removeFilter(self)
         if not typ and self.required and not self.matched:
-            raise Exception('did not get expected log message')
-        if (not typ and self.required and self.deprecated_level_matched >=
-            self.matched):
+            raise Exception("did not get expected log message")
+        if (
+            not typ
+            and self.required
+            and (self.deprecated_level_matched >= self.matched)
+        ):
             warnings.warn(
-                'ExpectLog matched at INFO or below without level argument',
-                DeprecationWarning)
+                "ExpectLog matched at INFO or below without level argument",
+                DeprecationWarning,
+            )
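A usage sketch for ``ExpectLog`` (the log message is illustrative):

    import logging
    from tornado.log import app_log

    # filter() above swallows matching records and counts them;
    # __exit__ raises if a required pattern never matched.
    with ExpectLog(app_log, "something went wrong", level=logging.ERROR):
        app_log.error("something went wrong")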


-def setup_with_context_manager(testcase: unittest.TestCase, cm: Any) ->Any:
+# From https://nedbatchelder.com/blog/201508/using_context_managers_in_test_setup.html
+def setup_with_context_manager(testcase: unittest.TestCase, cm: Any) -> Any:
     """Use a contextmanager to setUp a test case."""
-    pass
+    val = cm.__enter__()
+    testcase.addCleanup(cm.__exit__, None, None, None)
+    return val


-def main(**kwargs: Any) ->None:
+def main(**kwargs: Any) -> None:
     """A simple test runner.

     This test runner is essentially equivalent to `unittest.main` from
@@ -465,8 +800,57 @@ def main(**kwargs: Any) ->None:
        by the `unittest` module (previously it would add a PASS or FAIL
        log message).
     """
-    pass
-
-
-if __name__ == '__main__':
+    from tornado.options import define, options, parse_command_line
+
+    define(
+        "exception_on_interrupt",
+        type=bool,
+        default=True,
+        help=(
+            "If true (default), ctrl-c raises a KeyboardInterrupt "
+            "exception.  This prints a stack trace but cannot interrupt "
+            "certain operations.  If false, the process is more reliably "
+            "killed, but does not print a stack trace."
+        ),
+    )
+
+    # support the same options as unittest's command-line interface
+    define("verbose", type=bool)
+    define("quiet", type=bool)
+    define("failfast", type=bool)
+    define("catch", type=bool)
+    define("buffer", type=bool)
+
+    argv = [sys.argv[0]] + parse_command_line(sys.argv)
+
+    if not options.exception_on_interrupt:
+        signal.signal(signal.SIGINT, signal.SIG_DFL)
+
+    if options.verbose is not None:
+        kwargs["verbosity"] = 2
+    if options.quiet is not None:
+        kwargs["verbosity"] = 0
+    if options.failfast is not None:
+        kwargs["failfast"] = True
+    if options.catch is not None:
+        kwargs["catchbreak"] = True
+    if options.buffer is not None:
+        kwargs["buffer"] = True
+
+    if __name__ == "__main__" and len(argv) == 1:
+        print("No tests specified", file=sys.stderr)
+        sys.exit(1)
+    # In order to be able to run tests by their fully-qualified name
+    # on the command line without importing all tests here,
+    # module must be set to None.  Python 3.2's unittest.main ignores
+    # defaultTest if no module is given (it tries to do its own
+    # test discovery, which is incompatible with auto2to3), so don't
+    # set module if we're not asking for a specific test.
+    if len(argv) > 1:
+        unittest.main(module=None, argv=argv, **kwargs)  # type: ignore
+    else:
+        unittest.main(defaultTest="all", argv=argv, **kwargs)
+
+
+if __name__ == "__main__":
     main()
diff --git a/tornado/util.py b/tornado/util.py
index 182c7dcd..3a3a52f1 100644
--- a/tornado/util.py
+++ b/tornado/util.py
@@ -9,6 +9,7 @@ and its `~Configurable.configure` method, which becomes a part of the
 interface of its subclasses, including `.AsyncHTTPClient`, `.IOLoop`,
 and `.Resolver`.
 """
+
 import array
 import asyncio
 import atexit
@@ -17,32 +18,67 @@ import os
 import re
 import typing
 import zlib
-from typing import Any, Optional, Dict, Mapping, List, Tuple, Match, Callable, Type, Sequence
+
+from typing import (
+    Any,
+    Optional,
+    Dict,
+    Mapping,
+    List,
+    Tuple,
+    Match,
+    Callable,
+    Type,
+    Sequence,
+)
+
 if typing.TYPE_CHECKING:
-    import datetime
-    from types import TracebackType
-    from typing import Union
-    import unittest
+    # Additional imports only used in type comments.
+    # This lets us make these imports lazy.
+    import datetime  # noqa: F401
+    from types import TracebackType  # noqa: F401
+    from typing import Union  # noqa: F401
+    import unittest  # noqa: F401
+
+# Aliases for types that are spelled differently in different Python
+# versions. bytes_type is deprecated and no longer used in Tornado
+# itself but is left in case anyone outside Tornado is using it.
 bytes_type = bytes
 unicode_type = str
 basestring_type = str
+
 try:
     from sys import is_finalizing
 except ImportError:
+    # Emulate it
+    def _get_emulated_is_finalizing() -> Callable[[], bool]:
+        L = []  # type: List[None]
+        atexit.register(lambda: L.append(None))
+
+        def is_finalizing() -> bool:
+            # Not referencing any globals here
+            return L != []
+
+        return is_finalizing
+
     is_finalizing = _get_emulated_is_finalizing()
+
+
+# versionchanged:: 6.2
+# no longer our own TimeoutError, use standard asyncio class
 TimeoutError = asyncio.TimeoutError


 class ObjectDict(Dict[str, Any]):
     """Makes a dictionary behave like an object, with attribute-style access."""

-    def __getattr__(self, name: str) ->Any:
+    def __getattr__(self, name: str) -> Any:
         try:
             return self[name]
         except KeyError:
             raise AttributeError(name)

-    def __setattr__(self, name: str, value: Any) ->None:
+    def __setattr__(self, name: str, value: Any) -> None:
         self[name] = value


@@ -53,10 +89,13 @@ class GzipDecompressor(object):
     optional arguments, but it understands gzip headers and checksums.
     """

-    def __init__(self) ->None:
+    def __init__(self) -> None:
+        # Magic parameter makes zlib module understand gzip header
+        # http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib
+        # This works on cpython and pypy, but not jython.
         self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS)

-    def decompress(self, value: bytes, max_length: int=0) ->bytes:
+    def decompress(self, value: bytes, max_length: int = 0) -> bytes:
         """Decompress a chunk, returning newly-available data.

         Some data may be buffered for later processing; `flush` must
@@ -67,23 +106,23 @@ class GzipDecompressor(object):
         in ``unconsumed_tail``; you must retrieve this value and pass
         it back to a future call to `decompress` if it is not empty.
         """
-        pass
+        return self.decompressobj.decompress(value, max_length)

     @property
-    def unconsumed_tail(self) ->bytes:
+    def unconsumed_tail(self) -> bytes:
         """Returns the unconsumed portion left over"""
-        pass
+        return self.decompressobj.unconsumed_tail

-    def flush(self) ->bytes:
+    def flush(self) -> bytes:
         """Return any remaining buffered data not yet returned by decompress.

         Also checks for errors such as truncated input.
         No other methods may be called on this object after `flush`.
         """
-        pass
+        return self.decompressobj.flush()
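A round-trip sketch using the standard-library ``gzip`` module to produce
input for ``GzipDecompressor``:

    import gzip

    compressed = gzip.compress(b"hello world")
    decompressor = GzipDecompressor()
    data = decompressor.decompress(compressed) + decompressor.flush()
    assert data == b"hello world"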


-def import_object(name: str) ->Any:
+def import_object(name: str) -> Any:
     """Imports an object by name.

     ``import_object('x')`` is equivalent to ``import x``.
@@ -101,10 +140,42 @@ def import_object(name: str) ->Any:
         ...
     ImportError: No module named missing_module
     """
-    pass
+    if name.count(".") == 0:
+        return __import__(name)

+    parts = name.split(".")
+    obj = __import__(".".join(parts[:-1]), fromlist=[parts[-1]])
+    try:
+        return getattr(obj, parts[-1])
+    except AttributeError:
+        raise ImportError("No module named %s" % parts[-1])

-def errno_from_exception(e: BaseException) ->Optional[int]:
+
+def exec_in(
+    code: Any, glob: Dict[str, Any], loc: Optional[Mapping[str, Any]] = None
+) -> None:
+    if isinstance(code, str):
+        # exec(string) inherits the caller's future imports; compile
+        # the string first to prevent that.
+        code = compile(code, "<string>", "exec", dont_inherit=True)
+    exec(code, glob, loc)
+
+
+def raise_exc_info(
+    exc_info: Tuple[Optional[type], Optional[BaseException], Optional["TracebackType"]]
+) -> typing.NoReturn:
+    try:
+        if exc_info[1] is not None:
+            raise exc_info[1].with_traceback(exc_info[2])
+        else:
+            raise TypeError("raise_exc_info called with no exception")
+    finally:
+        # Clear the traceback reference from our stack frame to
+        # minimize circular references that slow down GC.
+        exc_info = (None, None, None)
+
+
+def errno_from_exception(e: BaseException) -> Optional[int]:
     """Provides the errno from an Exception object.

     There are cases that the errno attribute was not set so we pull
@@ -113,24 +184,38 @@ def errno_from_exception(e: BaseException) ->Optional[int]:
     abstracts all that behavior to give you a safe way to get the
     errno.
     """
-    pass
+
+    if hasattr(e, "errno"):
+        return e.errno  # type: ignore
+    elif e.args:
+        return e.args[0]
+    else:
+        return None
+
+
+_alphanum = frozenset("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
+
+
+def _re_unescape_replacement(match: Match[str]) -> str:
+    group = match.group(1)
+    if group[0] in _alphanum:
+        raise ValueError("cannot unescape '\\\\%s'" % group[0])
+    return group


-_alphanum = frozenset(
-    'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789')
-_re_unescape_pattern = re.compile('\\\\(.)', re.DOTALL)
+_re_unescape_pattern = re.compile(r"\\(.)", re.DOTALL)


-def re_unescape(s: str) ->str:
-    """Unescape a string escaped by `re.escape`.
+def re_unescape(s: str) -> str:
+    r"""Unescape a string escaped by `re.escape`.

     May raise ``ValueError`` for regular expressions which could not
     have been produced by `re.escape` (for example, strings containing
-    ``\\d`` cannot be unescaped).
+    ``\d`` cannot be unescaped).

     .. versionadded:: 4.4
     """
-    pass
+    return _re_unescape_pattern.sub(_re_unescape_replacement, s)
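Since `re.escape` only ever prefixes backslashes to non-alphanumeric
characters, the substitution above inverts it exactly; a quick sketch:

    import re

    s = "host:port/path?q=1"
    assert re_unescape(re.escape(s)) == s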


 class Configurable(object):
@@ -159,12 +244,21 @@ class Configurable(object):
        multiple levels of a class hierarchy.

     """
-    __impl_class = None
-    __impl_kwargs = None

-    def __new__(cls, *args: Any, **kwargs: Any) ->Any:
+    # Type annotations on this class are mostly done with comments
+    # because they need to refer to Configurable, which isn't defined
+    # until after the class definition block. These can use regular
+    # annotations when our minimum python version is 3.7.
+    #
+    # There may be a clever way to use generics here to get more
+    # precise types (i.e. for a particular Configurable subclass T,
+    # all the types are subclasses of T, not just Configurable).
+    __impl_class = None  # type: Optional[Type[Configurable]]
+    __impl_kwargs = None  # type: Dict[str, Any]
+
+    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
         base = cls.configurable_base()
-        init_kwargs = {}
+        init_kwargs = {}  # type: Dict[str, Any]
         if cls is base:
             impl = cls.configured_class()
             if base.__impl_kwargs:
@@ -173,13 +267,18 @@ class Configurable(object):
             impl = cls
         init_kwargs.update(kwargs)
         if impl.configurable_base() is not base:
+            # The impl class is itself configurable, so recurse.
             return impl(*args, **init_kwargs)
         instance = super(Configurable, cls).__new__(impl)
+        # initialize vs __init__ chosen for compatibility with AsyncHTTPClient
+        # singleton magic.  If we get rid of that we can switch to __init__
+        # here too.
         instance.initialize(*args, **init_kwargs)
         return instance

     @classmethod
     def configurable_base(cls):
+        # type: () -> Type[Configurable]
         """Returns the base class of a configurable hierarchy.

         This will normally return the class in which it is defined.
@@ -187,13 +286,18 @@ class Configurable(object):
         parameter).

         """
-        pass
+        raise NotImplementedError()

     @classmethod
     def configurable_default(cls):
+        # type: () -> Type[Configurable]
         """Returns the implementation class to be used if none is configured."""
+        raise NotImplementedError()
+
+    def _initialize(self) -> None:
         pass
-    initialize = _initialize
+
+    initialize = _initialize  # type: Callable[..., None]
     """Initialize a `Configurable` subclass instance.

     Configurable classes should use `initialize` instead of ``__init__``.
@@ -204,18 +308,49 @@ class Configurable(object):

     @classmethod
     def configure(cls, impl, **kwargs):
+        # type: (Union[None, str, Type[Configurable]], Any) -> None
         """Sets the class to use when the base class is instantiated.

         Keyword arguments will be saved and added to the arguments passed
         to the constructor.  This can be used to set global defaults for
         some parameters.
         """
-        pass
+        base = cls.configurable_base()
+        if isinstance(impl, str):
+            impl = typing.cast(Type[Configurable], import_object(impl))
+        if impl is not None and not issubclass(impl, cls):
+            raise ValueError("Invalid subclass of %s" % cls)
+        base.__impl_class = impl
+        base.__impl_kwargs = kwargs

     @classmethod
     def configured_class(cls):
+        # type: () -> Type[Configurable]
         """Returns the currently configured class."""
-        pass
+        base = cls.configurable_base()
+        # Manually mangle the private name to see whether this base
+        # has been configured (and not another base higher in the
+        # hierarchy).
+        if base.__dict__.get("_Configurable__impl_class") is None:
+            base.__impl_class = cls.configurable_default()
+        if base.__impl_class is not None:
+            return base.__impl_class
+        else:
+            # Should be impossible, but mypy wants an explicit check.
+            raise ValueError("configured class not found")
+
+    @classmethod
+    def _save_configuration(cls):
+        # type: () -> Tuple[Optional[Type[Configurable]], Dict[str, Any]]
+        base = cls.configurable_base()
+        return (base.__impl_class, base.__impl_kwargs)
+
+    @classmethod
+    def _restore_configuration(cls, saved):
+        # type: (Tuple[Optional[Type[Configurable]], Dict[str, Any]]) -> None
+        base = cls.configurable_base()
+        base.__impl_class = saved[0]
+        base.__impl_kwargs = saved[1]
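A minimal (hypothetical) hierarchy showing how ``configure`` and
``configured_class`` interact; ``Transport``/``TcpTransport`` are illustrative
names, not Tornado classes:

    class Transport(Configurable):
        @classmethod
        def configurable_base(cls):
            return Transport

        @classmethod
        def configurable_default(cls):
            return TcpTransport

    class TcpTransport(Transport):
        def initialize(self, port=80):
            self.port = port

    Transport.configure(TcpTransport, port=8888)
    t = Transport()  # __new__ dispatches to the configured implementation
    assert isinstance(t, TcpTransport) and t.port == 8888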


 class ArgReplacer(object):
@@ -226,23 +361,44 @@ class ArgReplacer(object):
     and similar wrappers.
     """

-    def __init__(self, func: Callable, name: str) ->None:
+    def __init__(self, func: Callable, name: str) -> None:
         self.name = name
         try:
-            self.arg_pos = self._getargnames(func).index(name)
+            self.arg_pos = self._getargnames(func).index(name)  # type: Optional[int]
         except ValueError:
+            # Not a positional parameter
             self.arg_pos = None

-    def get_old_value(self, args: Sequence[Any], kwargs: Dict[str, Any],
-        default: Any=None) ->Any:
+    def _getargnames(self, func: Callable) -> List[str]:
+        try:
+            return getfullargspec(func).args
+        except TypeError:
+            if hasattr(func, "func_code"):
+                # Cython-generated code has all the attributes needed
+                # by inspect.getfullargspec, but the inspect module only
+                # works with ordinary functions. Inline the portion of
+                # getfullargspec that we need here. Note that for static
+                # functions the @cython.binding(True) decorator must
+                # be used (for methods it works out of the box).
+                code = func.func_code  # type: ignore
+                return code.co_varnames[: code.co_argcount]
+            raise
+
+    def get_old_value(
+        self, args: Sequence[Any], kwargs: Dict[str, Any], default: Any = None
+    ) -> Any:
         """Returns the old value of the named argument without replacing it.

         Returns ``default`` if the argument is not present.
         """
-        pass
+        if self.arg_pos is not None and len(args) > self.arg_pos:
+            return args[self.arg_pos]
+        else:
+            return kwargs.get(self.name, default)

-    def replace(self, new_value: Any, args: Sequence[Any], kwargs: Dict[str,
-        Any]) ->Tuple[Any, Sequence[Any], Dict[str, Any]]:
+    def replace(
+        self, new_value: Any, args: Sequence[Any], kwargs: Dict[str, Any]
+    ) -> Tuple[Any, Sequence[Any], Dict[str, Any]]:
         """Replace the named argument in ``args, kwargs`` with ``new_value``.

         Returns ``(old_value, args, kwargs)``.  The returned ``args`` and
@@ -252,15 +408,25 @@ class ArgReplacer(object):
         If the named argument was not found, ``new_value`` will be added
         to ``kwargs`` and None will be returned as ``old_value``.
         """
-        pass
+        if self.arg_pos is not None and len(args) > self.arg_pos:
+            # The arg to replace is passed positionally
+            old_value = args[self.arg_pos]
+            args = list(args)  # *args is normally a tuple
+            args[self.arg_pos] = new_value
+        else:
+            # The arg to replace is either omitted or passed by keyword.
+            old_value = kwargs.get(self.name)
+            kwargs[self.name] = new_value
+        return old_value, args, kwargs
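A short sketch of ``ArgReplacer`` against a hypothetical function:

    def greet(name, greeting="hello"):
        return "%s, %s" % (greeting, name)

    replacer = ArgReplacer(greet, "greeting")
    old, args, kwargs = replacer.replace("hi", ("world",), {})
    assert old is None  # greeting was not passed originally
    assert greet(*args, **kwargs) == "hi, world"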


 def timedelta_to_seconds(td):
+    # type: (datetime.timedelta) -> float
     """Equivalent to ``td.total_seconds()`` (introduced in Python 2.7)."""
-    pass
+    return td.total_seconds()


-def _websocket_mask_python(mask: bytes, data: bytes) ->bytes:
+def _websocket_mask_python(mask: bytes, data: bytes) -> bytes:
     """Websocket masking function.

     `mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length.
@@ -269,16 +435,28 @@ def _websocket_mask_python(mask: bytes, data: bytes) ->bytes:

     This pure-python implementation may be replaced by an optimized version when available.
     """
-    pass
+    mask_arr = array.array("B", mask)
+    unmasked_arr = array.array("B", data)
+    for i in range(len(data)):
+        unmasked_arr[i] = unmasked_arr[i] ^ mask_arr[i % 4]
+    return unmasked_arr.tobytes()
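Because XOR is its own inverse, masking twice with the same key restores the
input; a quick check of the implementation above:

    mask = b"abcd"
    payload = b"websocket frame payload"
    masked = _websocket_mask_python(mask, payload)
    assert _websocket_mask_python(mask, masked) == payload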


-if os.environ.get('TORNADO_NO_EXTENSION') or os.environ.get('TORNADO_EXTENSION'
-    ) == '0':
+if os.environ.get("TORNADO_NO_EXTENSION") or os.environ.get("TORNADO_EXTENSION") == "0":
+    # These environment variables exist to make it easier to do performance
+    # comparisons; they are not guaranteed to remain supported in the future.
     _websocket_mask = _websocket_mask_python
 else:
     try:
         from tornado.speedups import websocket_mask as _websocket_mask
     except ImportError:
-        if os.environ.get('TORNADO_EXTENSION') == '1':
+        if os.environ.get("TORNADO_EXTENSION") == "1":
             raise
         _websocket_mask = _websocket_mask_python
+
+
+def doctests():
+    # type: () -> unittest.TestSuite
+    import doctest
+
+    return doctest.DocTestSuite()
diff --git a/tornado/web.py b/tornado/web.py
index 7c62397d..03939647 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -1,3 +1,18 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """``tornado.web`` provides a simple web framework with asynchronous
 features that allow it to scale to large numbers of open connections,
 making it ideal for `long polling
@@ -44,6 +59,7 @@ request, or to limit your use of other threads to
 the executor do not refer to Tornado objects.

 """
+
 import base64
 import binascii
 import datetime
@@ -69,6 +85,7 @@ import traceback
 import types
 import urllib.parse
 from urllib.parse import urlencode
+
 from tornado.concurrent import Future, future_set_result_unless_cancelled
 from tornado import escape
 from tornado import gen
@@ -79,17 +96,50 @@ from tornado import locale
 from tornado.log import access_log, app_log, gen_log
 from tornado import template
 from tornado.escape import utf8, _unicode
-from tornado.routing import AnyMatches, DefaultHostMatches, HostMatches, ReversibleRouter, Rule, ReversibleRuleRouter, URLSpec, _RuleList
+from tornado.routing import (
+    AnyMatches,
+    DefaultHostMatches,
+    HostMatches,
+    ReversibleRouter,
+    Rule,
+    ReversibleRuleRouter,
+    URLSpec,
+    _RuleList,
+)
 from tornado.util import ObjectDict, unicode_type, _websocket_mask
+
 url = URLSpec
-from typing import Dict, Any, Union, Optional, Awaitable, Tuple, List, Callable, Iterable, Generator, Type, TypeVar, cast, overload
+
+from typing import (
+    Dict,
+    Any,
+    Union,
+    Optional,
+    Awaitable,
+    Tuple,
+    List,
+    Callable,
+    Iterable,
+    Generator,
+    Type,
+    TypeVar,
+    cast,
+    overload,
+)
 from types import TracebackType
 import typing
+
 if typing.TYPE_CHECKING:
-    from typing import Set
-_HeaderTypes = Union[bytes, unicode_type, int, numbers.Integral, datetime.
-    datetime]
+    from typing import Set  # noqa: F401
+
+
+# The following types are accepted by RequestHandler.set_header
+# and related methods.
+_HeaderTypes = Union[bytes, unicode_type, int, numbers.Integral, datetime.datetime]
+
 _CookieSecretTypes = Union[str, bytes, Dict[int, str], Dict[int, bytes]]
+
+
 MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1
 """The oldest signed value version supported by this version of Tornado.

@@ -97,6 +147,7 @@ Signed values older than this version cannot be decoded.

 .. versionadded:: 3.2.1
 """
+
 MAX_SUPPORTED_SIGNED_VALUE_VERSION = 2
 """The newest signed value version supported by this version of Tornado.

@@ -104,6 +155,7 @@ Signed values newer than this version cannot be decoded.

 .. versionadded:: 3.2.1
 """
+
 DEFAULT_SIGNED_VALUE_VERSION = 2
 """The signed value version produced by `.RequestHandler.create_signed_value`.

@@ -111,6 +163,7 @@ May be overridden by passing a ``version`` keyword argument.

 .. versionadded:: 3.2.1
 """
+
 DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
 """The oldest signed value accepted by `.RequestHandler.get_signed_cookie`.

@@ -138,35 +191,56 @@ class RequestHandler(object):
     `~RequestHandler.initialize` instead).

     """
-    SUPPORTED_METHODS = ('GET', 'HEAD', 'POST', 'DELETE', 'PATCH', 'PUT',
-        'OPTIONS')
-    _template_loaders = {}
+
+    SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT", "OPTIONS")
+
+    _template_loaders = {}  # type: Dict[str, template.BaseLoader]
     _template_loader_lock = threading.Lock()
-    _remove_control_chars_regex = re.compile('[\\x00-\\x08\\x0e-\\x1f]')
+    _remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
+
     _stream_request_body = False
-    _transforms = None
-    path_args = None
-    path_kwargs = None

-    def __init__(self, application: 'Application', request: httputil.
-        HTTPServerRequest, **kwargs: Any) ->None:
+    # Will be set in _execute.
+    _transforms = None  # type: List[OutputTransform]
+    path_args = None  # type: List[str]
+    path_kwargs = None  # type: Dict[str, str]
+
+    def __init__(
+        self,
+        application: "Application",
+        request: httputil.HTTPServerRequest,
+        **kwargs: Any,
+    ) -> None:
         super().__init__()
+
         self.application = application
         self.request = request
         self._headers_written = False
         self._finished = False
         self._auto_finish = True
         self._prepared_future = None
-        self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
-            application.ui_methods.items())
-        self.ui['_tt_modules'] = _UIModuleNamespace(self, application.
-            ui_modules)
-        self.ui['modules'] = self.ui['_tt_modules']
+        self.ui = ObjectDict(
+            (n, self._ui_method(m)) for n, m in application.ui_methods.items()
+        )
+        # UIModules are available as both `modules` and `_tt_modules` in the
+        # template namespace.  Historically only `modules` was available
+        # but could be clobbered by user additions to the namespace.
+        # The template {% module %} directive looks in `_tt_modules` to avoid
+        # possible conflicts.
+        self.ui["_tt_modules"] = _UIModuleNamespace(self, application.ui_modules)
+        self.ui["modules"] = self.ui["_tt_modules"]
         self.clear()
         assert self.request.connection is not None
-        self.request.connection.set_close_callback(self.on_connection_close)
-        self.initialize(**kwargs)
-    initialize = _initialize
+        # TODO: need to add set_close_callback to HTTPConnection interface
+        self.request.connection.set_close_callback(  # type: ignore
+            self.on_connection_close
+        )
+        self.initialize(**kwargs)  # type: ignore
+
+    def _initialize(self) -> None:
+        pass
+
+    initialize = _initialize  # type: Callable[..., None]
     """Hook for subclass initialization. Called for each request.

     A dictionary passed as the third argument of a ``URLSpec`` will be
@@ -187,18 +261,22 @@ class RequestHandler(object):
     """

     @property
-    def settings(self) ->Dict[str, Any]:
+    def settings(self) -> Dict[str, Any]:
         """An alias for `self.application.settings <Application.settings>`."""
-        pass
-    head = _unimplemented_method
-    get = _unimplemented_method
-    post = _unimplemented_method
-    delete = _unimplemented_method
-    patch = _unimplemented_method
-    put = _unimplemented_method
-    options = _unimplemented_method
-
-    def prepare(self) ->Optional[Awaitable[None]]:
+        return self.application.settings
+
+    def _unimplemented_method(self, *args: str, **kwargs: str) -> None:
+        raise HTTPError(405)
+
+    head = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
+    get = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
+    post = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
+    delete = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
+    patch = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
+    put = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
+    options = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
+
+    def prepare(self) -> Optional[Awaitable[None]]:
         """Called at the beginning of a request before  `get`/`post`/etc.

         Override this method to perform common initialization regardless
@@ -214,7 +292,7 @@ class RequestHandler(object):
         """
         pass

-    def on_finish(self) ->None:
+    def on_finish(self) -> None:
         """Called after the end of a request.

         Override this method to perform cleanup, logging, etc.
@@ -224,7 +302,7 @@ class RequestHandler(object):
         """
         pass

-    def on_connection_close(self) ->None:
+    def on_connection_close(self) -> None:
         """Called in async handlers if the client closed the connection.

         Override this to clean up resources associated with
@@ -238,13 +316,26 @@ class RequestHandler(object):
         may not be called promptly after the end user closes their
         connection.
         """
-        pass
+        if _has_stream_request_body(self.__class__):
+            if not self.request._body_future.done():
+                self.request._body_future.set_exception(iostream.StreamClosedError())
+                self.request._body_future.exception()

-    def clear(self) ->None:
+    def clear(self) -> None:
         """Resets all headers and content for this response."""
-        pass
-
-    def set_default_headers(self) ->None:
+        self._headers = httputil.HTTPHeaders(
+            {
+                "Server": "TornadoServer/%s" % tornado.version,
+                "Content-Type": "text/html; charset=UTF-8",
+                "Date": httputil.format_timestamp(time.time()),
+            }
+        )
+        self.set_default_headers()
+        self._write_buffer = []  # type: List[bytes]
+        self._status_code = 200
+        self._reason = httputil.responses[200]
+
+    def set_default_headers(self) -> None:
         """Override this to set HTTP headers at the beginning of the request.

         For example, this is the place to set a custom ``Server`` header.
@@ -254,7 +345,7 @@ class RequestHandler(object):
         """
         pass

-    def set_status(self, status_code: int, reason: Optional[str]=None) ->None:
+    def set_status(self, status_code: int, reason: Optional[str] = None) -> None:
         """Sets the status code for our response.

         :arg int status_code: Response status code.
@@ -267,13 +358,17 @@ class RequestHandler(object):
            No longer validates that the response code is in
            `http.client.responses`.
         """
-        pass
+        self._status_code = status_code
+        if reason is not None:
+            self._reason = escape.native_str(reason)
+        else:
+            self._reason = httputil.responses.get(status_code, "Unknown")

-    def get_status(self) ->int:
+    def get_status(self) -> int:
         """Returns the status code for our response."""
-        pass
+        return self._status_code

-    def set_header(self, name: str, value: _HeaderTypes) ->None:
+    def set_header(self, name: str, value: _HeaderTypes) -> None:
         """Sets the given response header name and value.

         All header values are converted to strings (`datetime` objects
@@ -281,27 +376,73 @@ class RequestHandler(object):
         ``Date`` header).

         """
-        pass
+        self._headers[name] = self._convert_header_value(value)

-    def add_header(self, name: str, value: _HeaderTypes) ->None:
+    def add_header(self, name: str, value: _HeaderTypes) -> None:
         """Adds the given response header and value.

         Unlike `set_header`, `add_header` may be called multiple times
         to return multiple values for the same header.
         """
-        pass
+        self._headers.add(name, self._convert_header_value(value))

-    def clear_header(self, name: str) ->None:
+    def clear_header(self, name: str) -> None:
         """Clears an outgoing header, undoing a previous `set_header` call.

         Note that this method does not apply to multi-valued headers
         set by `add_header`.
         """
+        if name in self._headers:
+            del self._headers[name]
+
+    _INVALID_HEADER_CHAR_RE = re.compile(r"[\x00-\x1f]")
+
+    def _convert_header_value(self, value: _HeaderTypes) -> str:
+        # Convert the input value to a str. str values pass through
+        # unchanged; bytes are decoded as latin1 so arbitrary byte
+        # values survive unmodified; the numeric and datetime cases
+        # below return header-safe formatted strings directly.
+        if isinstance(value, str):
+            retval = value
+        elif isinstance(value, bytes):
+            # Non-ascii characters in headers are not well supported,
+            # but if you pass bytes, use latin1 so they pass through as-is.
+            retval = value.decode("latin1")
+        elif isinstance(value, numbers.Integral):
+            # return immediately since we know the converted value will be safe
+            return str(value)
+        elif isinstance(value, datetime.datetime):
+            return httputil.format_timestamp(value)
+        else:
+            raise TypeError("Unsupported header value %r" % value)
+        # If \n is allowed into the header, it is possible to inject
+        # additional headers or split the request.
+        if RequestHandler._INVALID_HEADER_CHAR_RE.search(retval):
+            raise ValueError("Unsafe header value %r", retval)
+        return retval
+
+    @overload
+    def get_argument(self, name: str, default: str, strip: bool = True) -> str:
+        pass
+
+    @overload
+    def get_argument(  # noqa: F811
+        self, name: str, default: _ArgDefaultMarker = _ARG_DEFAULT, strip: bool = True
+    ) -> str:
+        pass
+
+    @overload
+    def get_argument(  # noqa: F811
+        self, name: str, default: None, strip: bool = True
+    ) -> Optional[str]:
         pass
-    _INVALID_HEADER_CHAR_RE = re.compile('[\\x00-\\x1f]')

-    def get_argument(self, name: str, default: Union[None, str,
-        _ArgDefaultMarker]=_ARG_DEFAULT, strip: bool=True) ->Optional[str]:
+    def get_argument(  # noqa: F811
+        self,
+        name: str,
+        default: Union[None, str, _ArgDefaultMarker] = _ARG_DEFAULT,
+        strip: bool = True,
+    ) -> Optional[str]:
         """Returns the value of the argument with the given name.

         If default is not provided, the argument is considered to be
@@ -312,19 +453,29 @@ class RequestHandler(object):

         This method searches both the query and body arguments.
         """
-        pass
+        return self._get_argument(name, default, self.request.arguments, strip)

-    def get_arguments(self, name: str, strip: bool=True) ->List[str]:
+    def get_arguments(self, name: str, strip: bool = True) -> List[str]:
         """Returns a list of the arguments with the given name.

         If the argument is not present, returns an empty list.

         This method searches both the query and body arguments.
         """
-        pass

-    def get_body_argument(self, name: str, default: Union[None, str,
-        _ArgDefaultMarker]=_ARG_DEFAULT, strip: bool=True) ->Optional[str]:
+        # Make sure `get_arguments` isn't accidentally being called with a
+        # positional argument that's assumed to be a default (like in
+        # `get_argument`.)
+        assert isinstance(strip, bool)
+
+        return self._get_arguments(name, self.request.arguments, strip)
+
+    def get_body_argument(
+        self,
+        name: str,
+        default: Union[None, str, _ArgDefaultMarker] = _ARG_DEFAULT,
+        strip: bool = True,
+    ) -> Optional[str]:
         """Returns the value of the argument with the given name
         from the request body.

@@ -336,19 +487,23 @@ class RequestHandler(object):

         .. versionadded:: 3.2
         """
-        pass
+        return self._get_argument(name, default, self.request.body_arguments, strip)

-    def get_body_arguments(self, name: str, strip: bool=True) ->List[str]:
+    def get_body_arguments(self, name: str, strip: bool = True) -> List[str]:
         """Returns a list of the body arguments with the given name.

         If the argument is not present, returns an empty list.

         .. versionadded:: 3.2
         """
-        pass
+        return self._get_arguments(name, self.request.body_arguments, strip)

-    def get_query_argument(self, name: str, default: Union[None, str,
-        _ArgDefaultMarker]=_ARG_DEFAULT, strip: bool=True) ->Optional[str]:
+    def get_query_argument(
+        self,
+        name: str,
+        default: Union[None, str, _ArgDefaultMarker] = _ARG_DEFAULT,
+        strip: bool = True,
+    ) -> Optional[str]:
         """Returns the value of the argument with the given name
         from the request query string.

@@ -360,18 +515,47 @@ class RequestHandler(object):

         .. versionadded:: 3.2
         """
-        pass
+        return self._get_argument(name, default, self.request.query_arguments, strip)

-    def get_query_arguments(self, name: str, strip: bool=True) ->List[str]:
+    def get_query_arguments(self, name: str, strip: bool = True) -> List[str]:
         """Returns a list of the query arguments with the given name.

         If the argument is not present, returns an empty list.

         .. versionadded:: 3.2
         """
-        pass
-
-    def decode_argument(self, value: bytes, name: Optional[str]=None) ->str:
+        return self._get_arguments(name, self.request.query_arguments, strip)
+
+    def _get_argument(
+        self,
+        name: str,
+        default: Union[None, str, _ArgDefaultMarker],
+        source: Dict[str, List[bytes]],
+        strip: bool = True,
+    ) -> Optional[str]:
+        args = self._get_arguments(name, source, strip=strip)
+        if not args:
+            if isinstance(default, _ArgDefaultMarker):
+                raise MissingArgumentError(name)
+            return default
+        return args[-1]
+
+    def _get_arguments(
+        self, name: str, source: Dict[str, List[bytes]], strip: bool = True
+    ) -> List[str]:
+        values = []
+        for v in source.get(name, []):
+            s = self.decode_argument(v, name=name)
+            if isinstance(s, unicode_type):
+                # Get rid of any weird control chars (unless decoding gave
+                # us bytes, in which case leave it alone)
+                s = RequestHandler._remove_control_chars_regex.sub(" ", s)
+            if strip:
+                s = s.strip()
+            values.append(s)
+        return values
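
So for a hypothetical request to ``/demo?x=%20a%20&x=b``, the two accessors
behave as follows (values are decoded, control characters replaced, and
stripped by default):

    self.get_arguments("x")               # ["a", "b"]
    self.get_argument("x")                # "b" -- the last value wins
    self.get_arguments("x", strip=False)  # [" a ", "b"]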
+
+    def decode_argument(self, value: bytes, name: Optional[str] = None) -> str:
         """Decodes an argument from the request.

         The argument has been percent-decoded and is now a byte string.
@@ -384,16 +568,20 @@ class RequestHandler(object):
         The name of the argument is provided if known, but may be None
         (e.g. for unnamed groups in the url regex).
         """
-        pass
+        try:
+            return _unicode(value)
+        except UnicodeDecodeError:
+            raise HTTPError(
+                400, "Invalid unicode in %s: %r" % (name or "url", value[:40])
+            )
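
A sketch of overriding this hook for a handler that must tolerate legacy
non-UTF-8 clients (class name hypothetical; assumes this module's imports):

    class Latin1FallbackHandler(tornado.web.RequestHandler):
        def decode_argument(self, value: bytes, name: Optional[str] = None) -> str:
            try:
                return value.decode("utf-8")
            except UnicodeDecodeError:
                # Fall back instead of raising the 400 the default produces.
                return value.decode("latin1")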

     @property
-    def cookies(self) ->Dict[str, http.cookies.Morsel]:
+    def cookies(self) -> Dict[str, http.cookies.Morsel]:
         """An alias for
         `self.request.cookies <.httputil.HTTPServerRequest.cookies>`."""
-        pass
+        return self.request.cookies

-    def get_cookie(self, name: str, default: Optional[str]=None) ->Optional[str
-        ]:
+    def get_cookie(self, name: str, default: Optional[str] = None) -> Optional[str]:
         """Returns the value of the request cookie with the given name.

         If the named cookie is not present, returns ``default``.
@@ -402,13 +590,26 @@ class RequestHandler(object):
         It does not see the outgoing cookies set by `set_cookie` in this
         handler.
         """
-        pass
-
-    def set_cookie(self, name: str, value: Union[str, bytes], domain:
-        Optional[str]=None, expires: Optional[Union[float, Tuple, datetime.
-        datetime]]=None, path: str='/', expires_days: Optional[float]=None,
-        *, max_age: Optional[int]=None, httponly: bool=False, secure: bool=
-        False, samesite: Optional[str]=None, **kwargs: Any) ->None:
+        if self.request.cookies is not None and name in self.request.cookies:
+            return self.request.cookies[name].value
+        return default
+
+    def set_cookie(
+        self,
+        name: str,
+        value: Union[str, bytes],
+        domain: Optional[str] = None,
+        expires: Optional[Union[float, Tuple, datetime.datetime]] = None,
+        path: str = "/",
+        expires_days: Optional[float] = None,
+        # Keyword-only args start here for historical reasons.
+        *,
+        max_age: Optional[int] = None,
+        httponly: bool = False,
+        secure: bool = False,
+        samesite: Optional[str] = None,
+        **kwargs: Any,
+    ) -> None:
         """Sets an outgoing cookie name/value with the given options.

         Newly-set cookies are not immediately visible via `get_cookie`;
@@ -429,9 +630,54 @@ class RequestHandler(object):
            In Tornado 7.0 this will be changed to only accept lowercase
            arguments.
         """
-        pass
-
-    def clear_cookie(self, name: str, **kwargs: Any) ->None:
+        # The cookie library only accepts type str, in both python 2 and 3
+        name = escape.native_str(name)
+        value = escape.native_str(value)
+        if re.search(r"[\x00-\x20]", name + value):
+            # Don't let us accidentally inject bad stuff
+            raise ValueError("Invalid cookie %r: %r" % (name, value))
+        if not hasattr(self, "_new_cookie"):
+            self._new_cookie = (
+                http.cookies.SimpleCookie()
+            )  # type: http.cookies.SimpleCookie
+        if name in self._new_cookie:
+            del self._new_cookie[name]
+        self._new_cookie[name] = value
+        morsel = self._new_cookie[name]
+        if domain:
+            morsel["domain"] = domain
+        if expires_days is not None and not expires:
+            expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(
+                days=expires_days
+            )
+        if expires:
+            morsel["expires"] = httputil.format_timestamp(expires)
+        if path:
+            morsel["path"] = path
+        if max_age:
+            # Note change from _ to -.
+            morsel["max-age"] = str(max_age)
+        if httponly:
+            # Note that SimpleCookie ignores the value here. The presence of an
+            # httponly (or secure) key is treated as true.
+            morsel["httponly"] = True
+        if secure:
+            morsel["secure"] = True
+        if samesite:
+            morsel["samesite"] = samesite
+        if kwargs:
+            # The setitem interface is case-insensitive, so continue to support
+            # kwargs for backwards compatibility until we can remove deprecated
+            # features.
+            for k, v in kwargs.items():
+                morsel[k] = v
+            warnings.warn(
+                f"Deprecated arguments to set_cookie: {set(kwargs.keys())} "
+                "(should be lowercase)",
+                DeprecationWarning,
+            )
+
+    def clear_cookie(self, name: str, **kwargs: Any) -> None:
         """Deletes the cookie with the given name.

         This method accepts the same arguments as `set_cookie`, except for
@@ -449,9 +695,17 @@ class RequestHandler(object):
            The ``samesite`` and ``secure`` flags have recently become
            required for clearing ``samesite="none"`` cookies.
         """
-        pass
-
-    def clear_all_cookies(self, **kwargs: Any) ->None:
+        for excluded_arg in ["expires", "max_age"]:
+            if excluded_arg in kwargs:
+                raise TypeError(
+                    f"clear_cookie() got an unexpected keyword argument '{excluded_arg}'"
+                )
+        expires = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(
+            days=365
+        )
+        self.set_cookie(name, value="", expires=expires, **kwargs)
+
+    def clear_all_cookies(self, **kwargs: Any) -> None:
         """Attempt to delete all the cookies the user sent with this request.

         See `clear_cookie` for more information on keyword arguments. Due to
@@ -479,11 +733,17 @@ class RequestHandler(object):
            since all we know about cookies are their names. Applications
            should generally use ``clear_cookie`` one at a time instead.
         """
-        pass
-
-    def set_signed_cookie(self, name: str, value: Union[str, bytes],
-        expires_days: Optional[float]=30, version: Optional[int]=None, **
-        kwargs: Any) ->None:
+        for name in self.request.cookies:
+            self.clear_cookie(name, **kwargs)
+
+    def set_signed_cookie(
+        self,
+        name: str,
+        value: Union[str, bytes],
+        expires_days: Optional[float] = 30,
+        version: Optional[int] = None,
+        **kwargs: Any,
+    ) -> None:
         """Signs and timestamps a cookie so it cannot be forged.

         You must specify the ``cookie_secret`` setting in your Application
@@ -514,11 +774,18 @@ class RequestHandler(object):
            avoid confusion with other uses of "secure" in cookie attributes
            and prefixes. The old name remains as an alias.
         """
-        pass
+        self.set_cookie(
+            name,
+            self.create_signed_value(name, value, version=version),
+            expires_days=expires_days,
+            **kwargs,
+        )
+
     set_secure_cookie = set_signed_cookie

-    def create_signed_value(self, name: str, value: Union[str, bytes],
-        version: Optional[int]=None) ->bytes:
+    def create_signed_value(
+        self, name: str, value: Union[str, bytes], version: Optional[int] = None
+    ) -> bytes:
         """Signs and timestamps a string so it cannot be forged.

         Normally used via set_signed_cookie, but provided as a separate
@@ -530,11 +797,25 @@ class RequestHandler(object):
            Added the ``version`` argument.  Introduced cookie version 2
            and made it the default.
         """
-        pass
-
-    def get_signed_cookie(self, name: str, value: Optional[str]=None,
-        max_age_days: float=31, min_version: Optional[int]=None) ->Optional[
-        bytes]:
+        self.require_setting("cookie_secret", "secure cookies")
+        secret = self.application.settings["cookie_secret"]
+        key_version = None
+        if isinstance(secret, dict):
+            if self.application.settings.get("key_version") is None:
+                raise Exception("key_version setting must be used for secret_key dicts")
+            key_version = self.application.settings["key_version"]
+
+        return create_signed_value(
+            secret, name, value, version=version, key_version=key_version
+        )
+
+    def get_signed_cookie(
+        self,
+        name: str,
+        value: Optional[str] = None,
+        max_age_days: float = 31,
+        min_version: Optional[int] = None,
+    ) -> Optional[bytes]:
         """Returns the given signed cookie if it validates, or None.

         The decoded cookie value is returned as a byte string (unlike
@@ -556,11 +837,22 @@ class RequestHandler(object):
            and prefixes. The old name remains as an alias.

         """
-        pass
+        self.require_setting("cookie_secret", "secure cookies")
+        if value is None:
+            value = self.get_cookie(name)
+        return decode_signed_value(
+            self.application.settings["cookie_secret"],
+            name,
+            value,
+            max_age_days=max_age_days,
+            min_version=min_version,
+        )
+
     get_secure_cookie = get_signed_cookie
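
A minimal sign/read-back sketch (handler names hypothetical; requires the
``cookie_secret`` application setting):

    class LoginHandler(tornado.web.RequestHandler):
        def post(self):
            self.set_signed_cookie("user", self.get_argument("name"),
                                   expires_days=1)

    class ProfileHandler(tornado.web.RequestHandler):
        def get(self):
            user = self.get_signed_cookie("user")  # bytes, or None if
            if user is None:                       # missing/forged/expired
                raise HTTPError(403)
            self.write("hello " + escape.xhtml_escape(user.decode()))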

-    def get_signed_cookie_key_version(self, name: str, value: Optional[str]
-        =None) ->Optional[int]:
+    def get_signed_cookie_key_version(
+        self, name: str, value: Optional[str] = None
+    ) -> Optional[int]:
         """Returns the signing key version of the secure cookie.

         The version is returned as int.
@@ -573,11 +865,18 @@ class RequestHandler(object):
            remains as an alias.

         """
-        pass
+        self.require_setting("cookie_secret", "secure cookies")
+        if value is None:
+            value = self.get_cookie(name)
+        if value is None:
+            return None
+        return get_signature_key_version(value)
+
     get_secure_cookie_key_version = get_signed_cookie_key_version

-    def redirect(self, url: str, permanent: bool=False, status: Optional[
-        int]=None) ->None:
+    def redirect(
+        self, url: str, permanent: bool = False, status: Optional[int] = None
+    ) -> None:
         """Sends a redirect to the given (optionally relative) URL.

         If the ``status`` argument is specified, that value is used as the
@@ -585,9 +884,17 @@ class RequestHandler(object):
         (temporary) is chosen based on the ``permanent`` argument.
         The default is 302 (temporary).
         """
-        pass
+        if self._headers_written:
+            raise Exception("Cannot redirect after headers have been written")
+        if status is None:
+            status = 301 if permanent else 302
+        else:
+            assert isinstance(status, int) and 300 <= status <= 399
+        self.set_status(status)
+        self.set_header("Location", utf8(url))
+        self.finish()
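
For example (hypothetical handler), a permanent move:

    class OldArticleHandler(tornado.web.RequestHandler):
        def get(self, article_id):
            # 301 because permanent=True; the default is a 302.
            self.redirect("/article/" + article_id, permanent=True)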

-    def write(self, chunk: Union[str, bytes, dict]) ->None:
+    def write(self, chunk: Union[str, bytes, dict]) -> None:
         """Writes the given chunk to the output buffer.

         To write the output to the network, use the `flush()` method below.
@@ -603,9 +910,23 @@ class RequestHandler(object):
         http://haacked.com/archive/2009/06/25/json-hijacking.aspx/ and
         https://github.com/facebook/tornado/issues/1009
         """
-        pass
-
-    def render(self, template_name: str, **kwargs: Any) ->'Future[None]':
+        if self._finished:
+            raise RuntimeError("Cannot write() after finish()")
+        if not isinstance(chunk, (bytes, unicode_type, dict)):
+            message = "write() only accepts bytes, unicode, and dict objects"
+            if isinstance(chunk, list):
+                message += (
+                    ". Lists not accepted for security reasons; see "
+                    + "http://www.tornadoweb.org/en/stable/web.html#tornado.web.RequestHandler.write"  # noqa: E501
+                )
+            raise TypeError(message)
+        if isinstance(chunk, dict):
+            chunk = escape.json_encode(chunk)
+            self.set_header("Content-Type", "application/json; charset=UTF-8")
+        chunk = utf8(chunk)
+        self._write_buffer.append(chunk)
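
A small sketch of the dict path (handler name hypothetical): dicts are
JSON-encoded with the matching ``Content-Type``, while top-level lists are
refused:

    class ApiHandler(tornado.web.RequestHandler):
        def get(self):
            self.write({"status": "ok", "items": [1, 2, 3]})  # application/json
            # self.write([1, 2, 3])  # TypeError: lists rejected for security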
+
+    def render(self, template_name: str, **kwargs: Any) -> "Future[None]":
         """Renders the template with the given arguments as the response.

         ``render()`` calls ``finish()``, so no other output methods can be called
@@ -618,49 +939,160 @@ class RequestHandler(object):

            Now returns a `.Future` instead of ``None``.
         """
-        pass
-
-    def render_linked_js(self, js_files: Iterable[str]) ->str:
+        if self._finished:
+            raise RuntimeError("Cannot render() after finish()")
+        html = self.render_string(template_name, **kwargs)
+
+        # Insert the additional JS and CSS added by the modules on the page
+        js_embed = []
+        js_files = []
+        css_embed = []
+        css_files = []
+        html_heads = []
+        html_bodies = []
+        for module in getattr(self, "_active_modules", {}).values():
+            embed_part = module.embedded_javascript()
+            if embed_part:
+                js_embed.append(utf8(embed_part))
+            file_part = module.javascript_files()
+            if file_part:
+                if isinstance(file_part, (unicode_type, bytes)):
+                    js_files.append(_unicode(file_part))
+                else:
+                    js_files.extend(file_part)
+            embed_part = module.embedded_css()
+            if embed_part:
+                css_embed.append(utf8(embed_part))
+            file_part = module.css_files()
+            if file_part:
+                if isinstance(file_part, (unicode_type, bytes)):
+                    css_files.append(_unicode(file_part))
+                else:
+                    css_files.extend(file_part)
+            head_part = module.html_head()
+            if head_part:
+                html_heads.append(utf8(head_part))
+            body_part = module.html_body()
+            if body_part:
+                html_bodies.append(utf8(body_part))
+
+        if js_files:
+            # Maintain order of JavaScript files given by modules
+            js = self.render_linked_js(js_files)
+            sloc = html.rindex(b"</body>")
+            html = html[:sloc] + utf8(js) + b"\n" + html[sloc:]
+        if js_embed:
+            js_bytes = self.render_embed_js(js_embed)
+            sloc = html.rindex(b"</body>")
+            html = html[:sloc] + js_bytes + b"\n" + html[sloc:]
+        if css_files:
+            css = self.render_linked_css(css_files)
+            hloc = html.index(b"</head>")
+            html = html[:hloc] + utf8(css) + b"\n" + html[hloc:]
+        if css_embed:
+            css_bytes = self.render_embed_css(css_embed)
+            hloc = html.index(b"</head>")
+            html = html[:hloc] + css_bytes + b"\n" + html[hloc:]
+        if html_heads:
+            hloc = html.index(b"</head>")
+            html = html[:hloc] + b"".join(html_heads) + b"\n" + html[hloc:]
+        if html_bodies:
+            hloc = html.index(b"</body>")
+            html = html[:hloc] + b"".join(html_bodies) + b"\n" + html[hloc:]
+        return self.finish(html)
+
+    def render_linked_js(self, js_files: Iterable[str]) -> str:
         """Default method used to render the final js links for the
         rendered webpage.

         Override this method in a sub-classed controller to change the output.
         """
-        pass
+        paths = []
+        unique_paths = set()  # type: Set[str]
+
+        for path in js_files:
+            if not is_absolute(path):
+                path = self.static_url(path)
+            if path not in unique_paths:
+                paths.append(path)
+                unique_paths.add(path)

-    def render_embed_js(self, js_embed: Iterable[bytes]) ->bytes:
+        return "".join(
+            '<script src="'
+            + escape.xhtml_escape(p)
+            + '" type="text/javascript"></script>'
+            for p in paths
+        )
+
+    def render_embed_js(self, js_embed: Iterable[bytes]) -> bytes:
         """Default method used to render the final embedded js for the
         rendered webpage.

         Override this method in a sub-classed controller to change the output.
         """
-        pass
+        return (
+            b'<script type="text/javascript">\n//<![CDATA[\n'
+            + b"\n".join(js_embed)
+            + b"\n//]]>\n</script>"
+        )

-    def render_linked_css(self, css_files: Iterable[str]) ->str:
+    def render_linked_css(self, css_files: Iterable[str]) -> str:
         """Default method used to render the final css links for the
         rendered webpage.

         Override this method in a sub-classed controller to change the output.
         """
-        pass
+        paths = []
+        unique_paths = set()  # type: Set[str]
+
+        for path in css_files:
+            if not is_absolute(path):
+                path = self.static_url(path)
+            if path not in unique_paths:
+                paths.append(path)
+                unique_paths.add(path)

-    def render_embed_css(self, css_embed: Iterable[bytes]) ->bytes:
+        return "".join(
+            '<link href="' + escape.xhtml_escape(p) + '" '
+            'type="text/css" rel="stylesheet"/>'
+            for p in paths
+        )
+
+    def render_embed_css(self, css_embed: Iterable[bytes]) -> bytes:
         """Default method used to render the final embedded css for the
         rendered webpage.

         Override this method in a sub-classed controller to change the output.
         """
-        pass
+        return b'<style type="text/css">\n' + b"\n".join(css_embed) + b"\n</style>"

-    def render_string(self, template_name: str, **kwargs: Any) ->bytes:
+    def render_string(self, template_name: str, **kwargs: Any) -> bytes:
         """Generate the given template with the given arguments.

         We return the generated byte string (in utf8). To generate and
         write a template as a response, use render() above.
         """
-        pass
-
-    def get_template_namespace(self) ->Dict[str, Any]:
+        # If no template_path is specified, use the path of the calling file
+        template_path = self.get_template_path()
+        if not template_path:
+            frame = sys._getframe(0)
+            web_file = frame.f_code.co_filename
+            while frame.f_code.co_filename == web_file and frame.f_back is not None:
+                frame = frame.f_back
+            assert frame.f_code.co_filename is not None
+            template_path = os.path.dirname(frame.f_code.co_filename)
+        with RequestHandler._template_loader_lock:
+            if template_path not in RequestHandler._template_loaders:
+                loader = self.create_template_loader(template_path)
+                RequestHandler._template_loaders[template_path] = loader
+            else:
+                loader = RequestHandler._template_loaders[template_path]
+        t = loader.load(template_name)
+        namespace = self.get_template_namespace()
+        namespace.update(kwargs)
+        return t.generate(**namespace)
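
A sketch of the two entry points (template names hypothetical; assumes a
``template_path`` setting or templates beside the handler's module):

    class StoryHandler(tornado.web.RequestHandler):
        def get(self, story_id):
            # Renders the template and finishes the response.
            self.render("story.html", story_id=story_id)

    # render_string() returns the utf-8 bytes without writing a response,
    # which is useful for composing e.g. email bodies:
    #     body = self.render_string("email.txt", user=user)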
+
+    def get_template_namespace(self) -> Dict[str, Any]:
         """Returns a dictionary to be used as the default template namespace.

         May be overridden by subclasses to add or modify values.
@@ -669,9 +1101,21 @@ class RequestHandler(object):
         defaults in the `tornado.template` module and keyword arguments
         to `render` or `render_string`.
         """
-        pass
-
-    def create_template_loader(self, template_path: str) ->template.BaseLoader:
+        namespace = dict(
+            handler=self,
+            request=self.request,
+            current_user=self.current_user,
+            locale=self.locale,
+            _=self.locale.translate,
+            pgettext=self.locale.pgettext,
+            static_url=self.static_url,
+            xsrf_form_html=self.xsrf_form_html,
+            reverse_url=self.reverse_url,
+        )
+        namespace.update(self.ui)
+        return namespace
+
+    def create_template_loader(self, template_path: str) -> template.BaseLoader:
         """Returns a new template loader for the given path.

         May be overridden by subclasses.  By default returns a
@@ -680,9 +1124,19 @@ class RequestHandler(object):
         settings.  If a ``template_loader`` application setting is
         supplied, uses that instead.
         """
-        pass
-
-    def flush(self, include_footers: bool=False) ->'Future[None]':
+        settings = self.application.settings
+        if "template_loader" in settings:
+            return settings["template_loader"]
+        kwargs = {}
+        if "autoescape" in settings:
+            # autoescape=None means "no escaping", so we have to be sure
+            # to only pass this kwarg if the user asked for it.
+            kwargs["autoescape"] = settings["autoescape"]
+        if "template_whitespace" in settings:
+            kwargs["whitespace"] = settings["template_whitespace"]
+        return template.Loader(template_path, **kwargs)
+
+    def flush(self, include_footers: bool = False) -> "Future[None]":
         """Flushes the current output buffer to the network.

         .. versionchanged:: 4.0
@@ -692,10 +1146,47 @@ class RequestHandler(object):

            The ``callback`` argument was removed.
         """
-        pass
-
-    def finish(self, chunk: Optional[Union[str, bytes, dict]]=None
-        ) ->'Future[None]':
+        assert self.request.connection is not None
+        chunk = b"".join(self._write_buffer)
+        self._write_buffer = []
+        if not self._headers_written:
+            self._headers_written = True
+            for transform in self._transforms:
+                assert chunk is not None
+                (
+                    self._status_code,
+                    self._headers,
+                    chunk,
+                ) = transform.transform_first_chunk(
+                    self._status_code, self._headers, chunk, include_footers
+                )
+            # Ignore the chunk and only write the headers for HEAD requests
+            if self.request.method == "HEAD":
+                chunk = b""
+
+            # Finalize the cookie headers (which have been stored in a side
+            # object so an outgoing cookie could be overwritten before it
+            # is sent).
+            if hasattr(self, "_new_cookie"):
+                for cookie in self._new_cookie.values():
+                    self.add_header("Set-Cookie", cookie.OutputString(None))
+
+            start_line = httputil.ResponseStartLine("", self._status_code, self._reason)
+            return self.request.connection.write_headers(
+                start_line, self._headers, chunk
+            )
+        else:
+            for transform in self._transforms:
+                chunk = transform.transform_chunk(chunk, include_footers)
+            # Ignore the chunk and only write the headers for HEAD requests
+            if self.request.method != "HEAD":
+                return self.request.connection.write(chunk)
+            else:
+                future = Future()  # type: Future[None]
+                future.set_result(None)
+                return future
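
A streaming sketch (handler name hypothetical): each awaited ``flush()``
pushes the buffered bytes to the socket before more work is done:

    class CountdownHandler(tornado.web.RequestHandler):
        async def get(self):
            for i in range(3, 0, -1):
                self.write("%d...\n" % i)
                await self.flush()   # headers go out with the first flush
            self.write("liftoff\n")  # finish() flushes the remainder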
+
+    def finish(self, chunk: Optional[Union[str, bytes, dict]] = None) -> "Future[None]":
         """Finishes this response, ending the HTTP request.

         Passing a ``chunk`` to ``finish()`` is equivalent to passing that
@@ -710,9 +1201,49 @@ class RequestHandler(object):

            Now returns a `.Future` instead of ``None``.
         """
-        pass
+        if self._finished:
+            raise RuntimeError("finish() called twice")
+
+        if chunk is not None:
+            self.write(chunk)
+
+        # Automatically support ETags and add the Content-Length header if
+        # we have not flushed any content yet.
+        if not self._headers_written:
+            if (
+                self._status_code == 200
+                and self.request.method in ("GET", "HEAD")
+                and "Etag" not in self._headers
+            ):
+                self.set_etag_header()
+                if self.check_etag_header():
+                    self._write_buffer = []
+                    self.set_status(304)
+            if self._status_code in (204, 304) or (100 <= self._status_code < 200):
+                assert not self._write_buffer, (
+                    "Cannot send body with %s" % self._status_code
+                )
+                self._clear_representation_headers()
+            elif "Content-Length" not in self._headers:
+                content_length = sum(len(part) for part in self._write_buffer)
+                self.set_header("Content-Length", content_length)

-    def detach(self) ->iostream.IOStream:
+        assert self.request.connection is not None
+        # Now that the request is finished, clear the callback we
+        # set on the HTTPConnection (which would otherwise prevent the
+        # garbage collection of the RequestHandler when there
+        # are keepalive connections)
+        self.request.connection.set_close_callback(None)  # type: ignore
+
+        future = self.flush(include_footers=True)
+        self.request.connection.finish()
+        self._log()
+        self._finished = True
+        self.on_finish()
+        self._break_cycles()
+        return future
+
+    def detach(self) -> iostream.IOStream:
         """Take control of the underlying stream.

         Returns the underlying `.IOStream` object and stops all
@@ -723,9 +1254,16 @@ class RequestHandler(object):

         .. versionadded:: 5.1
         """
-        pass
+        self._finished = True
+        # TODO: add detach to HTTPConnection?
+        return self.request.connection.detach()  # type: ignore

-    def send_error(self, status_code: int=500, **kwargs: Any) ->None:
+    def _break_cycles(self) -> None:
+        # Break up a reference cycle between this handler and the
+        # _ui_module closures to allow for faster GC on CPython.
+        self.ui = None  # type: ignore
+
+    def send_error(self, status_code: int = 500, **kwargs: Any) -> None:
         """Sends the given HTTP error code to the browser.

         If `flush()` has already been called, it is not possible to send
@@ -736,9 +1274,34 @@ class RequestHandler(object):
         Override `write_error()` to customize the error page that is returned.
         Additional keyword arguments are passed through to `write_error`.
         """
-        pass
+        if self._headers_written:
+            gen_log.error("Cannot send error response after headers written")
+            if not self._finished:
+                # If we get an error between writing headers and finishing,
+                # we are unlikely to be able to finish due to a
+                # Content-Length mismatch. Try anyway to release the
+                # socket.
+                try:
+                    self.finish()
+                except Exception:
+                    gen_log.error("Failed to flush partial response", exc_info=True)
+            return
+        self.clear()
+
+        reason = kwargs.get("reason")
+        if "exc_info" in kwargs:
+            exception = kwargs["exc_info"][1]
+            if isinstance(exception, HTTPError) and exception.reason:
+                reason = exception.reason
+        self.set_status(status_code, reason=reason)
+        try:
+            self.write_error(status_code, **kwargs)
+        except Exception:
+            app_log.error("Uncaught exception in write_error", exc_info=True)
+        if not self._finished:
+            self.finish()

-    def write_error(self, status_code: int, **kwargs: Any) ->None:
+    def write_error(self, status_code: int, **kwargs: Any) -> None:
         """Override to implement custom error pages.

         ``write_error`` may call `write`, `render`, `set_header`, etc
@@ -750,10 +1313,21 @@ class RequestHandler(object):
         the "current" exception for purposes of methods like
         ``sys.exc_info()`` or ``traceback.format_exc``.
         """
-        pass
+        if self.settings.get("serve_traceback") and "exc_info" in kwargs:
+            # in debug mode, try to send a traceback
+            self.set_header("Content-Type", "text/plain")
+            for line in traceback.format_exception(*kwargs["exc_info"]):
+                self.write(line)
+            self.finish()
+        else:
+            self.finish(
+                "<html><title>%(code)d: %(message)s</title>"
+                "<body>%(code)d: %(message)s</body></html>"
+                % {"code": status_code, "message": self._reason}
+            )
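
A common override (class name hypothetical): serve machine-readable errors
instead of the default HTML page:

    class JsonErrorHandler(tornado.web.RequestHandler):
        def write_error(self, status_code: int, **kwargs: Any) -> None:
            # finish(dict) JSON-encodes and sets the Content-Type header.
            self.finish({"code": status_code, "message": self._reason})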

     @property
-    def locale(self) ->tornado.locale.Locale:
+    def locale(self) -> tornado.locale.Locale:
         """The locale for the current session.

         Determined by either `get_user_locale`, which you can override to
@@ -764,9 +1338,20 @@ class RequestHandler(object):
         .. versionchanged: 4.1
            Added a property setter.
         """
-        pass
+        if not hasattr(self, "_locale"):
+            loc = self.get_user_locale()
+            if loc is not None:
+                self._locale = loc
+            else:
+                self._locale = self.get_browser_locale()
+                assert self._locale
+        return self._locale

-    def get_user_locale(self) ->Optional[tornado.locale.Locale]:
+    @locale.setter
+    def locale(self, value: tornado.locale.Locale) -> None:
+        self._locale = value
+
+    def get_user_locale(self) -> Optional[tornado.locale.Locale]:
         """Override to determine the locale from the authenticated user.

         If None is returned, we fall back to `get_browser_locale()`.
@@ -774,17 +1359,37 @@ class RequestHandler(object):
         This method should return a `tornado.locale.Locale` object,
         most likely obtained via a call like ``tornado.locale.get("en")``
         """
-        pass
+        return None

-    def get_browser_locale(self, default: str='en_US') ->tornado.locale.Locale:
+    def get_browser_locale(self, default: str = "en_US") -> tornado.locale.Locale:
         """Determines the user's locale from ``Accept-Language`` header.

         See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
         """
-        pass
+        if "Accept-Language" in self.request.headers:
+            languages = self.request.headers["Accept-Language"].split(",")
+            locales = []
+            for language in languages:
+                parts = language.strip().split(";")
+                if len(parts) > 1 and parts[1].strip().startswith("q="):
+                    try:
+                        score = float(parts[1].strip()[2:])
+                        if score < 0:
+                            raise ValueError()
+                    except (ValueError, TypeError):
+                        score = 0.0
+                else:
+                    score = 1.0
+                if score > 0:
+                    locales.append((parts[0], score))
+            if locales:
+                locales.sort(key=lambda pair: pair[1], reverse=True)
+                codes = [loc[0] for loc in locales]
+                return locale.get(*codes)
+        return locale.get(default)
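
Worked example of the scoring above (header value illustrative):

    # "Accept-Language: da, en-gb;q=0.8, en;q=0.7" is scored as
    # [("da", 1.0), ("en-gb", 0.8), ("en", 0.7)], sorted descending;
    # locale.get("da", "en-gb", "en") then returns the first code with a
    # loaded translation, falling back to the default ("en_US").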

     @property
-    def current_user(self) ->Any:
+    def current_user(self) -> Any:
         """The authenticated user for this request.

         This is set in one of two ways:
@@ -815,32 +1420,39 @@ class RequestHandler(object):

         The user object may be any type of the application's choosing.
         """
-        pass
+        if not hasattr(self, "_current_user"):
+            self._current_user = self.get_current_user()
+        return self._current_user

-    def get_current_user(self) ->Any:
+    @current_user.setter
+    def current_user(self, value: Any) -> None:
+        self._current_user = value
+
+    def get_current_user(self) -> Any:
         """Override to determine the current user from, e.g., a cookie.

         This method may not be a coroutine.
         """
-        pass
+        return None
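
The classic synchronous implementation reads a signed cookie (a sketch;
cookie name hypothetical):

    class BaseHandler(tornado.web.RequestHandler):
        def get_current_user(self):
            user = self.get_signed_cookie("user")
            return user.decode() if user else None  # None = not logged in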

-    def get_login_url(self) ->str:
+    def get_login_url(self) -> str:
         """Override to customize the login URL based on the request.

         By default, we use the ``login_url`` application setting.
         """
-        pass
+        self.require_setting("login_url", "@tornado.web.authenticated")
+        return self.application.settings["login_url"]

-    def get_template_path(self) ->Optional[str]:
+    def get_template_path(self) -> Optional[str]:
         """Override to customize template path for each handler.

         By default, we use the ``template_path`` application setting.
         Return None to load templates relative to the calling file.
         """
-        pass
+        return self.application.settings.get("template_path")

     @property
-    def xsrf_token(self) ->bytes:
+    def xsrf_token(self) -> bytes:
         """The XSRF-prevention token for the current user/session.

         To prevent cross-site request forgery, we set an '_xsrf' cookie
@@ -872,9 +1484,32 @@ class RequestHandler(object):
            will set the ``secure`` and ``httponly`` flags on the
            ``_xsrf`` cookie.
         """
-        pass
-
-    def _get_raw_xsrf_token(self) ->Tuple[Optional[int], bytes, float]:
+        if not hasattr(self, "_xsrf_token"):
+            version, token, timestamp = self._get_raw_xsrf_token()
+            output_version = self.settings.get("xsrf_cookie_version", 2)
+            cookie_kwargs = self.settings.get("xsrf_cookie_kwargs", {})
+            if output_version == 1:
+                self._xsrf_token = binascii.b2a_hex(token)
+            elif output_version == 2:
+                mask = os.urandom(4)
+                self._xsrf_token = b"|".join(
+                    [
+                        b"2",
+                        binascii.b2a_hex(mask),
+                        binascii.b2a_hex(_websocket_mask(mask, token)),
+                        utf8(str(int(timestamp))),
+                    ]
+                )
+            else:
+                raise ValueError("unknown xsrf cookie version %d", output_version)
+            if version is None:
+                if self.current_user and "expires_days" not in cookie_kwargs:
+                    cookie_kwargs["expires_days"] = 30
+                cookie_name = self.settings.get("xsrf_cookie_name", "_xsrf")
+                self.set_cookie(cookie_name, self._xsrf_token, **cookie_kwargs)
+        return self._xsrf_token
+
+    def _get_raw_xsrf_token(self) -> Tuple[Optional[int], bytes, float]:
         """Read or generate the xsrf token in its raw form.

         The raw_xsrf_token is a tuple containing:
@@ -885,16 +1520,59 @@ class RequestHandler(object):
         * timestamp: the time this token was generated (will not be accurate
           for version 1 cookies)
         """
-        pass
-
-    def _decode_xsrf_token(self, cookie: str) ->Tuple[Optional[int],
-        Optional[bytes], Optional[float]]:
+        if not hasattr(self, "_raw_xsrf_token"):
+            cookie_name = self.settings.get("xsrf_cookie_name", "_xsrf")
+            cookie = self.get_cookie(cookie_name)
+            if cookie:
+                version, token, timestamp = self._decode_xsrf_token(cookie)
+            else:
+                version, token, timestamp = None, None, None
+            if token is None:
+                version = None
+                token = os.urandom(16)
+                timestamp = time.time()
+            assert token is not None
+            assert timestamp is not None
+            self._raw_xsrf_token = (version, token, timestamp)
+        return self._raw_xsrf_token
+
+    def _decode_xsrf_token(
+        self, cookie: str
+    ) -> Tuple[Optional[int], Optional[bytes], Optional[float]]:
         """Convert a cookie string into a the tuple form returned by
         _get_raw_xsrf_token.
         """
-        pass

-    def check_xsrf_cookie(self) ->None:
+        try:
+            m = _signed_value_version_re.match(utf8(cookie))
+
+            if m:
+                version = int(m.group(1))
+                if version == 2:
+                    _, mask_str, masked_token, timestamp_str = cookie.split("|")
+
+                    mask = binascii.a2b_hex(utf8(mask_str))
+                    token = _websocket_mask(mask, binascii.a2b_hex(utf8(masked_token)))
+                    timestamp = int(timestamp_str)
+                    return version, token, timestamp
+                else:
+                    # Treat unknown versions as not present instead of failing.
+                    raise Exception("Unknown xsrf cookie version")
+            else:
+                version = 1
+                try:
+                    token = binascii.a2b_hex(utf8(cookie))
+                except (binascii.Error, TypeError):
+                    token = utf8(cookie)
+                # We don't have a usable timestamp in older versions.
+                timestamp = int(time.time())
+                return (version, token, timestamp)
+        except Exception:
+            # Catch exceptions and return nothing instead of failing.
+            gen_log.debug("Uncaught exception in _decode_xsrf_token", exc_info=True)
+            return None, None, None
+
+    def check_xsrf_cookie(self) -> None:
         """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.

         To prevent cross-site request forgery, we set an ``_xsrf``
@@ -912,9 +1590,27 @@ class RequestHandler(object):
            Added support for cookie version 2.  Both versions 1 and 2 are
            supported.
         """
-        pass
-
-    def xsrf_form_html(self) ->str:
+        # Prior to release 1.1.1, this check was ignored if the HTTP header
+        # ``X-Requested-With: XMLHTTPRequest`` was present.  This exception
+        # has been shown to be insecure and has been removed.  For more
+        # information please see
+        # http://www.djangoproject.com/weblog/2011/feb/08/security/
+        # http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
+        token = (
+            self.get_argument("_xsrf", None)
+            or self.request.headers.get("X-Xsrftoken")
+            or self.request.headers.get("X-Csrftoken")
+        )
+        if not token:
+            raise HTTPError(403, "'_xsrf' argument missing from POST")
+        _, token, _ = self._decode_xsrf_token(token)
+        _, expected_token, _ = self._get_raw_xsrf_token()
+        if not token:
+            raise HTTPError(403, "'_xsrf' argument has invalid format")
+        if not hmac.compare_digest(utf8(token), utf8(expected_token)):
+            raise HTTPError(403, "XSRF cookie does not match POST argument")
+
+    def xsrf_form_html(self) -> str:
         """An HTML ``<input/>`` element to be included with all POST forms.

         It defines the ``_xsrf`` input value, which we check on all POST
@@ -927,10 +1623,15 @@ class RequestHandler(object):

         See `check_xsrf_cookie()` above for more information.
         """
-        pass
+        return (
+            '<input type="hidden" name="_xsrf" value="'
+            + escape.xhtml_escape(self.xsrf_token)
+            + '"/>'
+        )
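
With ``xsrf_cookies=True`` in the `Application` settings, every POST form in a
template needs the hidden field (template snippet shown in comments to keep
this sketch in Python):

    # <form action="/new_message" method="post">
    #   {% module xsrf_form_html() %}
    #   <input type="text" name="message"/>
    #   <input type="submit" value="Post"/>
    # </form>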

-    def static_url(self, path: str, include_host: Optional[bool]=None, **
-        kwargs: Any) ->str:
+    def static_url(
+        self, path: str, include_host: Optional[bool] = None, **kwargs: Any
+    ) -> str:
         """Returns a static URL for the given relative static file path.

         This method requires you set the ``static_path`` setting in your
@@ -951,17 +1652,34 @@ class RequestHandler(object):
         calls that do not pass ``include_host`` as a keyword argument.

         """
-        pass
+        self.require_setting("static_path", "static_url")
+        get_url = self.settings.get(
+            "static_handler_class", StaticFileHandler
+        ).make_static_url

-    def require_setting(self, name: str, feature: str='this feature') ->None:
+        if include_host is None:
+            include_host = getattr(self, "include_host", False)
+
+        if include_host:
+            base = self.request.protocol + "://" + self.request.host
+        else:
+            base = ""
+
+        return base + get_url(self.settings, path, **kwargs)
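
Illustrative calls (assume ``static_path`` is set; the default
`StaticFileHandler` appends a content-hash version argument for cache
busting, and the host below is hypothetical):

    self.static_url("css/site.css")
    # -> "/static/css/site.css?v=<hash>"
    self.static_url("css/site.css", include_host=True)
    # -> "https://example.com/static/css/site.css?v=<hash>"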
+
+    def require_setting(self, name: str, feature: str = "this feature") -> None:
         """Raises an exception if the given app setting is not defined."""
-        pass
+        if not self.application.settings.get(name):
+            raise Exception(
+                "You must define the '%s' setting in your "
+                "application to use %s" % (name, feature)
+            )

-    def reverse_url(self, name: str, *args: Any) ->str:
+    def reverse_url(self, name: str, *args: Any) -> str:
         """Alias for `Application.reverse_url`."""
-        pass
+        return self.application.reverse_url(name, *args)

-    def compute_etag(self) ->Optional[str]:
+    def compute_etag(self) -> Optional[str]:
         """Computes the etag header to be used for this request.

         By default uses a hash of the content written so far.
@@ -969,18 +1687,23 @@ class RequestHandler(object):
         May be overridden to provide custom etag implementations,
         or may return None to disable tornado's default etag support.
         """
-        pass
+        hasher = hashlib.sha1()
+        for part in self._write_buffer:
+            hasher.update(part)
+        return '"%s"' % hasher.hexdigest()

-    def set_etag_header(self) ->None:
+    def set_etag_header(self) -> None:
         """Sets the response's Etag header using ``self.compute_etag()``.

         Note: no header will be set if ``compute_etag()`` returns ``None``.

         This method is called automatically when the request is finished.
         """
-        pass
+        etag = self.compute_etag()
+        if etag is not None:
+            self.set_header("Etag", etag)

-    def check_etag_header(self) ->bool:
+    def check_etag_header(self) -> bool:
         """Checks the ``Etag`` header against requests's ``If-None-Match``.

         Returns ``True`` if the request's Etag matches and a 304 should be
@@ -997,33 +1720,143 @@ class RequestHandler(object):
         before completing the request.  The ``Etag`` header should be set
         (perhaps with `set_etag_header`) before calling this method.
         """
-        pass
-
-    async def _execute(self, transforms: List['OutputTransform'], *args:
-        bytes, **kwargs: bytes) ->None:
+        computed_etag = utf8(self._headers.get("Etag", ""))
+        # Find all weak and strong etag values from If-None-Match header
+        # because RFC 7232 allows multiple etag values in a single header.
+        etags = re.findall(
+            rb'\*|(?:W/)?"[^"]*"', utf8(self.request.headers.get("If-None-Match", ""))
+        )
+        if not computed_etag or not etags:
+            return False
+
+        match = False
+        if etags[0] == b"*":
+            match = True
+        else:
+            # Use a weak comparison when comparing entity-tags.
+            def val(x: bytes) -> bytes:
+                return x[2:] if x.startswith(b"W/") else x
+
+            for etag in etags:
+                if val(etag) == val(computed_etag):
+                    match = True
+                    break
+        return match
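
A per-handler opt-out sketch (class name hypothetical), since returning
``None`` disables the default ETag/304 machinery:

    class NoEtagHandler(tornado.web.RequestHandler):
        def compute_etag(self) -> Optional[str]:
            return None  # skips set_etag_header()/check_etag_header()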
+
+    async def _execute(
+        self, transforms: List["OutputTransform"], *args: bytes, **kwargs: bytes
+    ) -> None:
         """Executes this request with the given output transforms."""
-        pass
+        self._transforms = transforms
+        try:
+            if self.request.method not in self.SUPPORTED_METHODS:
+                raise HTTPError(405)
+            self.path_args = [self.decode_argument(arg) for arg in args]
+            self.path_kwargs = dict(
+                (k, self.decode_argument(v, name=k)) for (k, v) in kwargs.items()
+            )
+            # If XSRF cookies are turned on, reject form submissions without
+            # the proper cookie
+            if self.request.method not in (
+                "GET",
+                "HEAD",
+                "OPTIONS",
+            ) and self.application.settings.get("xsrf_cookies"):
+                self.check_xsrf_cookie()
+
+            result = self.prepare()
+            if result is not None:
+                result = await result  # type: ignore
+            if self._prepared_future is not None:
+                # Tell the Application we've finished with prepare()
+                # and are ready for the body to arrive.
+                future_set_result_unless_cancelled(self._prepared_future, None)
+            if self._finished:
+                return

-    def data_received(self, chunk: bytes) ->Optional[Awaitable[None]]:
+            if _has_stream_request_body(self.__class__):
+                # In streaming mode request.body is a Future that signals
+                # the body has been completely received.  The Future has no
+                # result; the data has been passed to self.data_received
+                # instead.
+                try:
+                    await self.request._body_future
+                except iostream.StreamClosedError:
+                    return
+
+            method = getattr(self, self.request.method.lower())
+            result = method(*self.path_args, **self.path_kwargs)
+            if result is not None:
+                result = await result
+            if self._auto_finish and not self._finished:
+                self.finish()
+        except Exception as e:
+            try:
+                self._handle_request_exception(e)
+            except Exception:
+                app_log.error("Exception in exception handler", exc_info=True)
+            finally:
+                # Unset result to avoid circular references
+                result = None
+            if self._prepared_future is not None and not self._prepared_future.done():
+                # In case we failed before setting _prepared_future, do it
+                # now (to unblock the HTTP server).  Note that this is not
+                # in a finally block to avoid GC issues prior to Python 3.4.
+                self._prepared_future.set_result(None)
+
+    def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
         """Implement this method to handle streamed request data.

         Requires the `.stream_request_body` decorator.

         May be a coroutine for flow control.
         """
-        pass
+        raise NotImplementedError()

-    def _log(self) ->None:
+    def _log(self) -> None:
         """Logs the current request.

         Sort of deprecated since this functionality was moved to the
         Application, but left in place for the benefit of existing apps
         that have overridden this method.
         """
-        pass
-
-    def log_exception(self, typ: 'Optional[Type[BaseException]]', value:
-        Optional[BaseException], tb: Optional[TracebackType]) ->None:
+        self.application.log_request(self)
+
+    def _request_summary(self) -> str:
+        return "%s %s (%s)" % (
+            self.request.method,
+            self.request.uri,
+            self.request.remote_ip,
+        )
+
+    def _handle_request_exception(self, e: BaseException) -> None:
+        if isinstance(e, Finish):
+            # Not an error; just finish the request without logging.
+            if not self._finished:
+                self.finish(*e.args)
+            return
+        try:
+            self.log_exception(*sys.exc_info())
+        except Exception:
+            # An error here should still get a best-effort send_error()
+            # to avoid leaking the connection.
+            app_log.error("Error in exception logger", exc_info=True)
+        if self._finished:
+            # Extra errors after the request has been finished should
+            # be logged, but there is no reason to continue to try and
+            # send a response.
+            return
+        if isinstance(e, HTTPError):
+            self.send_error(e.status_code, exc_info=sys.exc_info())
+        else:
+            self.send_error(500, exc_info=sys.exc_info())
+
+    def log_exception(
+        self,
+        typ: "Optional[Type[BaseException]]",
+        value: Optional[BaseException],
+        tb: Optional[TracebackType],
+    ) -> None:
         """Override to customize logging of uncaught exceptions.

         By default logs instances of `HTTPError` as warnings without
@@ -1033,14 +1866,48 @@ class RequestHandler(object):

         .. versionadded:: 3.1
         """
-        pass
-
-
-_RequestHandlerType = TypeVar('_RequestHandlerType', bound=RequestHandler)
-
-
-def stream_request_body(cls: Type[_RequestHandlerType]) ->Type[
-    _RequestHandlerType]:
+        if isinstance(value, HTTPError):
+            if value.log_message:
+                format = "%d %s: " + value.log_message
+                args = [value.status_code, self._request_summary()] + list(value.args)
+                gen_log.warning(format, *args)
+        else:
+            app_log.error(
+                "Uncaught exception %s\n%r",
+                self._request_summary(),
+                self.request,
+                exc_info=(typ, value, tb),  # type: ignore
+            )
+
+    def _ui_module(self, name: str, module: Type["UIModule"]) -> Callable[..., str]:
+        def render(*args, **kwargs) -> str:  # type: ignore
+            if not hasattr(self, "_active_modules"):
+                self._active_modules = {}  # type: Dict[str, UIModule]
+            if name not in self._active_modules:
+                self._active_modules[name] = module(self)
+            rendered = self._active_modules[name].render(*args, **kwargs)
+            return rendered
+
+        return render
+
+    def _ui_method(self, method: Callable[..., str]) -> Callable[..., str]:
+        return lambda *args, **kwargs: method(self, *args, **kwargs)
+
+    def _clear_representation_headers(self) -> None:
+        # 304 responses should not contain representation metadata
+        # headers (defined in
+        # https://tools.ietf.org/html/rfc7231#section-3.1)
+        # not explicitly allowed by
+        # https://tools.ietf.org/html/rfc7232#section-4.1
+        headers = ["Content-Encoding", "Content-Language", "Content-Type"]
+        for h in headers:
+            self.clear_header(h)
+
+
+_RequestHandlerType = TypeVar("_RequestHandlerType", bound=RequestHandler)
+
+
+def stream_request_body(cls: Type[_RequestHandlerType]) -> Type[_RequestHandlerType]:
     """Apply to `RequestHandler` subclasses to enable streaming body support.

     This decorator implies the following changes:
@@ -1060,30 +1927,73 @@ def stream_request_body(cls: Type[_RequestHandlerType]) ->Type[

     See the `file receiver demo <https://github.com/tornadoweb/tornado/tree/stable/demos/file_upload/>`_
     for example usage.
-    """
-    pass
+    """  # noqa: E501
+    if not issubclass(cls, RequestHandler):
+        raise TypeError("expected subclass of RequestHandler, got %r", cls)
+    cls._stream_request_body = True
+    return cls
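
A minimal receiver following the decorator's contract (names hypothetical):

    @tornado.web.stream_request_body
    class UploadHandler(tornado.web.RequestHandler):
        def prepare(self):
            self.bytes_read = 0

        def data_received(self, chunk: bytes):
            self.bytes_read += len(chunk)  # body never lands in request.body

        def put(self):
            self.write("received %d bytes\n" % self.bytes_read)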
+
+
+def _has_stream_request_body(cls: Type[RequestHandler]) -> bool:
+    if not issubclass(cls, RequestHandler):
+        raise TypeError("expected subclass of RequestHandler, got %r", cls)
+    return cls._stream_request_body


-def removeslash(method: Callable[..., Optional[Awaitable[None]]]) ->Callable[
-    ..., Optional[Awaitable[None]]]:
+def removeslash(
+    method: Callable[..., Optional[Awaitable[None]]]
+) -> Callable[..., Optional[Awaitable[None]]]:
     """Use this decorator to remove trailing slashes from the request path.

     For example, a request to ``/foo/`` would redirect to ``/foo`` with this
     decorator. Your request handler mapping should use a regular expression
     like ``r'/foo/*'`` in conjunction with using the decorator.
     """
-    pass
-

-def addslash(method: Callable[..., Optional[Awaitable[None]]]) ->Callable[
-    ..., Optional[Awaitable[None]]]:
+    @functools.wraps(method)
+    def wrapper(  # type: ignore
+        self: RequestHandler, *args, **kwargs
+    ) -> Optional[Awaitable[None]]:
+        if self.request.path.endswith("/"):
+            if self.request.method in ("GET", "HEAD"):
+                uri = self.request.path.rstrip("/")
+                if uri:  # don't try to redirect '/' to ''
+                    if self.request.query:
+                        uri += "?" + self.request.query
+                    self.redirect(uri, permanent=True)
+                    return None
+            else:
+                raise HTTPError(404)
+        return method(self, *args, **kwargs)
+
+    return wrapper
+
+
+def addslash(
+    method: Callable[..., Optional[Awaitable[None]]]
+) -> Callable[..., Optional[Awaitable[None]]]:
     """Use this decorator to add a missing trailing slash to the request path.

     For example, a request to ``/foo`` would redirect to ``/foo/`` with this
     decorator. Your request handler mapping should use a regular expression
     like ``r'/foo/?'`` in conjunction with using the decorator.
     """
-    pass
+
+    @functools.wraps(method)
+    def wrapper(  # type: ignore
+        self: RequestHandler, *args, **kwargs
+    ) -> Optional[Awaitable[None]]:
+        if not self.request.path.endswith("/"):
+            if self.request.method in ("GET", "HEAD"):
+                uri = self.request.path + "/"
+                if self.request.query:
+                    uri += "?" + self.request.query
+                self.redirect(uri, permanent=True)
+                return None
+            raise HTTPError(404)
+        return method(self, *args, **kwargs)
+
+    return wrapper
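
A sketch of how the two decorators pair with the suggested route
patterns (handler names and paths hypothetical)::

    class FooHandler(RequestHandler):
        @removeslash
        def get(self):
            self.write("foo")  # /foo/ redirects permanently to /foo

    class BarHandler(RequestHandler):
        @addslash
        def get(self):
            self.write("bar")  # /bar redirects permanently to /bar/

    app = Application([
        (r"/foo/*", FooHandler),  # trailing /* as the docstring suggests
        (r"/bar/?", BarHandler),  # trailing /? as the docstring suggests
    ])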


 class _ApplicationRouter(ReversibleRuleRouter):
@@ -1097,15 +2007,36 @@ class _ApplicationRouter(ReversibleRuleRouter):
         `_ApplicationRouter` instance.
     """

-    def __init__(self, application: 'Application', rules: Optional[
-        _RuleList]=None) ->None:
+    def __init__(
+        self, application: "Application", rules: Optional[_RuleList] = None
+    ) -> None:
         assert isinstance(application, Application)
         self.application = application
         super().__init__(rules)

+    def process_rule(self, rule: Rule) -> Rule:
+        rule = super().process_rule(rule)
+
+        if isinstance(rule.target, (list, tuple)):
+            rule.target = _ApplicationRouter(
+                self.application, rule.target  # type: ignore
+            )
+
+        return rule
+
+    def get_target_delegate(
+        self, target: Any, request: httputil.HTTPServerRequest, **target_params: Any
+    ) -> Optional[httputil.HTTPMessageDelegate]:
+        if isclass(target) and issubclass(target, RequestHandler):
+            return self.application.get_handler_delegate(
+                request, target, **target_params
+            )
+
+        return super().get_target_delegate(target, request, **target_params)
+

 class Application(ReversibleRouter):
-    """A collection of request handlers that make up a web application.
+    r"""A collection of request handlers that make up a web application.

     Instances of this class are callable and can be passed directly to
     HTTPServer to serve the application::
@@ -1156,7 +2087,7 @@ class Application(ReversibleRouter):
     We support virtual hosts with the `add_handlers` method, which takes in
     a host regular expression as the first argument::

-        application.add_handlers(r"www\\.myhost\\.com", [
+        application.add_handlers(r"www\.myhost\.com", [
             (r"/article/([0-9]+)", ArticleHandler),
         ])

@@ -1187,50 +2118,73 @@ class Application(ReversibleRouter):

     """

-    def __init__(self, handlers: Optional[_RuleList]=None, default_host:
-        Optional[str]=None, transforms: Optional[List[Type[
-        'OutputTransform']]]=None, **settings: Any) ->None:
+    def __init__(
+        self,
+        handlers: Optional[_RuleList] = None,
+        default_host: Optional[str] = None,
+        transforms: Optional[List[Type["OutputTransform"]]] = None,
+        **settings: Any,
+    ) -> None:
         if transforms is None:
-            self.transforms = []
-            if settings.get('compress_response') or settings.get('gzip'):
+            self.transforms = []  # type: List[Type[OutputTransform]]
+            if settings.get("compress_response") or settings.get("gzip"):
                 self.transforms.append(GZipContentEncoding)
         else:
             self.transforms = transforms
         self.default_host = default_host
         self.settings = settings
-        self.ui_modules = {'linkify': _linkify, 'xsrf_form_html':
-            _xsrf_form_html, 'Template': TemplateModule}
-        self.ui_methods = {}
-        self._load_ui_modules(settings.get('ui_modules', {}))
-        self._load_ui_methods(settings.get('ui_methods', {}))
-        if self.settings.get('static_path'):
-            path = self.settings['static_path']
+        self.ui_modules = {
+            "linkify": _linkify,
+            "xsrf_form_html": _xsrf_form_html,
+            "Template": TemplateModule,
+        }
+        self.ui_methods = {}  # type: Dict[str, Callable[..., str]]
+        self._load_ui_modules(settings.get("ui_modules", {}))
+        self._load_ui_methods(settings.get("ui_methods", {}))
+        if self.settings.get("static_path"):
+            path = self.settings["static_path"]
             handlers = list(handlers or [])
-            static_url_prefix = settings.get('static_url_prefix', '/static/')
-            static_handler_class = settings.get('static_handler_class',
-                StaticFileHandler)
-            static_handler_args = settings.get('static_handler_args', {})
-            static_handler_args['path'] = path
-            for pattern in [re.escape(static_url_prefix) + '(.*)',
-                '/(favicon\\.ico)', '/(robots\\.txt)']:
-                handlers.insert(0, (pattern, static_handler_class,
-                    static_handler_args))
-        if self.settings.get('debug'):
-            self.settings.setdefault('autoreload', True)
-            self.settings.setdefault('compiled_template_cache', False)
-            self.settings.setdefault('static_hash_cache', False)
-            self.settings.setdefault('serve_traceback', True)
+            static_url_prefix = settings.get("static_url_prefix", "/static/")
+            static_handler_class = settings.get(
+                "static_handler_class", StaticFileHandler
+            )
+            static_handler_args = settings.get("static_handler_args", {})
+            static_handler_args["path"] = path
+            for pattern in [
+                re.escape(static_url_prefix) + r"(.*)",
+                r"/(favicon\.ico)",
+                r"/(robots\.txt)",
+            ]:
+                handlers.insert(0, (pattern, static_handler_class, static_handler_args))
+
+        if self.settings.get("debug"):
+            self.settings.setdefault("autoreload", True)
+            self.settings.setdefault("compiled_template_cache", False)
+            self.settings.setdefault("static_hash_cache", False)
+            self.settings.setdefault("serve_traceback", True)
+
         self.wildcard_router = _ApplicationRouter(self, handlers)
-        self.default_router = _ApplicationRouter(self, [Rule(AnyMatches(),
-            self.wildcard_router)])
-        if self.settings.get('autoreload'):
+        self.default_router = _ApplicationRouter(
+            self, [Rule(AnyMatches(), self.wildcard_router)]
+        )
+
+        # Automatically reload modified modules
+        if self.settings.get("autoreload"):
             from tornado import autoreload
+
             autoreload.start()

-    def listen(self, port: int, address: Optional[str]=None, *, family:
-        socket.AddressFamily=socket.AF_UNSPEC, backlog: int=tornado.netutil
-        ._DEFAULT_BACKLOG, flags: Optional[int]=None, reuse_port: bool=
-        False, **kwargs: Any) ->HTTPServer:
+    def listen(
+        self,
+        port: int,
+        address: Optional[str] = None,
+        *,
+        family: socket.AddressFamily = socket.AF_UNSPEC,
+        backlog: int = tornado.netutil._DEFAULT_BACKLOG,
+        flags: Optional[int] = None,
+        reuse_port: bool = False,
+        **kwargs: Any,
+    ) -> HTTPServer:
         """Starts an HTTP server for this application on the given port.

         This is a convenience alias for creating an `.HTTPServer` object and
@@ -1253,25 +2207,97 @@ class Application(ReversibleRouter):
            Added support for new keyword arguments in `.TCPServer.listen`,
            including ``reuse_port``.
         """
-        pass
-
-    def add_handlers(self, host_pattern: str, host_handlers: _RuleList) ->None:
+        server = HTTPServer(self, **kwargs)
+        server.listen(
+            port,
+            address=address,
+            family=family,
+            backlog=backlog,
+            flags=flags,
+            reuse_port=reuse_port,
+        )
+        return server
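
For example, a single call to the convenience method above replaces
explicit server construction (port, address, and handler name
illustrative)::

    app = Application([(r"/", MainHandler)])
    server = app.listen(8888, address="127.0.0.1", reuse_port=True)
    # equivalent to:
    #     server = HTTPServer(app)
    #     server.listen(8888, address="127.0.0.1", reuse_port=True)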
+
+    def add_handlers(self, host_pattern: str, host_handlers: _RuleList) -> None:
         """Appends the given handlers to our handler list.

         Host patterns are processed sequentially in the order they were
         added. All matching patterns will be considered.
         """
-        pass
+        host_matcher = HostMatches(host_pattern)
+        rule = Rule(host_matcher, _ApplicationRouter(self, host_handlers))

-    def __call__(self, request: httputil.HTTPServerRequest) ->Optional[
-        Awaitable[None]]:
+        self.default_router.rules.insert(-1, rule)
+
+        if self.default_host is not None:
+            self.wildcard_router.add_rules(
+                [(DefaultHostMatches(self, host_matcher.host_pattern), host_handlers)]
+            )
+
+    def add_transform(self, transform_class: Type["OutputTransform"]) -> None:
+        self.transforms.append(transform_class)
+
+    def _load_ui_methods(self, methods: Any) -> None:
+        if isinstance(methods, types.ModuleType):
+            self._load_ui_methods(dict((n, getattr(methods, n)) for n in dir(methods)))
+        elif isinstance(methods, list):
+            for m in methods:
+                self._load_ui_methods(m)
+        else:
+            for name, fn in methods.items():
+                if (
+                    not name.startswith("_")
+                    and hasattr(fn, "__call__")
+                    and name[0].lower() == name[0]
+                ):
+                    self.ui_methods[name] = fn
+
+    def _load_ui_modules(self, modules: Any) -> None:
+        if isinstance(modules, types.ModuleType):
+            self._load_ui_modules(dict((n, getattr(modules, n)) for n in dir(modules)))
+        elif isinstance(modules, list):
+            for m in modules:
+                self._load_ui_modules(m)
+        else:
+            assert isinstance(modules, dict)
+            for name, cls in modules.items():
+                try:
+                    if issubclass(cls, UIModule):
+                        self.ui_modules[name] = cls
+                except TypeError:
+                    pass
+
+    def __call__(
+        self, request: httputil.HTTPServerRequest
+    ) -> Optional[Awaitable[None]]:
+        # Legacy HTTPServer interface
         dispatcher = self.find_handler(request)
         return dispatcher.execute()

-    def get_handler_delegate(self, request: httputil.HTTPServerRequest,
-        target_class: Type[RequestHandler], target_kwargs: Optional[Dict[
-        str, Any]]=None, path_args: Optional[List[bytes]]=None, path_kwargs:
-        Optional[Dict[str, bytes]]=None) ->'_HandlerDelegate':
+    def find_handler(
+        self, request: httputil.HTTPServerRequest, **kwargs: Any
+    ) -> "_HandlerDelegate":
+        route = self.default_router.find_handler(request)
+        if route is not None:
+            return cast("_HandlerDelegate", route)
+
+        if self.settings.get("default_handler_class"):
+            return self.get_handler_delegate(
+                request,
+                self.settings["default_handler_class"],
+                self.settings.get("default_handler_args", {}),
+            )
+
+        return self.get_handler_delegate(request, ErrorHandler, {"status_code": 404})
+
+    def get_handler_delegate(
+        self,
+        request: httputil.HTTPServerRequest,
+        target_class: Type[RequestHandler],
+        target_kwargs: Optional[Dict[str, Any]] = None,
+        path_args: Optional[List[bytes]] = None,
+        path_kwargs: Optional[Dict[str, bytes]] = None,
+    ) -> "_HandlerDelegate":
         """Returns `~.httputil.HTTPMessageDelegate` that can serve a request
         for application and `RequestHandler` subclass.

@@ -1282,9 +2308,11 @@ class Application(ReversibleRouter):
             will be executed while handling a request (``get``, ``post`` or any other).
         :arg dict path_kwargs: keyword arguments for ``target_class`` HTTP method.
         """
-        pass
+        return _HandlerDelegate(
+            self, request, target_class, target_kwargs, path_args, path_kwargs
+        )

-    def reverse_url(self, name: str, *args: Any) ->str:
+    def reverse_url(self, name: str, *args: Any) -> str:
         """Returns a URL path for handler named ``name``

         The handler must be added to the application as a named `URLSpec`.
@@ -1293,9 +2321,13 @@ class Application(ReversibleRouter):
         They will be converted to strings if necessary, encoded as utf8,
         and url-escaped.
         """
-        pass
+        reversed_url = self.default_router.reverse_url(name, *args)
+        if reversed_url is not None:
+            return reversed_url
+
+        raise KeyError("%s not found in named urls" % name)
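
A round trip with a named `URLSpec` might look like this sketch
(handler class and name hypothetical)::

    app = Application([
        url(r"/article/([0-9]+)", ArticleHandler, name="article"),
    ])
    app.reverse_url("article", 42)  # "/article/42"
    app.reverse_url("missing")      # raises KeyError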

-    def log_request(self, handler: RequestHandler) ->None:
+    def log_request(self, handler: RequestHandler) -> None:
         """Writes a completed HTTP request to the logs.

         By default writes to the python root logger.  To change
@@ -1303,15 +2335,34 @@ class Application(ReversibleRouter):
         or pass a function in the application settings dictionary as
         ``log_function``.
         """
-        pass
+        if "log_function" in self.settings:
+            self.settings["log_function"](handler)
+            return
+        if handler.get_status() < 400:
+            log_method = access_log.info
+        elif handler.get_status() < 500:
+            log_method = access_log.warning
+        else:
+            log_method = access_log.error
+        request_time = 1000.0 * handler.request.request_time()
+        log_method(
+            "%d %s %.2fms",
+            handler.get_status(),
+            handler._request_summary(),
+            request_time,
+        )
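
The ``log_function`` setting mentioned above replaces this behavior
entirely; a minimal sketch (function name hypothetical)::

    def my_log_function(handler):
        # Runs once per completed request instead of the default logging.
        print(handler.get_status(), handler.request.uri)

    app = Application(handlers, log_function=my_log_function)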


 class _HandlerDelegate(httputil.HTTPMessageDelegate):
-
-    def __init__(self, application: Application, request: httputil.
-        HTTPServerRequest, handler_class: Type[RequestHandler],
-        handler_kwargs: Optional[Dict[str, Any]], path_args: Optional[List[
-        bytes]], path_kwargs: Optional[Dict[str, bytes]]) ->None:
+    def __init__(
+        self,
+        application: Application,
+        request: httputil.HTTPServerRequest,
+        handler_class: Type[RequestHandler],
+        handler_kwargs: Optional[Dict[str, Any]],
+        path_args: Optional[List[bytes]],
+        path_kwargs: Optional[Dict[str, bytes]],
+    ) -> None:
         self.application = application
         self.connection = request.connection
         self.request = request
@@ -1319,9 +2370,77 @@ class _HandlerDelegate(httputil.HTTPMessageDelegate):
         self.handler_kwargs = handler_kwargs or {}
         self.path_args = path_args or []
         self.path_kwargs = path_kwargs or {}
-        self.chunks = []
+        self.chunks = []  # type: List[bytes]
         self.stream_request_body = _has_stream_request_body(self.handler_class)

+    def headers_received(
+        self,
+        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
+        headers: httputil.HTTPHeaders,
+    ) -> Optional[Awaitable[None]]:
+        if self.stream_request_body:
+            self.request._body_future = Future()
+            return self.execute()
+        return None
+
+    def data_received(self, data: bytes) -> Optional[Awaitable[None]]:
+        if self.stream_request_body:
+            return self.handler.data_received(data)
+        else:
+            self.chunks.append(data)
+            return None
+
+    def finish(self) -> None:
+        if self.stream_request_body:
+            future_set_result_unless_cancelled(self.request._body_future, None)
+        else:
+            self.request.body = b"".join(self.chunks)
+            self.request._parse_body()
+            self.execute()
+
+    def on_connection_close(self) -> None:
+        if self.stream_request_body:
+            self.handler.on_connection_close()
+        else:
+            self.chunks = None  # type: ignore
+
+    def execute(self) -> Optional[Awaitable[None]]:
+        # If template cache is disabled (usually in the debug mode),
+        # re-compile templates and reload static files on every
+        # request so you don't need to restart to see changes
+        if not self.application.settings.get("compiled_template_cache", True):
+            with RequestHandler._template_loader_lock:
+                for loader in RequestHandler._template_loaders.values():
+                    loader.reset()
+        if not self.application.settings.get("static_hash_cache", True):
+            static_handler_class = self.application.settings.get(
+                "static_handler_class", StaticFileHandler
+            )
+            static_handler_class.reset()
+
+        self.handler = self.handler_class(
+            self.application, self.request, **self.handler_kwargs
+        )
+        transforms = [t(self.request) for t in self.application.transforms]
+
+        if self.stream_request_body:
+            self.handler._prepared_future = Future()
+        # Note that if an exception escapes handler._execute it will be
+        # trapped in the Future it returns (which we are ignoring here,
+        # leaving it to be logged when the Future is GC'd).
+        # However, that shouldn't happen because _execute has a blanket
+        # except handler, and we cannot easily access the IOLoop here to
+        # call add_future (because of the requirement to remain compatible
+        # with WSGI)
+        fut = gen.convert_yielded(
+            self.handler._execute(transforms, *self.path_args, **self.path_kwargs)
+        )
+        fut.add_done_callback(lambda f: f.result())
+        # If we are streaming the request body, then execute() is finished
+        # when the handler has prepared to receive the body.  If not,
+        # it doesn't matter when execute() finishes (so we return None)
+        return self.handler._prepared_future
+

 class HTTPError(Exception):
     """An exception that will turn into an HTTP error response.
@@ -1346,20 +2465,27 @@ class HTTPError(Exception):
         to use a non-standard numeric code.
     """

-    def __init__(self, status_code: int=500, log_message: Optional[str]=
-        None, *args: Any, **kwargs: Any) ->None:
+    def __init__(
+        self,
+        status_code: int = 500,
+        log_message: Optional[str] = None,
+        *args: Any,
+        **kwargs: Any,
+    ) -> None:
         self.status_code = status_code
         self.log_message = log_message
         self.args = args
-        self.reason = kwargs.get('reason', None)
+        self.reason = kwargs.get("reason", None)
         if log_message and not args:
-            self.log_message = log_message.replace('%', '%%')
+            self.log_message = log_message.replace("%", "%%")

-    def __str__(self) ->str:
-        message = 'HTTP %d: %s' % (self.status_code, self.reason or
-            httputil.responses.get(self.status_code, 'Unknown'))
+    def __str__(self) -> str:
+        message = "HTTP %d: %s" % (
+            self.status_code,
+            self.reason or httputil.responses.get(self.status_code, "Unknown"),
+        )
         if self.log_message:
-            return message + ' (' + self.log_message % self.args + ')'
+            return message + " (" + (self.log_message % self.args) + ")"
         else:
             return message
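
Given the formatting above, constructing and stringifying an
``HTTPError`` behaves as follows (values illustrative)::

    e = HTTPError(403, "user %s blocked", "alice", reason="Nope")
    str(e)               # "HTTP 403: Nope (user alice blocked)"
    str(HTTPError(404))  # "HTTP 404: Not Found"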

@@ -1388,6 +2514,7 @@ class Finish(Exception):
        Arguments passed to ``Finish()`` will be passed on to
        `RequestHandler.finish`.
     """
+
     pass


@@ -1400,14 +2527,26 @@ class MissingArgumentError(HTTPError):
     .. versionadded:: 3.1
     """

-    def __init__(self, arg_name: str) ->None:
-        super().__init__(400, 'Missing argument %s' % arg_name)
+    def __init__(self, arg_name: str) -> None:
+        super().__init__(400, "Missing argument %s" % arg_name)
         self.arg_name = arg_name


 class ErrorHandler(RequestHandler):
     """Generates an error response with ``status_code`` for all requests."""

+    def initialize(self, status_code: int) -> None:
+        self.set_status(status_code)
+
+    def prepare(self) -> None:
+        raise HTTPError(self._status_code)
+
+    def check_xsrf_cookie(self) -> None:
+        # POSTs to an ErrorHandler don't actually have side effects,
+        # so we don't need to check the xsrf token.  This allows POSTs
+        # to the wrong url to return a 404 instead of 403.
+        pass
+

 class RedirectHandler(RequestHandler):
     """Redirects the client to the given URL for all GET requests.
@@ -1442,6 +2581,20 @@ class RedirectHandler(RequestHandler):
        destination URL.
     """

+    def initialize(self, url: str, permanent: bool = True) -> None:
+        self._url = url
+        self._permanent = permanent
+
+    def get(self, *args: Any, **kwargs: Any) -> None:
+        to_url = self._url.format(*args, **kwargs)
+        if self.request.query_arguments:
+            # TODO: figure out typing for the next line.
+            to_url = httputil.url_concat(
+                to_url,
+                list(httputil.qs_to_qsl(self.request.query_arguments)),  # type: ignore
+            )
+        self.redirect(to_url, permanent=self._permanent)
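
Combined with the pattern substitution in ``get`` above, a routing
entry might look like this sketch (regex and target hypothetical)::

    app = Application([
        url(r"/photos/(.*)", RedirectHandler, dict(url="/images/{0}")),
    ])
    # GET /photos/cat.png?size=big redirects to /images/cat.png?size=big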
+

 class StaticFileHandler(RequestHandler):
     """A simple handler that can serve static content from a directory.
@@ -1509,11 +2662,108 @@ class StaticFileHandler(RequestHandler):
     .. versionchanged:: 3.1
        Many of the methods for subclasses were added in Tornado 3.1.
     """
-    CACHE_MAX_AGE = 86400 * 365 * 10
-    _static_hashes = {}
-    _lock = threading.Lock()

-    def compute_etag(self) ->Optional[str]:
+    CACHE_MAX_AGE = 86400 * 365 * 10  # 10 years
+
+    _static_hashes = {}  # type: Dict[str, Optional[str]]
+    _lock = threading.Lock()  # protects _static_hashes
+
+    def initialize(self, path: str, default_filename: Optional[str] = None) -> None:
+        self.root = path
+        self.default_filename = default_filename
+
+    @classmethod
+    def reset(cls) -> None:
+        with cls._lock:
+            cls._static_hashes = {}
+
+    def head(self, path: str) -> Awaitable[None]:
+        return self.get(path, include_body=False)
+
+    async def get(self, path: str, include_body: bool = True) -> None:
+        # Set up our path instance variables.
+        self.path = self.parse_url_path(path)
+        del path  # make sure we don't refer to path instead of self.path again
+        absolute_path = self.get_absolute_path(self.root, self.path)
+        self.absolute_path = self.validate_absolute_path(self.root, absolute_path)
+        if self.absolute_path is None:
+            return
+
+        self.modified = self.get_modified_time()
+        self.set_headers()
+
+        if self.should_return_304():
+            self.set_status(304)
+            return
+
+        request_range = None
+        range_header = self.request.headers.get("Range")
+        if range_header:
+            # As per RFC 2616 14.16, if an invalid Range header is specified,
+            # the request will be treated as if the header didn't exist.
+            request_range = httputil._parse_request_range(range_header)
+
+        size = self.get_content_size()
+        if request_range:
+            start, end = request_range
+            if start is not None and start < 0:
+                start += size
+                if start < 0:
+                    start = 0
+            if (
+                start is not None
+                and (start >= size or (end is not None and start >= end))
+            ) or end == 0:
+                # As per RFC 2616 14.35.1, a range is not satisfiable
+                # only if the first requested byte is equal to or greater
+                # than the content length, or a suffix with length 0 is
+                # specified.
+                # https://tools.ietf.org/html/rfc7233#section-2.1
+                # A byte-range-spec is invalid if the last-byte-pos value is present
+                # and less than the first-byte-pos.
+                self.set_status(416)  # Range Not Satisfiable
+                self.set_header("Content-Type", "text/plain")
+                self.set_header("Content-Range", "bytes */%s" % (size,))
+                return
+            if end is not None and end > size:
+                # Clients sometimes blindly use a large range to limit their
+                # download size; cap the endpoint at the actual file size.
+                end = size
+            # Note: only return HTTP 206 if less than the entire range has been
+            # requested. Not only is this semantically correct, but Chrome
+            # refuses to play audio if it gets an HTTP 206 in response to
+            # ``Range: bytes=0-``.
+            if size != (end or size) - (start or 0):
+                self.set_status(206)  # Partial Content
+                self.set_header(
+                    "Content-Range", httputil._get_content_range(start, end, size)
+                )
+        else:
+            start = end = None
+
+        if start is not None and end is not None:
+            content_length = end - start
+        elif end is not None:
+            content_length = end
+        elif start is not None:
+            content_length = size - start
+        else:
+            content_length = size
+        self.set_header("Content-Length", content_length)
+
+        if include_body:
+            content = self.get_content(self.absolute_path, start, end)
+            if isinstance(content, bytes):
+                content = [content]
+            for chunk in content:
+                try:
+                    self.write(chunk)
+                    await self.flush()
+                except iostream.StreamClosedError:
+                    return
+        else:
+            assert self.request.method == "HEAD"
+
+    def compute_etag(self) -> Optional[str]:
         """Sets the ``Etag`` header based on static url version.

         This allows efficient ``If-None-Match`` checks against cached
@@ -1522,24 +2772,62 @@ class StaticFileHandler(RequestHandler):

         .. versionadded:: 3.1
         """
-        pass
+        assert self.absolute_path is not None
+        version_hash = self._get_cached_version(self.absolute_path)
+        if not version_hash:
+            return None
+        return '"%s"' % (version_hash,)

-    def set_headers(self) ->None:
+    def set_headers(self) -> None:
         """Sets the content and caching headers on the response.

         .. versionadded:: 3.1
         """
-        pass
+        self.set_header("Accept-Ranges", "bytes")
+        self.set_etag_header()
+
+        if self.modified is not None:
+            self.set_header("Last-Modified", self.modified)
+
+        content_type = self.get_content_type()
+        if content_type:
+            self.set_header("Content-Type", content_type)
+
+        cache_time = self.get_cache_time(self.path, self.modified, content_type)
+        if cache_time > 0:
+            self.set_header(
+                "Expires",
+                datetime.datetime.now(datetime.timezone.utc)
+                + datetime.timedelta(seconds=cache_time),
+            )
+            self.set_header("Cache-Control", "max-age=" + str(cache_time))
+
+        self.set_extra_headers(self.path)

-    def should_return_304(self) ->bool:
+    def should_return_304(self) -> bool:
         """Returns True if the headers indicate that we should return 304.

         .. versionadded:: 3.1
         """
-        pass
+        # If client sent If-None-Match, use it, ignore If-Modified-Since
+        if self.request.headers.get("If-None-Match"):
+            return self.check_etag_header()
+
+        # Check the If-Modified-Since, and don't send the result if the
+        # content has not been modified
+        ims_value = self.request.headers.get("If-Modified-Since")
+        if ims_value is not None:
+            if_since = email.utils.parsedate_to_datetime(ims_value)
+            if if_since.tzinfo is None:
+                if_since = if_since.replace(tzinfo=datetime.timezone.utc)
+            assert self.modified is not None
+            if if_since >= self.modified:
+                return True
+
+        return False

     @classmethod
-    def get_absolute_path(cls, root: str, path: str) ->str:
+    def get_absolute_path(cls, root: str, path: str) -> str:
         """Returns the absolute location of ``path`` relative to ``root``.

         ``root`` is the path configured for this `StaticFileHandler`
@@ -1552,10 +2840,10 @@ class StaticFileHandler(RequestHandler):

         .. versionadded:: 3.1
         """
-        pass
+        abspath = os.path.abspath(os.path.join(root, path))
+        return abspath

-    def validate_absolute_path(self, root: str, absolute_path: str) ->Optional[
-        str]:
+    def validate_absolute_path(self, root: str, absolute_path: str) -> Optional[str]:
         """Validate and return the absolute path.

         ``root`` is the configured path for the `StaticFileHandler`,
@@ -1575,11 +2863,49 @@ class StaticFileHandler(RequestHandler):

         .. versionadded:: 3.1
         """
-        pass
+        # os.path.abspath strips a trailing /.
+        # We must add it back to `root` so that we only match files
+        # in a directory named `root` instead of files starting with
+        # that prefix.
+        root = os.path.abspath(root)
+        if not root.endswith(os.path.sep):
+            # abspath always removes a trailing slash, except when
+            # root is '/'. This is an unusual case, but several projects
+            # have independently discovered this technique to disable
+            # Tornado's path validation and (hopefully) do their own,
+            # so we need to support it.
+            root += os.path.sep
+        # The trailing slash also needs to be temporarily added back
+        # to the requested path so a request to root/ will match.
+        if not (absolute_path + os.path.sep).startswith(root):
+            raise HTTPError(403, "%s is not in root static directory", self.path)
+        if os.path.isdir(absolute_path) and self.default_filename is not None:
+            # need to look at the request.path here for when path is empty
+            # but there is some prefix to the path that was already
+            # trimmed by the routing
+            if not self.request.path.endswith("/"):
+                if self.request.path.startswith("//"):
+                    # A redirect with two initial slashes is a "protocol-relative" URL.
+                    # This means the next path segment is treated as a hostname instead
+                    # of a part of the path, making this effectively an open redirect.
+                    # Reject paths starting with two slashes to prevent this.
+                    # This is only reachable under certain configurations.
+                    raise HTTPError(
+                        403, "cannot redirect path with two initial slashes"
+                    )
+                self.redirect(self.request.path + "/", permanent=True)
+                return None
+            absolute_path = os.path.join(absolute_path, self.default_filename)
+        if not os.path.exists(absolute_path):
+            raise HTTPError(404)
+        if not os.path.isfile(absolute_path):
+            raise HTTPError(403, "%s is not a file", self.path)
+        return absolute_path

     @classmethod
-    def get_content(cls, abspath: str, start: Optional[int]=None, end:
-        Optional[int]=None) ->Generator[bytes, None, None]:
+    def get_content(
+        cls, abspath: str, start: Optional[int] = None, end: Optional[int] = None
+    ) -> Generator[bytes, None, None]:
         """Retrieve the content of the requested resource which is located
         at the given absolute path.

@@ -1594,10 +2920,29 @@ class StaticFileHandler(RequestHandler):

         .. versionadded:: 3.1
         """
-        pass
+        with open(abspath, "rb") as file:
+            if start is not None:
+                file.seek(start)
+            if end is not None:
+                remaining = end - (start or 0)  # type: Optional[int]
+            else:
+                remaining = None
+            while True:
+                chunk_size = 64 * 1024
+                if remaining is not None and remaining < chunk_size:
+                    chunk_size = remaining
+                chunk = file.read(chunk_size)
+                if chunk:
+                    if remaining is not None:
+                        remaining -= len(chunk)
+                    yield chunk
+                else:
+                    if remaining is not None:
+                        assert remaining == 0
+                    return
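
Because ``get_content`` may return either raw bytes or a generator of
chunks, callers normalize the result as ``get`` does above; a small
consumption sketch (path hypothetical)::

    content = StaticFileHandler.get_content("/srv/static/app.js", start=0, end=1024)
    if isinstance(content, bytes):
        content = [content]
    total = sum(len(chunk) for chunk in content)  # at most 1024 bytes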

     @classmethod
-    def get_content_version(cls, abspath: str) ->str:
+    def get_content_version(cls, abspath: str) -> str:
         """Returns a version string for the resource at the given path.

         This class method may be overridden by subclasses.  The
@@ -1605,9 +2950,22 @@ class StaticFileHandler(RequestHandler):

         .. versionadded:: 3.1
         """
-        pass
+        data = cls.get_content(abspath)
+        hasher = hashlib.sha512()
+        if isinstance(data, bytes):
+            hasher.update(data)
+        else:
+            for chunk in data:
+                hasher.update(chunk)
+        return hasher.hexdigest()

-    def get_content_size(self) ->int:
+    def _stat(self) -> os.stat_result:
+        assert self.absolute_path is not None
+        if not hasattr(self, "_stat_result"):
+            self._stat_result = os.stat(self.absolute_path)
+        return self._stat_result
+
+    def get_content_size(self) -> int:
         """Retrieve the total size of the resource at the given path.

         This method may be overridden by subclasses.
@@ -1618,9 +2976,10 @@ class StaticFileHandler(RequestHandler):
            This method is now always called, instead of only when
            partial results are requested.
         """
-        pass
+        stat_result = self._stat()
+        return stat_result.st_size

-    def get_modified_time(self) ->Optional[datetime.datetime]:
+    def get_modified_time(self) -> Optional[datetime.datetime]:
         """Returns the time that ``self.absolute_path`` was last modified.

         May be overridden in subclasses.  Should return a `~datetime.datetime`
@@ -1632,21 +2991,48 @@ class StaticFileHandler(RequestHandler):
            Now returns an aware datetime object instead of a naive one.
            Subclasses that override this method may return either kind.
         """
-        pass
-
-    def get_content_type(self) ->str:
+        stat_result = self._stat()
+        # NOTE: Historically, this used stat_result[stat.ST_MTIME],
+        # which truncates the fractional portion of the timestamp. It
+        # was changed from that form to stat_result.st_mtime to
+        # satisfy mypy (which disallows the bracket operator), but the
+        # latter form returns a float instead of an int. For
+        # consistency with the past (and because we have a unit test
+        # that relies on this), we truncate the float here, although
+        # I'm not sure that's the right thing to do.
+        modified = datetime.datetime.fromtimestamp(
+            int(stat_result.st_mtime), datetime.timezone.utc
+        )
+        return modified
+
+    def get_content_type(self) -> str:
         """Returns the ``Content-Type`` header to be used for this request.

         .. versionadded:: 3.1
         """
-        pass
+        assert self.absolute_path is not None
+        mime_type, encoding = mimetypes.guess_type(self.absolute_path)
+        # per RFC 6713, use the appropriate type for a gzip compressed file
+        if encoding == "gzip":
+            return "application/gzip"
+        # As of 2015-07-21 there is no bzip2 encoding defined at
+        # http://www.iana.org/assignments/media-types/media-types.xhtml
+        # So for that (and any other encoding), use octet-stream.
+        elif encoding is not None:
+            return "application/octet-stream"
+        elif mime_type is not None:
+            return mime_type
+        # if mime_type not detected, use application/octet-stream
+        else:
+            return "application/octet-stream"

-    def set_extra_headers(self, path: str) ->None:
+    def set_extra_headers(self, path: str) -> None:
         """For subclass to add extra headers to the response"""
         pass

-    def get_cache_time(self, path: str, modified: Optional[datetime.
-        datetime], mime_type: str) ->int:
+    def get_cache_time(
+        self, path: str, modified: Optional[datetime.datetime], mime_type: str
+    ) -> int:
         """Override to customize cache control behavior.

         Return a positive number of seconds to make the result
@@ -1657,11 +3043,12 @@ class StaticFileHandler(RequestHandler):
         By default returns cache expiry of 10 years for resources requested
         with ``v`` argument.
         """
-        pass
+        return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0

     @classmethod
-    def make_static_url(cls, settings: Dict[str, Any], path: str,
-        include_version: bool=True) ->str:
+    def make_static_url(
+        cls, settings: Dict[str, Any], path: str, include_version: bool = True
+    ) -> str:
         """Constructs a versioned url for the given path.

         This method may be overridden in subclasses (but note that it
@@ -1680,9 +3067,17 @@ class StaticFileHandler(RequestHandler):
         file corresponding to the given ``path``.

         """
-        pass
+        url = settings.get("static_url_prefix", "/static/") + path
+        if not include_version:
+            return url
+
+        version_hash = cls.get_version(settings, path)
+        if not version_hash:
+            return url

-    def parse_url_path(self, url_path: str) ->str:
+        return "%s?v=%s" % (url, version_hash)
+
+    def parse_url_path(self, url_path: str) -> str:
         """Converts a static URL path into a filesystem path.

         ``url_path`` is the path component of the URL with
@@ -1691,10 +3086,12 @@ class StaticFileHandler(RequestHandler):

         This is the inverse of `make_static_url`.
         """
-        pass
+        if os.path.sep != "/":
+            url_path = url_path.replace("/", os.path.sep)
+        return url_path

     @classmethod
-    def get_version(cls, settings: Dict[str, Any], path: str) ->Optional[str]:
+    def get_version(cls, settings: Dict[str, Any], path: str) -> Optional[str]:
         """Generate the version string to be used in static URLs.

         ``settings`` is the `Application.settings` dictionary and ``path``
@@ -1707,7 +3104,23 @@ class StaticFileHandler(RequestHandler):
            `get_content_version` is now preferred as it allows the base
            class to handle caching of the result.
         """
-        pass
+        abs_path = cls.get_absolute_path(settings["static_path"], path)
+        return cls._get_cached_version(abs_path)
+
+    @classmethod
+    def _get_cached_version(cls, abs_path: str) -> Optional[str]:
+        with cls._lock:
+            hashes = cls._static_hashes
+            if abs_path not in hashes:
+                try:
+                    hashes[abs_path] = cls.get_content_version(abs_path)
+                except Exception:
+                    gen_log.error("Could not open static file %r", abs_path)
+                    hashes[abs_path] = None
+            hsh = hashes.get(abs_path)
+            if hsh:
+                return hsh
+        return None


 class FallbackHandler(RequestHandler):
@@ -1727,6 +3140,16 @@ class FallbackHandler(RequestHandler):
         ])
     """

+    def initialize(
+        self, fallback: Callable[[httputil.HTTPServerRequest], None]
+    ) -> None:
+        self.fallback = fallback
+
+    def prepare(self) -> None:
+        self.fallback(self.request)
+        self._finished = True
+        self.on_finish()
+

 class OutputTransform(object):
     """A transform modifies the result of an HTTP request (e.g., GZip encoding)
@@ -1736,9 +3159,21 @@ class OutputTransform(object):
     (if any) to apply.
     """

-    def __init__(self, request: httputil.HTTPServerRequest) ->None:
+    def __init__(self, request: httputil.HTTPServerRequest) -> None:
         pass

+    def transform_first_chunk(
+        self,
+        status_code: int,
+        headers: httputil.HTTPHeaders,
+        chunk: bytes,
+        finishing: bool,
+    ) -> Tuple[int, httputil.HTTPHeaders, bytes]:
+        return status_code, headers, chunk
+
+    def transform_chunk(self, chunk: bytes, finishing: bool) -> bytes:
+        return chunk
+

 class GZipContentEncoding(OutputTransform):
     """Applies the gzip content encoding to the response.
@@ -1750,19 +3185,90 @@ class GZipContentEncoding(OutputTransform):
         of just a whitelist. (the whitelist is still used for certain
         non-text mime types).
     """
-    CONTENT_TYPES = set(['application/javascript',
-        'application/x-javascript', 'application/xml',
-        'application/atom+xml', 'application/json', 'application/xhtml+xml',
-        'image/svg+xml'])
+
+    # Whitelist of compressible mime types (in addition to any types
+    # beginning with "text/").
+    CONTENT_TYPES = set(
+        [
+            "application/javascript",
+            "application/x-javascript",
+            "application/xml",
+            "application/atom+xml",
+            "application/json",
+            "application/xhtml+xml",
+            "image/svg+xml",
+        ]
+    )
+    # Python's GzipFile defaults to level 9, while most other gzip
+    # tools (including gzip itself) default to 6, which is probably a
+    # better CPU/size tradeoff.
     GZIP_LEVEL = 6
+    # Responses that are too short are unlikely to benefit from gzipping
+    # after considering the "Content-Encoding: gzip" header and the header
+    # inside the gzip encoding.
+    # Note that responses written in multiple chunks will be compressed
+    # regardless of size.
     MIN_LENGTH = 1024

-    def __init__(self, request: httputil.HTTPServerRequest) ->None:
-        self._gzipping = 'gzip' in request.headers.get('Accept-Encoding', '')
-
-
-def authenticated(method: Callable[..., Optional[Awaitable[None]]]) ->Callable[
-    ..., Optional[Awaitable[None]]]:
+    def __init__(self, request: httputil.HTTPServerRequest) -> None:
+        self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "")
+
+    def _compressible_type(self, ctype: str) -> bool:
+        return ctype.startswith("text/") or ctype in self.CONTENT_TYPES
+
+    def transform_first_chunk(
+        self,
+        status_code: int,
+        headers: httputil.HTTPHeaders,
+        chunk: bytes,
+        finishing: bool,
+    ) -> Tuple[int, httputil.HTTPHeaders, bytes]:
+        # TODO: can/should this type be inherited from the superclass?
+        if "Vary" in headers:
+            headers["Vary"] += ", Accept-Encoding"
+        else:
+            headers["Vary"] = "Accept-Encoding"
+        if self._gzipping:
+            ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
+            self._gzipping = (
+                self._compressible_type(ctype)
+                and (not finishing or len(chunk) >= self.MIN_LENGTH)
+                and ("Content-Encoding" not in headers)
+            )
+        if self._gzipping:
+            headers["Content-Encoding"] = "gzip"
+            self._gzip_value = BytesIO()
+            self._gzip_file = gzip.GzipFile(
+                mode="w", fileobj=self._gzip_value, compresslevel=self.GZIP_LEVEL
+            )
+            chunk = self.transform_chunk(chunk, finishing)
+            if "Content-Length" in headers:
+                # The original content length is no longer correct.
+                # If this is the last (and only) chunk, we can set the new
+                # content-length; otherwise we remove it and fall back to
+                # chunked encoding.
+                if finishing:
+                    headers["Content-Length"] = str(len(chunk))
+                else:
+                    del headers["Content-Length"]
+        return status_code, headers, chunk
+
+    def transform_chunk(self, chunk: bytes, finishing: bool) -> bytes:
+        if self._gzipping:
+            self._gzip_file.write(chunk)
+            if finishing:
+                self._gzip_file.close()
+            else:
+                self._gzip_file.flush()
+            chunk = self._gzip_value.getvalue()
+            self._gzip_value.truncate(0)
+            self._gzip_value.seek(0)
+        return chunk
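
This transform is enabled through the ``compress_response`` application
setting rather than by direct instantiation; a sketch::

    app = Application(handlers, compress_response=True)
    # Responses with a compressible Content-Type are then gzipped at
    # GZIP_LEVEL for clients sending "Accept-Encoding: gzip", subject
    # to the MIN_LENGTH threshold above.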
+
+
+def authenticated(
+    method: Callable[..., Optional[Awaitable[None]]]
+) -> Callable[..., Optional[Awaitable[None]]]:
     """Decorate methods with this to require that the user be logged in.

     If the user is not logged in, they will be redirected to the configured
@@ -1773,7 +3279,28 @@ def authenticated(method: Callable[..., Optional[Awaitable[None]]]) ->Callable[
     will add a `next` parameter so the login page knows where to send
     you once you're logged in.
     """
-    pass
+
+    @functools.wraps(method)
+    def wrapper(  # type: ignore
+        self: RequestHandler, *args, **kwargs
+    ) -> Optional[Awaitable[None]]:
+        if not self.current_user:
+            if self.request.method in ("GET", "HEAD"):
+                url = self.get_login_url()
+                if "?" not in url:
+                    if urllib.parse.urlsplit(url).scheme:
+                        # if login url is absolute, make next absolute too
+                        next_url = self.request.full_url()
+                    else:
+                        assert self.request.uri is not None
+                        next_url = self.request.uri
+                    url += "?" + urlencode(dict(next=next_url))
+                self.redirect(url)
+                return None
+            raise HTTPError(403)
+        return method(self, *args, **kwargs)
+
+    return wrapper
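
A sketch of the decorator in use, assuming a ``login_url`` setting and
a ``get_current_user`` override (both hypothetical here)::

    class ProfileHandler(RequestHandler):
        def get_current_user(self):
            return self.get_secure_cookie("user")

        @authenticated
        def get(self):
            self.write("hello, logged-in user")

    app = Application([(r"/profile", ProfileHandler)], login_url="/login")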


 class UIModule(object):
@@ -1786,65 +3313,71 @@ class UIModule(object):
     Subclasses of UIModule must override the `render` method.
     """

-    def __init__(self, handler: RequestHandler) ->None:
+    def __init__(self, handler: RequestHandler) -> None:
         self.handler = handler
         self.request = handler.request
         self.ui = handler.ui
         self.locale = handler.locale

-    def render(self, *args: Any, **kwargs: Any) ->str:
+    @property
+    def current_user(self) -> Any:
+        return self.handler.current_user
+
+    def render(self, *args: Any, **kwargs: Any) -> str:
         """Override in subclasses to return this module's output."""
-        pass
+        raise NotImplementedError()

-    def embedded_javascript(self) ->Optional[str]:
+    def embedded_javascript(self) -> Optional[str]:
         """Override to return a JavaScript string
         to be embedded in the page."""
-        pass
+        return None

-    def javascript_files(self) ->Optional[Iterable[str]]:
+    def javascript_files(self) -> Optional[Iterable[str]]:
         """Override to return a list of JavaScript files needed by this module.

         If the return values are relative paths, they will be passed to
         `RequestHandler.static_url`; otherwise they will be used as-is.
         """
-        pass
+        return None

-    def embedded_css(self) ->Optional[str]:
+    def embedded_css(self) -> Optional[str]:
         """Override to return a CSS string
         that will be embedded in the page."""
-        pass
+        return None

-    def css_files(self) ->Optional[Iterable[str]]:
+    def css_files(self) -> Optional[Iterable[str]]:
         """Override to returns a list of CSS files required by this module.

         If the return values are relative paths, they will be passed to
         `RequestHandler.static_url`; otherwise they will be used as-is.
         """
-        pass
+        return None

-    def html_head(self) ->Optional[str]:
+    def html_head(self) -> Optional[str]:
         """Override to return an HTML string that will be put in the <head/>
         element.
         """
-        pass
+        return None

-    def html_body(self) ->Optional[str]:
+    def html_body(self) -> Optional[str]:
         """Override to return an HTML string that will be put at the end of
         the <body/> element.
         """
-        pass
+        return None

-    def render_string(self, path: str, **kwargs: Any) ->bytes:
+    def render_string(self, path: str, **kwargs: Any) -> bytes:
         """Renders a template and returns it as a string."""
-        pass
+        return self.handler.render_string(path, **kwargs)
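
A custom module subclass following this interface might look like the
following sketch (class and template names hypothetical)::

    class Entry(UIModule):
        def render(self, entry, show_comments=False):
            return self.render_string(
                "module-entry.html", entry=entry, show_comments=show_comments
            )

        def embedded_css(self):
            return ".entry { margin: 1em 0 }"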


 class _linkify(UIModule):
-    pass
+    def render(self, text: str, **kwargs: Any) -> str:  # type: ignore
+        return escape.linkify(text, **kwargs)


 class _xsrf_form_html(UIModule):
-    pass
+    def render(self) -> str:  # type: ignore
+        return self.handler.xsrf_form_html()


 class TemplateModule(UIModule):
@@ -1863,28 +3396,321 @@ class TemplateModule(UIModule):
     any arguments to the template.
     """

-    def __init__(self, handler: RequestHandler) ->None:
+    def __init__(self, handler: RequestHandler) -> None:
         super().__init__(handler)
-        self._resource_list = []
-        self._resource_dict = {}
+        # keep resources in both a list and a dict to preserve order
+        self._resource_list = []  # type: List[Dict[str, Any]]
+        self._resource_dict = {}  # type: Dict[str, Dict[str, Any]]
+
+    def render(self, path: str, **kwargs: Any) -> bytes:  # type: ignore
+        def set_resources(**kwargs) -> str:  # type: ignore
+            if path not in self._resource_dict:
+                self._resource_list.append(kwargs)
+                self._resource_dict[path] = kwargs
+            else:
+                if self._resource_dict[path] != kwargs:
+                    raise ValueError(
+                        "set_resources called with different "
+                        "resources for the same template"
+                    )
+            return ""
+
+        return self.render_string(path, set_resources=set_resources, **kwargs)
+
+    def _get_resources(self, key: str) -> Iterable[str]:
+        return (r[key] for r in self._resource_list if key in r)
+
+    def embedded_javascript(self) -> str:
+        return "\n".join(self._get_resources("embedded_javascript"))
+
+    def javascript_files(self) -> Iterable[str]:
+        result = []
+        for f in self._get_resources("javascript_files"):
+            if isinstance(f, (unicode_type, bytes)):
+                result.append(f)
+            else:
+                result.extend(f)
+        return result
+
+    def embedded_css(self) -> str:
+        return "\n".join(self._get_resources("embedded_css"))
+
+    def css_files(self) -> Iterable[str]:
+        result = []
+        for f in self._get_resources("css_files"):
+            if isinstance(f, (unicode_type, bytes)):
+                result.append(f)
+            else:
+                result.extend(f)
+        return result
+
+    def html_head(self) -> str:
+        return "".join(self._get_resources("html_head"))
+
+    def html_body(self) -> str:
+        return "".join(self._get_resources("html_body"))


 class _UIModuleNamespace(object):
     """Lazy namespace which creates UIModule proxies bound to a handler."""

-    def __init__(self, handler: RequestHandler, ui_modules: Dict[str, Type[
-        UIModule]]) ->None:
+    def __init__(
+        self, handler: RequestHandler, ui_modules: Dict[str, Type[UIModule]]
+    ) -> None:
         self.handler = handler
         self.ui_modules = ui_modules

-    def __getitem__(self, key: str) ->Callable[..., str]:
+    def __getitem__(self, key: str) -> Callable[..., str]:
         return self.handler._ui_module(key, self.ui_modules[key])

-    def __getattr__(self, key: str) ->Callable[..., str]:
+    def __getattr__(self, key: str) -> Callable[..., str]:
         try:
             return self[key]
         except KeyError as e:
             raise AttributeError(str(e))


-_signed_value_version_re = re.compile(b'^([1-9][0-9]*)\\|(.*)$')
+def create_signed_value(
+    secret: _CookieSecretTypes,
+    name: str,
+    value: Union[str, bytes],
+    version: Optional[int] = None,
+    clock: Optional[Callable[[], float]] = None,
+    key_version: Optional[int] = None,
+) -> bytes:
+    if version is None:
+        version = DEFAULT_SIGNED_VALUE_VERSION
+    if clock is None:
+        clock = time.time
+
+    timestamp = utf8(str(int(clock())))
+    value = base64.b64encode(utf8(value))
+    if version == 1:
+        assert not isinstance(secret, dict)
+        signature = _create_signature_v1(secret, name, value, timestamp)
+        value = b"|".join([value, timestamp, signature])
+        return value
+    elif version == 2:
+        # The v2 format consists of a version number and a series of
+        # length-prefixed fields "%d:%s", the last of which is a
+        # signature, all separated by pipes.  All numbers are in
+        # decimal format with no leading zeros.  The signature is an
+        # HMAC-SHA256 of the whole string up to that point, including
+        # the final pipe.
+        #
+        # The fields are:
+        # - format version (i.e. 2; no length prefix)
+        # - key version (integer, default is 0)
+        # - timestamp (integer seconds since epoch)
+        # - name (not encoded; assumed to be ~alphanumeric)
+        # - value (base64-encoded)
+        # - signature (hex-encoded; no length prefix)
+        def format_field(s: Union[str, bytes]) -> bytes:
+            return utf8("%d:" % len(s)) + utf8(s)
+
+        to_sign = b"|".join(
+            [
+                b"2",
+                format_field(str(key_version or 0)),
+                format_field(timestamp),
+                format_field(name),
+                format_field(value),
+                b"",
+            ]
+        )
+
+        if isinstance(secret, dict):
+            assert (
+                key_version is not None
+            ), "Key version must be set when sign key dict is used"
+            assert version >= 2, "Version must be at least 2 for key version support"
+            secret = secret[key_version]
+
+        signature = _create_signature_v2(secret, to_sign)
+        return to_sign + signature
+    else:
+        raise ValueError("Unsupported version %d" % version)
+
+
+# A leading version number in decimal
+# with no leading zeros, followed by a pipe.
+_signed_value_version_re = re.compile(rb"^([1-9][0-9]*)\|(.*)$")
+
+
+def _get_version(value: bytes) -> int:
+    # Figures out what version value is.  Version 1 did not include an
+    # explicit version field and started with arbitrary base64 data,
+    # which makes this tricky.
+    m = _signed_value_version_re.match(value)
+    if m is None:
+        version = 1
+    else:
+        try:
+            version = int(m.group(1))
+            if version > 999:
+                # Certain payloads from the version-less v1 format may
+                # be parsed as valid integers.  Due to base64 padding
+                # restrictions, this can only happen for numbers whose
+                # length is a multiple of 4, so we can treat all
+                # numbers up to 999 as versions, and for the rest we
+                # fall back to v1 format.
+                version = 1
+        except ValueError:
+            version = 1
+    return version
+
+
+def decode_signed_value(
+    secret: _CookieSecretTypes,
+    name: str,
+    value: Union[None, str, bytes],
+    max_age_days: float = 31,
+    clock: Optional[Callable[[], float]] = None,
+    min_version: Optional[int] = None,
+) -> Optional[bytes]:
+    if clock is None:
+        clock = time.time
+    if min_version is None:
+        min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION
+    if min_version > 2:
+        raise ValueError("Unsupported min_version %d" % min_version)
+    if not value:
+        return None
+
+    value = utf8(value)
+    version = _get_version(value)
+
+    if version < min_version:
+        return None
+    if version == 1:
+        assert not isinstance(secret, dict)
+        return _decode_signed_value_v1(secret, name, value, max_age_days, clock)
+    elif version == 2:
+        return _decode_signed_value_v2(secret, name, value, max_age_days, clock)
+    else:
+        return None
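
A signing and verification round trip through the two functions above
(secret and cookie name illustrative)::

    secret = "my-app-cookie-secret"
    signed = create_signed_value(secret, "user", "alice", version=2)
    decode_signed_value(secret, "user", signed)       # b"alice"
    decode_signed_value(secret, "other", signed)      # None (name mismatch)
    decode_signed_value(secret, "user", signed,
                        max_age_days=0)               # None (treated as expired)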
+
+
+def _decode_signed_value_v1(
+    secret: Union[str, bytes],
+    name: str,
+    value: bytes,
+    max_age_days: float,
+    clock: Callable[[], float],
+) -> Optional[bytes]:
+    parts = utf8(value).split(b"|")
+    if len(parts) != 3:
+        return None
+    signature = _create_signature_v1(secret, name, parts[0], parts[1])
+    if not hmac.compare_digest(parts[2], signature):
+        gen_log.warning("Invalid cookie signature %r", value)
+        return None
+    timestamp = int(parts[1])
+    if timestamp < clock() - max_age_days * 86400:
+        gen_log.warning("Expired cookie %r", value)
+        return None
+    if timestamp > clock() + 31 * 86400:
+        # _cookie_signature does not hash a delimiter between the
+        # parts of the cookie, so an attacker could transfer trailing
+        # digits from the payload to the timestamp without altering the
+        # signature.  For backwards compatibility, sanity-check timestamp
+        # here instead of modifying _cookie_signature.
+        gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
+        return None
+    if parts[1].startswith(b"0"):
+        gen_log.warning("Tampered cookie %r", value)
+        return None
+    try:
+        return base64.b64decode(parts[0])
+    except Exception:
+        return None
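
For reference, the v1 layout checked above is base64(value)|timestamp|hex(HMAC-SHA1), with the signature computed over the undelimited concatenation of name, payload, and timestamp; that missing delimiter is exactly why the future-timestamp and leading-zero checks are needed. A hedged standalone sketch of the signing side (sign_v1 is an illustrative name):

    import base64
    import hashlib
    import hmac
    import time

    def sign_v1(secret: bytes, name: bytes, value: bytes) -> bytes:
        payload = base64.b64encode(value)
        ts = str(int(time.time())).encode()
        h = hmac.new(secret, digestmod=hashlib.sha1)
        for part in (name, payload, ts):
            h.update(part)  # no delimiter between parts (see checks above)
        return b"|".join([payload, ts, h.hexdigest().encode()])

    print(sign_v1(b"secret", b"session", b"user42"))
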
+
+
+def _decode_fields_v2(value: bytes) -> Tuple[int, bytes, bytes, bytes, bytes]:
+    def _consume_field(s: bytes) -> Tuple[bytes, bytes]:
+        length, _, rest = s.partition(b":")
+        n = int(length)
+        field_value = rest[:n]
+        # In Python 3, indexing bytes returns small integers; we must
+        # use a slice to get a byte string as in Python 2.
+        if rest[n : n + 1] != b"|":
+            raise ValueError("malformed v2 signed value field")
+        rest = rest[n + 1 :]
+        return field_value, rest
+
+    rest = value[2:]  # remove version number
+    key_version, rest = _consume_field(rest)
+    timestamp, rest = _consume_field(rest)
+    name_field, rest = _consume_field(rest)
+    value_field, passed_sig = _consume_field(rest)
+    return int(key_version), timestamp, name_field, value_field, passed_sig
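
Each v2 field is length-prefixed ("<len>:<payload>|"), so names and values containing "|" stay unambiguous. A standalone sketch of the parse, starting just after the leading "2|" version prefix that _decode_fields_v2 strips:

    def consume_field(s: bytes):
        length, _, rest = s.partition(b":")
        n = int(length)
        if rest[n:n + 1] != b"|":
            raise ValueError("malformed field")
        return rest[:n], rest[n + 1:]

    rest = b"1:0|10:1300000000|3:foo|8:aGVsbG8=|" + b"ab" * 32
    for label in ("key_version", "timestamp", "name", "value"):
        field, rest = consume_field(rest)
        print(label, field)
    print("signature", rest)  # whatever trails the last field is the signature
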
+
+
+def _decode_signed_value_v2(
+    secret: _CookieSecretTypes,
+    name: str,
+    value: bytes,
+    max_age_days: float,
+    clock: Callable[[], float],
+) -> Optional[bytes]:
+    try:
+        (
+            key_version,
+            timestamp_bytes,
+            name_field,
+            value_field,
+            passed_sig,
+        ) = _decode_fields_v2(value)
+    except ValueError:
+        return None
+    signed_string = value[: -len(passed_sig)]
+
+    if isinstance(secret, dict):
+        try:
+            secret = secret[key_version]
+        except KeyError:
+            return None
+
+    expected_sig = _create_signature_v2(secret, signed_string)
+    if not hmac.compare_digest(passed_sig, expected_sig):
+        return None
+    if name_field != utf8(name):
+        return None
+    timestamp = int(timestamp_bytes)
+    if timestamp < clock() - max_age_days * 86400:
+        # The signature has expired.
+        return None
+    try:
+        return base64.b64decode(value_field)
+    except Exception:
+        return None
+
+
+def get_signature_key_version(value: Union[str, bytes]) -> Optional[int]:
+    value = utf8(value)
+    version = _get_version(value)
+    if version < 2:
+        return None
+    try:
+        key_version, _, _, _, _ = _decode_fields_v2(value)
+    except ValueError:
+        return None
+
+    return key_version
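
Taken together, these helpers support key rotation: sign with a dict of secrets and an explicit key_version, and decoding looks the embedded version up in the same dict. A hedged usage sketch (the secret strings are made up):

    from tornado.web import create_signed_value, decode_signed_value

    secrets = {0: "old-secret", 1: "new-secret"}
    signed = create_signed_value(
        secrets, "session", "user42", version=2, key_version=1
    )
    print(decode_signed_value(secrets, "session", signed))  # b'user42'
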
+
+
+def _create_signature_v1(secret: Union[str, bytes], *parts: Union[str, bytes]) -> bytes:
+    hash = hmac.new(utf8(secret), digestmod=hashlib.sha1)
+    for part in parts:
+        hash.update(utf8(part))
+    return utf8(hash.hexdigest())
+
+
+def _create_signature_v2(secret: Union[str, bytes], s: bytes) -> bytes:
+    hash = hmac.new(utf8(secret), digestmod=hashlib.sha256)
+    hash.update(utf8(s))
+    return utf8(hash.hexdigest())
+
+
+def is_absolute(path: str) -> bool:
+    return any(path.startswith(x) for x in ["/", "http:", "https:"])
diff --git a/tornado/websocket.py b/tornado/websocket.py
index 8d336cb2..8f0e0aef 100644
--- a/tornado/websocket.py
+++ b/tornado/websocket.py
@@ -10,6 +10,7 @@ defined in `RFC 6455 <http://tools.ietf.org/html/rfc6455>`_.
 .. versionchanged:: 4.0
    Removed support for the draft 76 protocol version.
 """
+
 import abc
 import asyncio
 import base64
@@ -21,6 +22,7 @@ import tornado
 from urllib.parse import urlparse
 import warnings
 import zlib
+
 from tornado.concurrent import Future, future_set_result_unless_cancelled
 from tornado.escape import utf8, native_str, to_unicode
 from tornado import gen, httpclient, httputil
@@ -32,22 +34,67 @@ from tornado import simple_httpclient
 from tornado.queues import Queue
 from tornado.tcpclient import TCPClient
 from tornado.util import _websocket_mask
-from typing import TYPE_CHECKING, cast, Any, Optional, Dict, Union, List, Awaitable, Callable, Tuple, Type
+
+from typing import (
+    TYPE_CHECKING,
+    cast,
+    Any,
+    Optional,
+    Dict,
+    Union,
+    List,
+    Awaitable,
+    Callable,
+    Tuple,
+    Type,
+)
 from types import TracebackType
+
 if TYPE_CHECKING:
     from typing_extensions import Protocol

-
+    # The zlib compressor types aren't actually exposed anywhere
+    # publicly, so declare protocols for the portions we use.
     class _Compressor(Protocol):
-        pass
+        def compress(self, data: bytes) -> bytes:
+            pass

+        def flush(self, mode: int) -> bytes:
+            pass

     class _Decompressor(Protocol):
-        unconsumed_tail = b''
+        unconsumed_tail = b""  # type: bytes

+        def decompress(self, data: bytes, max_length: int) -> bytes:
+            pass

     class _WebSocketDelegate(Protocol):
-        pass
+        # The common base interface implemented by WebSocketHandler on
+        # the server side and WebSocketClientConnection on the client
+        # side.
+        def on_ws_connection_close(
+            self, close_code: Optional[int] = None, close_reason: Optional[str] = None
+        ) -> None:
+            pass
+
+        def on_message(self, message: Union[str, bytes]) -> Optional["Awaitable[None]"]:
+            pass
+
+        def on_ping(self, data: bytes) -> None:
+            pass
+
+        def on_pong(self, data: bytes) -> None:
+            pass
+
+        def log_exception(
+            self,
+            typ: Optional[Type[BaseException]],
+            value: Optional[BaseException],
+            tb: Optional[TracebackType],
+        ) -> None:
+            pass
+
+
 _default_max_message_size = 10 * 1024 * 1024


@@ -60,6 +107,7 @@ class WebSocketClosedError(WebSocketError):

     .. versionadded:: 3.2
     """
+
     pass


@@ -68,11 +116,13 @@ class _DecompressTooLargeError(Exception):


 class _WebSocketParams(object):
-
-    def __init__(self, ping_interval: Optional[float]=None, ping_timeout:
-        Optional[float]=None, max_message_size: int=
-        _default_max_message_size, compression_options: Optional[Dict[str,
-        Any]]=None) ->None:
+    def __init__(
+        self,
+        ping_interval: Optional[float] = None,
+        ping_timeout: Optional[float] = None,
+        max_message_size: int = _default_max_message_size,
+        compression_options: Optional[Dict[str, Any]] = None,
+    ) -> None:
         self.ping_interval = ping_interval
         self.ping_timeout = ping_timeout
         self.max_message_size = max_message_size
@@ -161,32 +211,88 @@ class WebSocketHandler(tornado.web.RequestHandler):
        ``websocket_max_message_size``.
     """

-    def __init__(self, application: tornado.web.Application, request:
-        httputil.HTTPServerRequest, **kwargs: Any) ->None:
+    def __init__(
+        self,
+        application: tornado.web.Application,
+        request: httputil.HTTPServerRequest,
+        **kwargs: Any
+    ) -> None:
         super().__init__(application, request, **kwargs)
-        self.ws_connection = None
-        self.close_code = None
-        self.close_reason = None
+        self.ws_connection = None  # type: Optional[WebSocketProtocol]
+        self.close_code = None  # type: Optional[int]
+        self.close_reason = None  # type: Optional[str]
         self._on_close_called = False

+    async def get(self, *args: Any, **kwargs: Any) -> None:
+        self.open_args = args
+        self.open_kwargs = kwargs
+
+        # Upgrade header should be present and should be equal to WebSocket
+        if self.request.headers.get("Upgrade", "").lower() != "websocket":
+            self.set_status(400)
+            log_msg = 'Can "Upgrade" only to "WebSocket".'
+            self.finish(log_msg)
+            gen_log.debug(log_msg)
+            return
+
+        # Connection header should be upgrade.
+        # Some proxy servers/load balancers
+        # might mess with it.
+        headers = self.request.headers
+        connection = map(
+            lambda s: s.strip().lower(), headers.get("Connection", "").split(",")
+        )
+        if "upgrade" not in connection:
+            self.set_status(400)
+            log_msg = '"Connection" must be "Upgrade".'
+            self.finish(log_msg)
+            gen_log.debug(log_msg)
+            return
+
+        # Handle WebSocket Origin naming convention differences
+        # The difference between version 8 and 13 is that in 8 the
+        # client sends a "Sec-Websocket-Origin" header and in 13 it's
+        # simply "Origin".
+        if "Origin" in self.request.headers:
+            origin = self.request.headers.get("Origin")
+        else:
+            origin = self.request.headers.get("Sec-Websocket-Origin", None)
+
+        # If there was an origin header, check to make sure it matches
+        # according to check_origin. When the origin is None, we assume it
+        # did not come from a browser and that it can be passed on.
+        if origin is not None and not self.check_origin(origin):
+            self.set_status(403)
+            log_msg = "Cross origin websockets not allowed"
+            self.finish(log_msg)
+            gen_log.debug(log_msg)
+            return
+
+        self.ws_connection = self.get_websocket_protocol()
+        if self.ws_connection:
+            await self.ws_connection.accept_connection(self)
+        else:
+            self.set_status(426, "Upgrade Required")
+            self.set_header("Sec-WebSocket-Version", "7, 8, 13")
+
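
The handshake above is inherited; a subclass normally supplies only the message hooks. A hedged usage sketch (the handler path and port are made up):

    import asyncio

    import tornado.web
    import tornado.websocket

    class EchoHandler(tornado.websocket.WebSocketHandler):
        def on_message(self, message):
            # write_message raises WebSocketClosedError once closed
            self.write_message(message)

    async def main():
        app = tornado.web.Application([(r"/ws", EchoHandler)])
        app.listen(8888)
        await asyncio.Event().wait()  # serve until cancelled

    if __name__ == "__main__":
        asyncio.run(main())
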
     @property
-    def ping_interval(self) ->Optional[float]:
+    def ping_interval(self) -> Optional[float]:
         """The interval for websocket keep-alive pings.

         Set websocket_ping_interval = 0 to disable pings.
         """
-        pass
+        return self.settings.get("websocket_ping_interval", None)

     @property
-    def ping_timeout(self) ->Optional[float]:
+    def ping_timeout(self) -> Optional[float]:
         """If no ping is received in this many seconds,
         close the websocket connection (VPNs, etc. can fail to cleanly close ws connections).
         Default is max of 3 pings or 30 seconds.
         """
-        pass
+        return self.settings.get("websocket_ping_timeout", None)

     @property
-    def max_message_size(self) ->int:
+    def max_message_size(self) -> int:
         """Maximum allowed message size.

         If the remote peer sends a message larger than this, the connection
@@ -194,10 +300,13 @@ class WebSocketHandler(tornado.web.RequestHandler):

         Default is 10MiB.
         """
-        pass
+        return self.settings.get(
+            "websocket_max_message_size", _default_max_message_size
+        )

-    def write_message(self, message: Union[bytes, str, Dict[str, Any]],
-        binary: bool=False) ->'Future[None]':
+    def write_message(
+        self, message: Union[bytes, str, Dict[str, Any]], binary: bool = False
+    ) -> "Future[None]":
         """Sends the given message to the client of this Web Socket.

         The message may be either a string or a dict (which will be
@@ -219,9 +328,13 @@ class WebSocketHandler(tornado.web.RequestHandler):
            Consistently raises `WebSocketClosedError`. Previously could
            sometimes raise `.StreamClosedError`.
         """
-        pass
+        if self.ws_connection is None or self.ws_connection.is_closing():
+            raise WebSocketClosedError()
+        if isinstance(message, dict):
+            message = tornado.escape.json_encode(message)
+        return self.ws_connection.write_message(message, binary=binary)

-    def select_subprotocol(self, subprotocols: List[str]) ->Optional[str]:
+    def select_subprotocol(self, subprotocols: List[str]) -> Optional[str]:
         """Override to implement subprotocol negotiation.

         ``subprotocols`` is a list of strings identifying the
@@ -244,17 +357,18 @@ class WebSocketHandler(tornado.web.RequestHandler):
            an empty string instead of an empty list if no subprotocols
            were proposed by the client.
         """
-        pass
+        return None
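
A hedged override sketch (the subprotocol names are invented):

    import tornado.websocket

    class ChatHandler(tornado.websocket.WebSocketHandler):
        def select_subprotocol(self, subprotocols):
            # Prefer the newest protocol the client offers; returning None
            # accepts the connection without any subprotocol.
            for proto in ("chat.v2", "chat.v1"):
                if proto in subprotocols:
                    return proto
            return None
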

     @property
-    def selected_subprotocol(self) ->Optional[str]:
+    def selected_subprotocol(self) -> Optional[str]:
         """The subprotocol returned by `select_subprotocol`.

         .. versionadded:: 5.1
         """
-        pass
+        assert self.ws_connection is not None
+        return self.ws_connection.selected_subprotocol

-    def get_compression_options(self) ->Optional[Dict[str, Any]]:
+    def get_compression_options(self) -> Optional[Dict[str, Any]]:
         """Override to return compression options for the connection.

         If this method returns None (the default), compression will
@@ -275,9 +389,10 @@ class WebSocketHandler(tornado.web.RequestHandler):

            Added ``compression_level`` and ``mem_level``.
         """
-        pass
+        # TODO: Add wbits option.
+        return None

-    def open(self, *args: str, **kwargs: str) ->Optional[Awaitable[None]]:
+    def open(self, *args: str, **kwargs: str) -> Optional[Awaitable[None]]:
         """Invoked when a new WebSocket is opened.

         The arguments to `open` are extracted from the `tornado.web.URLSpec`
@@ -293,8 +408,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
         """
         pass

-    def on_message(self, message: Union[str, bytes]) ->Optional[Awaitable[None]
-        ]:
+    def on_message(self, message: Union[str, bytes]) -> Optional[Awaitable[None]]:
         """Handle incoming messages on the WebSocket

         This method must be overridden.
@@ -303,9 +417,9 @@ class WebSocketHandler(tornado.web.RequestHandler):

            ``on_message`` can be a coroutine.
         """
-        pass
+        raise NotImplementedError

-    def ping(self, data: Union[str, bytes]=b'') ->None:
+    def ping(self, data: Union[str, bytes] = b"") -> None:
         """Send ping frame to the remote end.

         The data argument allows a small amount of data (up to 125
@@ -321,17 +435,20 @@ class WebSocketHandler(tornado.web.RequestHandler):
            The data argument is now optional.

         """
-        pass
+        data = utf8(data)
+        if self.ws_connection is None or self.ws_connection.is_closing():
+            raise WebSocketClosedError()
+        self.ws_connection.write_ping(data)

-    def on_pong(self, data: bytes) ->None:
+    def on_pong(self, data: bytes) -> None:
         """Invoked when the response to a ping frame is received."""
         pass

-    def on_ping(self, data: bytes) ->None:
+    def on_ping(self, data: bytes) -> None:
         """Invoked when the a ping frame is received."""
         pass

-    def on_close(self) ->None:
+    def on_close(self) -> None:
         """Invoked when the WebSocket is closed.

         If the connection was closed cleanly and a status code or reason
@@ -344,8 +461,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
         """
         pass

-    def close(self, code: Optional[int]=None, reason: Optional[str]=None
-        ) ->None:
+    def close(self, code: Optional[int] = None, reason: Optional[str] = None) -> None:
         """Closes this Web Socket.

         Once the close handshake is successful the socket will be closed.
@@ -361,9 +477,11 @@ class WebSocketHandler(tornado.web.RequestHandler):

            Added the ``code`` and ``reason`` arguments.
         """
-        pass
+        if self.ws_connection:
+            self.ws_connection.close(code, reason)
+            self.ws_connection = None

-    def check_origin(self, origin: str) ->bool:
+    def check_origin(self, origin: str) -> bool:
         """Override to enable support for allowing alternate origins.

         The ``origin`` argument is the value of the ``Origin`` HTTP
@@ -410,9 +528,16 @@ class WebSocketHandler(tornado.web.RequestHandler):
         .. versionadded:: 4.0

         """
-        pass
+        parsed_origin = urlparse(origin)
+        origin = parsed_origin.netloc
+        origin = origin.lower()
+
+        host = self.request.headers.get("Host")
+
+        # Check to see that origin matches host directly, including ports
+        return origin == host
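
The default is strict same-host matching; relaxing it is the documented override point. A hedged sketch that also admits subdomains of one (made-up) domain:

    from urllib.parse import urlparse

    import tornado.websocket

    class OpenHandler(tornado.websocket.WebSocketHandler):
        def check_origin(self, origin: str) -> bool:
            host = urlparse(origin).netloc.lower()
            return host == "example.com" or host.endswith(".example.com")
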

-    def set_nodelay(self, value: bool) ->None:
+    def set_nodelay(self, value: bool) -> None:
         """Set the no-delay flag for this stream.

         By default, small messages may be delayed and/or combined to minimize
@@ -426,76 +551,241 @@ class WebSocketHandler(tornado.web.RequestHandler):

         .. versionadded:: 3.1
         """
-        pass
+        assert self.ws_connection is not None
+        self.ws_connection.set_nodelay(value)
+
+    def on_connection_close(self) -> None:
+        if self.ws_connection:
+            self.ws_connection.on_connection_close()
+            self.ws_connection = None
+        if not self._on_close_called:
+            self._on_close_called = True
+            self.on_close()
+            self._break_cycles()
+
+    def on_ws_connection_close(
+        self, close_code: Optional[int] = None, close_reason: Optional[str] = None
+    ) -> None:
+        self.close_code = close_code
+        self.close_reason = close_reason
+        self.on_connection_close()
+
+    def _break_cycles(self) -> None:
+        # WebSocketHandlers call finish() early, but we don't want to
+        # break up reference cycles (which makes it impossible to call
+        # self.render_string) until after we've really closed the
+        # connection (if it was established in the first place,
+        # indicated by status code 101).
+        if self.get_status() != 101 or self._on_close_called:
+            super()._break_cycles()
+
+    def get_websocket_protocol(self) -> Optional["WebSocketProtocol"]:
+        websocket_version = self.request.headers.get("Sec-WebSocket-Version")
+        if websocket_version in ("7", "8", "13"):
+            params = _WebSocketParams(
+                ping_interval=self.ping_interval,
+                ping_timeout=self.ping_timeout,
+                max_message_size=self.max_message_size,
+                compression_options=self.get_compression_options(),
+            )
+            return WebSocketProtocol13(self, False, params)
+        return None
+
+    def _detach_stream(self) -> IOStream:
+        # disable non-WS methods
+        for method in [
+            "write",
+            "redirect",
+            "set_header",
+            "set_cookie",
+            "set_status",
+            "flush",
+            "finish",
+        ]:
+            setattr(self, method, _raise_not_supported_for_websockets)
+        return self.detach()
+
+
+def _raise_not_supported_for_websockets(*args: Any, **kwargs: Any) -> None:
+    raise RuntimeError("Method not supported for Web Sockets")


 class WebSocketProtocol(abc.ABC):
     """Base class for WebSocket protocol versions."""

-    def __init__(self, handler: '_WebSocketDelegate') ->None:
+    def __init__(self, handler: "_WebSocketDelegate") -> None:
         self.handler = handler
-        self.stream = None
+        self.stream = None  # type: Optional[IOStream]
         self.client_terminated = False
         self.server_terminated = False

-    def _run_callback(self, callback: Callable, *args: Any, **kwargs: Any
-        ) ->'Optional[Future[Any]]':
+    def _run_callback(
+        self, callback: Callable, *args: Any, **kwargs: Any
+    ) -> "Optional[Future[Any]]":
         """Runs the given callback with exception handling.

         If the callback is a coroutine, returns its Future. On error, aborts the
         websocket connection and returns None.
         """
-        pass
+        try:
+            result = callback(*args, **kwargs)
+        except Exception:
+            self.handler.log_exception(*sys.exc_info())
+            self._abort()
+            return None
+        else:
+            if result is not None:
+                result = gen.convert_yielded(result)
+                assert self.stream is not None
+                self.stream.io_loop.add_future(result, lambda f: f.result())
+            return result
+
+    def on_connection_close(self) -> None:
+        self._abort()

-    def _abort(self) ->None:
+    def _abort(self) -> None:
         """Instantly aborts the WebSocket connection by closing the socket"""
-        pass
+        self.client_terminated = True
+        self.server_terminated = True
+        if self.stream is not None:
+            self.stream.close()  # forcibly tear down the connection
+        self.close()  # let the subclass cleanup
+
+    @abc.abstractmethod
+    def close(self, code: Optional[int] = None, reason: Optional[str] = None) -> None:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def is_closing(self) -> bool:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    async def accept_connection(self, handler: WebSocketHandler) -> None:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def write_message(
+        self, message: Union[str, bytes, Dict[str, Any]], binary: bool = False
+    ) -> "Future[None]":
+        raise NotImplementedError()

+    @property
+    @abc.abstractmethod
+    def selected_subprotocol(self) -> Optional[str]:
+        raise NotImplementedError()

-class _PerMessageDeflateCompressor(object):
+    @abc.abstractmethod
+    def write_ping(self, data: bytes) -> None:
+        raise NotImplementedError()

-    def __init__(self, persistent: bool, max_wbits: Optional[int],
-        compression_options: Optional[Dict[str, Any]]=None) ->None:
+    # The entry points below are used by WebSocketClientConnection,
+    # which was introduced back when we only supported a single version of
+    # WebSocketProtocol. The WebSocketProtocol/WebSocketProtocol13
+    # boundary is currently pretty ad-hoc.
+    @abc.abstractmethod
+    def _process_server_headers(
+        self, key: Union[str, bytes], headers: httputil.HTTPHeaders
+    ) -> None:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def start_pinging(self) -> None:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    async def _receive_frame_loop(self) -> None:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def set_nodelay(self, x: bool) -> None:
+        raise NotImplementedError()
+
+
+class _PerMessageDeflateCompressor(object):
+    def __init__(
+        self,
+        persistent: bool,
+        max_wbits: Optional[int],
+        compression_options: Optional[Dict[str, Any]] = None,
+    ) -> None:
         if max_wbits is None:
             max_wbits = zlib.MAX_WBITS
-        if not 8 <= max_wbits <= zlib.MAX_WBITS:
-            raise ValueError('Invalid max_wbits value %r; allowed range 8-%d',
-                max_wbits, zlib.MAX_WBITS)
+        # There is no symbolic constant for the minimum wbits value.
+        if not (8 <= max_wbits <= zlib.MAX_WBITS):
+            raise ValueError(
+                "Invalid max_wbits value %r; allowed range 8-%d"
+                % (max_wbits, zlib.MAX_WBITS)
+            )
         self._max_wbits = max_wbits
-        if (compression_options is None or 'compression_level' not in
-            compression_options):
-            self._compression_level = (tornado.web.GZipContentEncoding.
-                GZIP_LEVEL)
+
+        if (
+            compression_options is None
+            or "compression_level" not in compression_options
+        ):
+            self._compression_level = tornado.web.GZipContentEncoding.GZIP_LEVEL
         else:
-            self._compression_level = compression_options['compression_level']
-        if (compression_options is None or 'mem_level' not in
-            compression_options):
+            self._compression_level = compression_options["compression_level"]
+
+        if compression_options is None or "mem_level" not in compression_options:
             self._mem_level = 8
         else:
-            self._mem_level = compression_options['mem_level']
+            self._mem_level = compression_options["mem_level"]
+
         if persistent:
-            self._compressor = self._create_compressor()
+            self._compressor = self._create_compressor()  # type: Optional[_Compressor]
         else:
             self._compressor = None

+    def _create_compressor(self) -> "_Compressor":
+        return zlib.compressobj(
+            self._compression_level, zlib.DEFLATED, -self._max_wbits, self._mem_level
+        )
+
+    def compress(self, data: bytes) -> bytes:
+        compressor = self._compressor or self._create_compressor()
+        data = compressor.compress(data) + compressor.flush(zlib.Z_SYNC_FLUSH)
+        assert data.endswith(b"\x00\x00\xff\xff")
+        return data[:-4]
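
The four trimmed bytes are the fixed tail a raw-deflate stream emits on Z_SYNC_FLUSH; per RFC 7692 they are stripped on the wire and re-appended before inflating. A standalone sketch of the round trip:

    import zlib

    c = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
    body = c.compress(b"hello hello hello") + c.flush(zlib.Z_SYNC_FLUSH)
    assert body.endswith(b"\x00\x00\xff\xff")  # the tail trimmed above
    wire = body[:-4]

    d = zlib.decompressobj(-zlib.MAX_WBITS)
    print(d.decompress(wire + b"\x00\x00\xff\xff"))  # b'hello hello hello'
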

-class _PerMessageDeflateDecompressor(object):

-    def __init__(self, persistent: bool, max_wbits: Optional[int],
-        max_message_size: int, compression_options: Optional[Dict[str, Any]
-        ]=None) ->None:
+class _PerMessageDeflateDecompressor(object):
+    def __init__(
+        self,
+        persistent: bool,
+        max_wbits: Optional[int],
+        max_message_size: int,
+        compression_options: Optional[Dict[str, Any]] = None,
+    ) -> None:
         self._max_message_size = max_message_size
         if max_wbits is None:
             max_wbits = zlib.MAX_WBITS
-        if not 8 <= max_wbits <= zlib.MAX_WBITS:
-            raise ValueError('Invalid max_wbits value %r; allowed range 8-%d',
-                max_wbits, zlib.MAX_WBITS)
+        if not (8 <= max_wbits <= zlib.MAX_WBITS):
+            raise ValueError(
+                "Invalid max_wbits value %r; allowed range 8-%d"
+                % (max_wbits, zlib.MAX_WBITS)
+            )
         self._max_wbits = max_wbits
         if persistent:
-            self._decompressor = self._create_decompressor()
+            self._decompressor = (
+                self._create_decompressor()
+            )  # type: Optional[_Decompressor]
         else:
             self._decompressor = None

+    def _create_decompressor(self) -> "_Decompressor":
+        return zlib.decompressobj(-self._max_wbits)
+
+    def decompress(self, data: bytes) -> bytes:
+        decompressor = self._decompressor or self._create_decompressor()
+        result = decompressor.decompress(
+            data + b"\x00\x00\xff\xff", self._max_message_size
+        )
+        if decompressor.unconsumed_tail:
+            raise _DecompressTooLargeError()
+        return result
+

 class WebSocketProtocol13(WebSocketProtocol):
     """Implementation of the WebSocket protocol from RFC 6455.
@@ -503,110 +793,551 @@ class WebSocketProtocol13(WebSocketProtocol):
     This class supports versions 7 and 8 of the protocol in addition to the
     final version 13.
     """
-    FIN = 128
-    RSV1 = 64
-    RSV2 = 32
-    RSV3 = 16
+
+    # Bit masks for the first byte of a frame.
+    FIN = 0x80
+    RSV1 = 0x40
+    RSV2 = 0x20
+    RSV3 = 0x10
     RSV_MASK = RSV1 | RSV2 | RSV3
-    OPCODE_MASK = 15
-    stream = None
+    OPCODE_MASK = 0x0F
+
+    stream = None  # type: IOStream

-    def __init__(self, handler: '_WebSocketDelegate', mask_outgoing: bool,
-        params: _WebSocketParams) ->None:
+    def __init__(
+        self,
+        handler: "_WebSocketDelegate",
+        mask_outgoing: bool,
+        params: _WebSocketParams,
+    ) -> None:
         WebSocketProtocol.__init__(self, handler)
         self.mask_outgoing = mask_outgoing
         self.params = params
         self._final_frame = False
         self._frame_opcode = None
         self._masked_frame = None
-        self._frame_mask = None
+        self._frame_mask = None  # type: Optional[bytes]
         self._frame_length = None
-        self._fragmented_message_buffer = None
+        self._fragmented_message_buffer = None  # type: Optional[bytearray]
         self._fragmented_message_opcode = None
-        self._waiting = None
+        self._waiting = None  # type: object
         self._compression_options = params.compression_options
-        self._decompressor = None
-        self._compressor = None
-        self._frame_compressed = None
+        self._decompressor = None  # type: Optional[_PerMessageDeflateDecompressor]
+        self._compressor = None  # type: Optional[_PerMessageDeflateCompressor]
+        self._frame_compressed = None  # type: Optional[bool]
+        # The total uncompressed size of all messages received or sent.
+        # Unicode messages are encoded to utf8.
+        # Only for testing; subject to change.
         self._message_bytes_in = 0
         self._message_bytes_out = 0
+        # The total size of all packets received or sent.  Includes
+        # the effect of compression, frame overhead, and control frames.
         self._wire_bytes_in = 0
         self._wire_bytes_out = 0
-        self.ping_callback = None
+        self.ping_callback = None  # type: Optional[PeriodicCallback]
         self.last_ping = 0.0
         self.last_pong = 0.0
-        self.close_code = None
-        self.close_reason = None
+        self.close_code = None  # type: Optional[int]
+        self.close_reason = None  # type: Optional[str]

-    def _handle_websocket_headers(self, handler: WebSocketHandler) ->None:
+    # Use a property for this to satisfy the abc.
+    @property
+    def selected_subprotocol(self) -> Optional[str]:
+        return self._selected_subprotocol
+
+    @selected_subprotocol.setter
+    def selected_subprotocol(self, value: Optional[str]) -> None:
+        self._selected_subprotocol = value
+
+    async def accept_connection(self, handler: WebSocketHandler) -> None:
+        try:
+            self._handle_websocket_headers(handler)
+        except ValueError:
+            handler.set_status(400)
+            log_msg = "Missing/Invalid WebSocket headers"
+            handler.finish(log_msg)
+            gen_log.debug(log_msg)
+            return
+
+        try:
+            await self._accept_connection(handler)
+        except asyncio.CancelledError:
+            self._abort()
+            return
+        except ValueError:
+            gen_log.debug("Malformed WebSocket request received", exc_info=True)
+            self._abort()
+            return
+
+    def _handle_websocket_headers(self, handler: WebSocketHandler) -> None:
         """Verifies all invariant- and required headers

         If a header is missing or have an incorrect value ValueError will be
         raised
         """
-        pass
+        fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version")
+        if not all(map(lambda f: handler.request.headers.get(f), fields)):
+            raise ValueError("Missing/Invalid WebSocket headers")

     @staticmethod
-    def compute_accept_value(key: Union[str, bytes]) ->str:
+    def compute_accept_value(key: Union[str, bytes]) -> str:
         """Computes the value for the Sec-WebSocket-Accept header,
         given the value for Sec-WebSocket-Key.
         """
-        pass
-
-    def _process_server_headers(self, key: Union[str, bytes], headers:
-        httputil.HTTPHeaders) ->None:
+        sha1 = hashlib.sha1()
+        sha1.update(utf8(key))
+        sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")  # Magic value
+        return native_str(base64.b64encode(sha1.digest()))
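
The GUID is fixed by RFC 6455, and section 1.3 of the RFC gives a worked example that the method above must reproduce:

    import base64
    import hashlib

    key = "dGhlIHNhbXBsZSBub25jZQ=="  # sample key from RFC 6455
    magic = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
    accept = base64.b64encode(hashlib.sha1((key + magic).encode()).digest())
    print(accept.decode())  # s3pPLMBiTxaQ9kYGzzhZRbK+xOo=
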
+
+    def _challenge_response(self, handler: WebSocketHandler) -> str:
+        return WebSocketProtocol13.compute_accept_value(
+            cast(str, handler.request.headers.get("Sec-Websocket-Key"))
+        )
+
+    async def _accept_connection(self, handler: WebSocketHandler) -> None:
+        subprotocol_header = handler.request.headers.get("Sec-WebSocket-Protocol")
+        if subprotocol_header:
+            subprotocols = [s.strip() for s in subprotocol_header.split(",")]
+        else:
+            subprotocols = []
+        self.selected_subprotocol = handler.select_subprotocol(subprotocols)
+        if self.selected_subprotocol:
+            assert self.selected_subprotocol in subprotocols
+            handler.set_header("Sec-WebSocket-Protocol", self.selected_subprotocol)
+
+        extensions = self._parse_extensions_header(handler.request.headers)
+        for ext in extensions:
+            if ext[0] == "permessage-deflate" and self._compression_options is not None:
+                # TODO: negotiate parameters if compression_options
+                # specifies limits.
+                self._create_compressors("server", ext[1], self._compression_options)
+                if (
+                    "client_max_window_bits" in ext[1]
+                    and ext[1]["client_max_window_bits"] is None
+                ):
+                    # Don't echo an offered client_max_window_bits
+                    # parameter with no value.
+                    del ext[1]["client_max_window_bits"]
+                handler.set_header(
+                    "Sec-WebSocket-Extensions",
+                    httputil._encode_header("permessage-deflate", ext[1]),
+                )
+                break
+
+        handler.clear_header("Content-Type")
+        handler.set_status(101)
+        handler.set_header("Upgrade", "websocket")
+        handler.set_header("Connection", "Upgrade")
+        handler.set_header("Sec-WebSocket-Accept", self._challenge_response(handler))
+        handler.finish()
+
+        self.stream = handler._detach_stream()
+
+        self.start_pinging()
+        try:
+            open_result = handler.open(*handler.open_args, **handler.open_kwargs)
+            if open_result is not None:
+                await open_result
+        except Exception:
+            handler.log_exception(*sys.exc_info())
+            self._abort()
+            return
+
+        await self._receive_frame_loop()
+
+    def _parse_extensions_header(
+        self, headers: httputil.HTTPHeaders
+    ) -> List[Tuple[str, Dict[str, str]]]:
+        extensions = headers.get("Sec-WebSocket-Extensions", "")
+        if extensions:
+            return [httputil._parse_header(e.strip()) for e in extensions.split(",")]
+        return []
+
+    def _process_server_headers(
+        self, key: Union[str, bytes], headers: httputil.HTTPHeaders
+    ) -> None:
         """Process the headers sent by the server to this client connection.

         'key' is the websocket handshake challenge/response key.
         """
-        pass
-
-    def _get_compressor_options(self, side: str, agreed_parameters: Dict[
-        str, Any], compression_options: Optional[Dict[str, Any]]=None) ->Dict[
-        str, Any]:
+        assert headers["Upgrade"].lower() == "websocket"
+        assert headers["Connection"].lower() == "upgrade"
+        accept = self.compute_accept_value(key)
+        assert headers["Sec-Websocket-Accept"] == accept
+
+        extensions = self._parse_extensions_header(headers)
+        for ext in extensions:
+            if ext[0] == "permessage-deflate" and self._compression_options is not None:
+                self._create_compressors("client", ext[1])
+            else:
+                raise ValueError("unsupported extension %r" % (ext,))
+
+        self.selected_subprotocol = headers.get("Sec-WebSocket-Protocol", None)
+
+    def _get_compressor_options(
+        self,
+        side: str,
+        agreed_parameters: Dict[str, Any],
+        compression_options: Optional[Dict[str, Any]] = None,
+    ) -> Dict[str, Any]:
         """Converts a websocket agreed_parameters set to keyword arguments
         for our compressor objects.
         """
-        pass
-
-    def write_message(self, message: Union[str, bytes, Dict[str, Any]],
-        binary: bool=False) ->'Future[None]':
+        options = dict(
+            persistent=(side + "_no_context_takeover") not in agreed_parameters
+        )  # type: Dict[str, Any]
+        wbits_header = agreed_parameters.get(side + "_max_window_bits", None)
+        if wbits_header is None:
+            options["max_wbits"] = zlib.MAX_WBITS
+        else:
+            options["max_wbits"] = int(wbits_header)
+        options["compression_options"] = compression_options
+        return options
+
+    def _create_compressors(
+        self,
+        side: str,
+        agreed_parameters: Dict[str, Any],
+        compression_options: Optional[Dict[str, Any]] = None,
+    ) -> None:
+        # TODO: handle invalid parameters gracefully
+        allowed_keys = set(
+            [
+                "server_no_context_takeover",
+                "client_no_context_takeover",
+                "server_max_window_bits",
+                "client_max_window_bits",
+            ]
+        )
+        for key in agreed_parameters:
+            if key not in allowed_keys:
+                raise ValueError("unsupported compression parameter %r" % key)
+        other_side = "client" if (side == "server") else "server"
+        self._compressor = _PerMessageDeflateCompressor(
+            **self._get_compressor_options(side, agreed_parameters, compression_options)
+        )
+        self._decompressor = _PerMessageDeflateDecompressor(
+            max_message_size=self.params.max_message_size,
+            **self._get_compressor_options(
+                other_side, agreed_parameters, compression_options
+            )
+        )
+
+    def _write_frame(
+        self, fin: bool, opcode: int, data: bytes, flags: int = 0
+    ) -> "Future[None]":
+        data_len = len(data)
+        if opcode & 0x8:
+            # All control frames MUST have a payload length of 125
+            # bytes or less and MUST NOT be fragmented.
+            if not fin:
+                raise ValueError("control frames may not be fragmented")
+            if data_len > 125:
+                raise ValueError("control frame payloads may not exceed 125 bytes")
+        if fin:
+            finbit = self.FIN
+        else:
+            finbit = 0
+        frame = struct.pack("B", finbit | opcode | flags)
+        if self.mask_outgoing:
+            mask_bit = 0x80
+        else:
+            mask_bit = 0
+        if data_len < 126:
+            frame += struct.pack("B", data_len | mask_bit)
+        elif data_len <= 0xFFFF:
+            frame += struct.pack("!BH", 126 | mask_bit, data_len)
+        else:
+            frame += struct.pack("!BQ", 127 | mask_bit, data_len)
+        if self.mask_outgoing:
+            mask = os.urandom(4)
+            data = mask + _websocket_mask(mask, data)
+        frame += data
+        self._wire_bytes_out += len(frame)
+        return self.stream.write(frame)
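
For orientation, the header built above for a short unmasked server-to-client text frame is just two bytes. A standalone sketch:

    import struct

    FIN, OPCODE_TEXT = 0x80, 0x1
    payload = b"hello"
    frame = (
        struct.pack("B", FIN | OPCODE_TEXT)
        + struct.pack("B", len(payload))  # length < 126, no mask bit
        + payload
    )
    print(frame.hex())  # 810568656c6c6f: 0x81 = FIN|text, 0x05 = length 5
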
+
+    def write_message(
+        self, message: Union[str, bytes, Dict[str, Any]], binary: bool = False
+    ) -> "Future[None]":
         """Sends the given message to the client of this Web Socket."""
-        pass
-
-    def write_ping(self, data: bytes) ->None:
+        if binary:
+            opcode = 0x2
+        else:
+            opcode = 0x1
+        if isinstance(message, dict):
+            message = tornado.escape.json_encode(message)
+        message = tornado.escape.utf8(message)
+        assert isinstance(message, bytes)
+        self._message_bytes_out += len(message)
+        flags = 0
+        if self._compressor:
+            message = self._compressor.compress(message)
+            flags |= self.RSV1
+        # For historical reasons, write methods in Tornado operate in a semi-synchronous
+        # mode in which awaiting the Future they return is optional (but errors can
+        # still be raised). This requires us to go through an awkward dance here
+        # to transform the errors that may be returned while presenting the same
+        # semi-synchronous interface.
+        try:
+            fut = self._write_frame(True, opcode, message, flags=flags)
+        except StreamClosedError:
+            raise WebSocketClosedError()
+
+        async def wrapper() -> None:
+            try:
+                await fut
+            except StreamClosedError:
+                raise WebSocketClosedError()
+
+        return asyncio.ensure_future(wrapper())
+
+    def write_ping(self, data: bytes) -> None:
         """Send ping frame."""
-        pass
-
-    def _handle_message(self, opcode: int, data: bytes
-        ) ->'Optional[Future[None]]':
+        assert isinstance(data, bytes)
+        self._write_frame(True, 0x9, data)
+
+    async def _receive_frame_loop(self) -> None:
+        try:
+            while not self.client_terminated:
+                await self._receive_frame()
+        except StreamClosedError:
+            self._abort()
+        self.handler.on_ws_connection_close(self.close_code, self.close_reason)
+
+    async def _read_bytes(self, n: int) -> bytes:
+        data = await self.stream.read_bytes(n)
+        self._wire_bytes_in += n
+        return data
+
+    async def _receive_frame(self) -> None:
+        # Read the frame header.
+        data = await self._read_bytes(2)
+        header, mask_payloadlen = struct.unpack("BB", data)
+        is_final_frame = header & self.FIN
+        reserved_bits = header & self.RSV_MASK
+        opcode = header & self.OPCODE_MASK
+        opcode_is_control = opcode & 0x8
+        if self._decompressor is not None and opcode != 0:
+            # Compression flag is present in the first frame's header,
+            # but we can't decompress until we have all the frames of
+            # the message.
+            self._frame_compressed = bool(reserved_bits & self.RSV1)
+            reserved_bits &= ~self.RSV1
+        if reserved_bits:
+            # client is using as-yet-undefined extensions; abort
+            self._abort()
+            return
+        is_masked = bool(mask_payloadlen & 0x80)
+        payloadlen = mask_payloadlen & 0x7F
+
+        # Parse and validate the length.
+        if opcode_is_control and payloadlen >= 126:
+            # control frames must have payload < 126
+            self._abort()
+            return
+        if payloadlen < 126:
+            self._frame_length = payloadlen
+        elif payloadlen == 126:
+            data = await self._read_bytes(2)
+            payloadlen = struct.unpack("!H", data)[0]
+        elif payloadlen == 127:
+            data = await self._read_bytes(8)
+            payloadlen = struct.unpack("!Q", data)[0]
+        new_len = payloadlen
+        if self._fragmented_message_buffer is not None:
+            new_len += len(self._fragmented_message_buffer)
+        if new_len > self.params.max_message_size:
+            self.close(1009, "message too big")
+            self._abort()
+            return
+
+        # Read the payload, unmasking if necessary.
+        if is_masked:
+            self._frame_mask = await self._read_bytes(4)
+        data = await self._read_bytes(payloadlen)
+        if is_masked:
+            assert self._frame_mask is not None
+            data = _websocket_mask(self._frame_mask, data)
+
+        # Decide what to do with this frame.
+        if opcode_is_control:
+            # control frames may be interleaved with a series of fragmented
+            # data frames, so control frames must not interact with
+            # self._fragmented_*
+            if not is_final_frame:
+                # control frames must not be fragmented
+                self._abort()
+                return
+        elif opcode == 0:  # continuation frame
+            if self._fragmented_message_buffer is None:
+                # nothing to continue
+                self._abort()
+                return
+            self._fragmented_message_buffer.extend(data)
+            if is_final_frame:
+                opcode = self._fragmented_message_opcode
+                data = bytes(self._fragmented_message_buffer)
+                self._fragmented_message_buffer = None
+        else:  # start of new data message
+            if self._fragmented_message_buffer is not None:
+                # can't start new message until the old one is finished
+                self._abort()
+                return
+            if not is_final_frame:
+                self._fragmented_message_opcode = opcode
+                self._fragmented_message_buffer = bytearray(data)
+
+        if is_final_frame:
+            handled_future = self._handle_message(opcode, data)
+            if handled_future is not None:
+                await handled_future
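
Client-to-server payloads arrive masked; _websocket_mask above dispatches to an optimized implementation, but the operation is just a repeating 4-byte XOR and is its own inverse. A standalone sketch:

    import os

    def websocket_mask(key: bytes, data: bytes) -> bytes:
        return bytes(b ^ key[i % 4] for i, b in enumerate(data))

    key = os.urandom(4)
    masked = websocket_mask(key, b"hello")
    assert websocket_mask(key, masked) == b"hello"  # XOR mask is self-inverse
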
+
+    def _handle_message(self, opcode: int, data: bytes) -> "Optional[Future[None]]":
         """Execute on_message, returning its Future if it is a coroutine."""
-        pass
+        if self.client_terminated:
+            return None
+
+        if self._frame_compressed:
+            assert self._decompressor is not None
+            try:
+                data = self._decompressor.decompress(data)
+            except _DecompressTooLargeError:
+                self.close(1009, "message too big after decompression")
+                self._abort()
+                return None
+
+        if opcode == 0x1:
+            # UTF-8 data
+            self._message_bytes_in += len(data)
+            try:
+                decoded = data.decode("utf-8")
+            except UnicodeDecodeError:
+                self._abort()
+                return None
+            return self._run_callback(self.handler.on_message, decoded)
+        elif opcode == 0x2:
+            # Binary data
+            self._message_bytes_in += len(data)
+            return self._run_callback(self.handler.on_message, data)
+        elif opcode == 0x8:
+            # Close
+            self.client_terminated = True
+            if len(data) >= 2:
+                self.close_code = struct.unpack(">H", data[:2])[0]
+            if len(data) > 2:
+                self.close_reason = to_unicode(data[2:])
+            # Echo the received close code, if any (RFC 6455 section 5.5.1).
+            self.close(self.close_code)
+        elif opcode == 0x9:
+            # Ping
+            try:
+                self._write_frame(True, 0xA, data)
+            except StreamClosedError:
+                self._abort()
+            self._run_callback(self.handler.on_ping, data)
+        elif opcode == 0xA:
+            # Pong
+            self.last_pong = IOLoop.current().time()
+            return self._run_callback(self.handler.on_pong, data)
+        else:
+            self._abort()
+        return None
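
The close payload unpacked above is an optional big-endian status code followed by a UTF-8 reason. A standalone sketch of both directions:

    import struct

    payload = struct.pack(">H", 1000) + "bye".encode("utf-8")  # 1000 = normal closure
    code = struct.unpack(">H", payload[:2])[0]
    print(code, payload[2:].decode("utf-8"))  # 1000 bye
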

-    def close(self, code: Optional[int]=None, reason: Optional[str]=None
-        ) ->None:
+    def close(self, code: Optional[int] = None, reason: Optional[str] = None) -> None:
         """Closes the WebSocket connection."""
-        pass
-
-    def is_closing(self) ->bool:
+        if not self.server_terminated:
+            if not self.stream.closed():
+                if code is None and reason is not None:
+                    code = 1000  # "normal closure" status code
+                if code is None:
+                    close_data = b""
+                else:
+                    close_data = struct.pack(">H", code)
+                if reason is not None:
+                    close_data += utf8(reason)
+                try:
+                    self._write_frame(True, 0x8, close_data)
+                except StreamClosedError:
+                    self._abort()
+            self.server_terminated = True
+        if self.client_terminated:
+            if self._waiting is not None:
+                self.stream.io_loop.remove_timeout(self._waiting)
+                self._waiting = None
+            self.stream.close()
+        elif self._waiting is None:
+            # Give the client a few seconds to complete a clean shutdown,
+            # otherwise just close the connection.
+            self._waiting = self.stream.io_loop.add_timeout(
+                self.stream.io_loop.time() + 5, self._abort
+            )
+        if self.ping_callback:
+            self.ping_callback.stop()
+            self.ping_callback = None
+
+    def is_closing(self) -> bool:
         """Return ``True`` if this connection is closing.

         The connection is considered closing if either side has
         initiated its closing handshake or if the stream has been
         shut down uncleanly.
         """
-        pass
+        return self.stream.closed() or self.client_terminated or self.server_terminated

-    def start_pinging(self) ->None:
-        """Start sending periodic pings to keep the connection alive"""
-        pass
+    @property
+    def ping_interval(self) -> Optional[float]:
+        interval = self.params.ping_interval
+        if interval is not None:
+            return interval
+        return 0

-    def periodic_ping(self) ->None:
+    @property
+    def ping_timeout(self) -> Optional[float]:
+        timeout = self.params.ping_timeout
+        if timeout is not None:
+            return timeout
+        assert self.ping_interval is not None
+        return max(3 * self.ping_interval, 30)
+
+    def start_pinging(self) -> None:
+        """Start sending periodic pings to keep the connection alive"""
+        assert self.ping_interval is not None
+        if self.ping_interval > 0:
+            self.last_ping = self.last_pong = IOLoop.current().time()
+            self.ping_callback = PeriodicCallback(
+                self.periodic_ping, self.ping_interval * 1000
+            )
+            self.ping_callback.start()
+
+    def periodic_ping(self) -> None:
         """Send a ping to keep the websocket alive

         Called periodically if the websocket_ping_interval is set and non-zero.
         """
-        pass
+        if self.is_closing() and self.ping_callback is not None:
+            self.ping_callback.stop()
+            return
+
+        # Check for timeout on pong. Make sure that we really have
+        # sent a recent ping in case the machine with both server and
+        # client has been suspended since the last ping.
+        now = IOLoop.current().time()
+        since_last_pong = now - self.last_pong
+        since_last_ping = now - self.last_ping
+        assert self.ping_interval is not None
+        assert self.ping_timeout is not None
+        if (
+            since_last_ping < 2 * self.ping_interval
+            and since_last_pong > self.ping_timeout
+        ):
+            self.close()
+            return
+
+        self.write_ping(b"")
+        self.last_ping = now
+
+    def set_nodelay(self, x: bool) -> None:
+        self.stream.set_nodelay(x)


 class WebSocketClientConnection(simple_httpclient._HTTPConnection):
@@ -615,46 +1346,81 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
     This class should not be instantiated directly; use the
     `websocket_connect` function instead.
     """
-    protocol = None
-
-    def __init__(self, request: httpclient.HTTPRequest, on_message_callback:
-        Optional[Callable[[Union[None, str, bytes]], None]]=None,
-        compression_options: Optional[Dict[str, Any]]=None, ping_interval:
-        Optional[float]=None, ping_timeout: Optional[float]=None,
-        max_message_size: int=_default_max_message_size, subprotocols:
-        Optional[List[str]]=None, resolver: Optional[Resolver]=None) ->None:
-        self.connect_future = Future()
-        self.read_queue = Queue(1)
+
+    protocol = None  # type: WebSocketProtocol
+
+    def __init__(
+        self,
+        request: httpclient.HTTPRequest,
+        on_message_callback: Optional[Callable[[Union[None, str, bytes]], None]] = None,
+        compression_options: Optional[Dict[str, Any]] = None,
+        ping_interval: Optional[float] = None,
+        ping_timeout: Optional[float] = None,
+        max_message_size: int = _default_max_message_size,
+        subprotocols: Optional[List[str]] = None,
+        resolver: Optional[Resolver] = None,
+    ) -> None:
+        self.connect_future = Future()  # type: Future[WebSocketClientConnection]
+        self.read_queue = Queue(1)  # type: Queue[Union[None, str, bytes]]
         self.key = base64.b64encode(os.urandom(16))
         self._on_message_callback = on_message_callback
-        self.close_code = None
-        self.close_reason = None
-        self.params = _WebSocketParams(ping_interval=ping_interval,
-            ping_timeout=ping_timeout, max_message_size=max_message_size,
-            compression_options=compression_options)
-        scheme, sep, rest = request.url.partition(':')
-        scheme = {'ws': 'http', 'wss': 'https'}[scheme]
+        self.close_code = None  # type: Optional[int]
+        self.close_reason = None  # type: Optional[str]
+        self.params = _WebSocketParams(
+            ping_interval=ping_interval,
+            ping_timeout=ping_timeout,
+            max_message_size=max_message_size,
+            compression_options=compression_options,
+        )
+
+        scheme, sep, rest = request.url.partition(":")
+        scheme = {"ws": "http", "wss": "https"}[scheme]
         request.url = scheme + sep + rest
-        request.headers.update({'Upgrade': 'websocket', 'Connection':
-            'Upgrade', 'Sec-WebSocket-Key': self.key,
-            'Sec-WebSocket-Version': '13'})
+        request.headers.update(
+            {
+                "Upgrade": "websocket",
+                "Connection": "Upgrade",
+                "Sec-WebSocket-Key": self.key,
+                "Sec-WebSocket-Version": "13",
+            }
+        )
         if subprotocols is not None:
-            request.headers['Sec-WebSocket-Protocol'] = ','.join(subprotocols)
+            request.headers["Sec-WebSocket-Protocol"] = ",".join(subprotocols)
         if compression_options is not None:
-            request.headers['Sec-WebSocket-Extensions'
-                ] = 'permessage-deflate; client_max_window_bits'
+            # Always offer to let the server set our max_wbits (and even though
+            # we don't offer it, we will accept a client_no_context_takeover
+            # from the server).
+            # TODO: set server parameters for deflate extension
+            # if requested in self.compression_options.
+            request.headers["Sec-WebSocket-Extensions"] = (
+                "permessage-deflate; client_max_window_bits"
+            )
+
+        # Websocket connection is currently unable to follow redirects
         request.follow_redirects = False
-        self.tcp_client = TCPClient(resolver=resolver)
-        super().__init__(None, request, lambda : None, self.
-            _on_http_response, 104857600, self.tcp_client, 65536, 104857600)

-    def __del__(self) ->None:
+        self.tcp_client = TCPClient(resolver=resolver)
+        super().__init__(
+            None,
+            request,
+            lambda: None,
+            self._on_http_response,
+            104857600,
+            self.tcp_client,
+            65536,
+            104857600,
+        )
+
+    def __del__(self) -> None:
         if self.protocol is not None:
-            warnings.warn('Unclosed WebSocketClientConnection', ResourceWarning
-                )
-
-    def close(self, code: Optional[int]=None, reason: Optional[str]=None
-        ) ->None:
+            # Unclosed client connections can sometimes log "task was destroyed but
+            # was pending" warnings if shutdown strikes at the wrong time (such as
+            # while a ping is being processed due to ping_interval). Log our own
+            # warning to make it a little more deterministic (although it's still
+            # dependent on GC timing).
+            warnings.warn("Unclosed WebSocketClientConnection", ResourceWarning)
+
+    def close(self, code: Optional[int] = None, reason: Optional[str] = None) -> None:
         """Closes the websocket connection.

         ``code`` and ``reason`` are documented under
@@ -666,10 +1432,66 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):

            Added the ``code`` and ``reason`` arguments.
         """
-        pass
+        if self.protocol is not None:
+            self.protocol.close(code, reason)
+            self.protocol = None  # type: ignore
+
+    def on_connection_close(self) -> None:
+        if not self.connect_future.done():
+            self.connect_future.set_exception(StreamClosedError())
+        self._on_message(None)
+        self.tcp_client.close()
+        super().on_connection_close()
+
+    def on_ws_connection_close(
+        self, close_code: Optional[int] = None, close_reason: Optional[str] = None
+    ) -> None:
+        self.close_code = close_code
+        self.close_reason = close_reason
+        self.on_connection_close()
+
+    def _on_http_response(self, response: httpclient.HTTPResponse) -> None:
+        if not self.connect_future.done():
+            if response.error:
+                self.connect_future.set_exception(response.error)
+            else:
+                self.connect_future.set_exception(
+                    WebSocketError("Non-websocket response")
+                )

-    def write_message(self, message: Union[str, bytes, Dict[str, Any]],
-        binary: bool=False) ->'Future[None]':
+    async def headers_received(
+        self,
+        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
+        headers: httputil.HTTPHeaders,
+    ) -> None:
+        assert isinstance(start_line, httputil.ResponseStartLine)
+        if start_line.code != 101:
+            await super().headers_received(start_line, headers)
+            return
+
+        if self._timeout is not None:
+            self.io_loop.remove_timeout(self._timeout)
+            self._timeout = None
+
+        self.headers = headers
+        self.protocol = self.get_websocket_protocol()
+        self.protocol._process_server_headers(self.key, self.headers)
+        self.protocol.stream = self.connection.detach()
+
+        IOLoop.current().add_callback(self.protocol._receive_frame_loop)
+        self.protocol.start_pinging()
+
+        # Once we've taken over the connection, clear the final callback
+        # we set on the http request.  This deactivates the error handling
+        # in simple_httpclient that would otherwise interfere with our
+        # ability to see exceptions.
+        self.final_callback = None  # type: ignore
+
+        future_set_result_unless_cancelled(self.connect_future, self)
+
+    def write_message(
+        self, message: Union[str, bytes, Dict[str, Any]], binary: bool = False
+    ) -> "Future[None]":
         """Sends a message to the WebSocket server.

         If the stream is closed, raises `WebSocketClosedError`.
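
A minimal sketch of the guard this behavior calls for on the caller's side, assuming `conn` is an already-connected `WebSocketClientConnection` (the helper name `send_safely` is hypothetical, not part of the patch):

    from tornado.websocket import WebSocketClosedError

    async def send_safely(conn, payload: str) -> bool:
        # After close(), conn.protocol is None, so write_message raises
        # WebSocketClosedError (no longer StreamClosedError), matching the
        # versionchanged note in this docstring.
        try:
            await conn.write_message(payload)
            return True
        except WebSocketClosedError:
            return False
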
@@ -679,11 +1501,14 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
            Exception raised on a closed stream changed from `.StreamClosedError`
            to `WebSocketClosedError`.
         """
-        pass
-
-    def read_message(self, callback: Optional[Callable[[
-        'Future[Union[None, str, bytes]]'], None]]=None) ->Awaitable[Union[
-        None, str, bytes]]:
+        if self.protocol is None:
+            raise WebSocketClosedError("Client connection has been closed")
+        return self.protocol.write_message(message, binary=binary)
+
+    def read_message(
+        self,
+        callback: Optional[Callable[["Future[Union[None, str, bytes]]"], None]] = None,
+    ) -> Awaitable[Union[None, str, bytes]]:
         """Reads a message from the WebSocket server.

         If on_message_callback was specified at WebSocket
@@ -694,9 +1519,25 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
         is given it will be called with the future when it is
         ready.
         """
-        pass
-
-    def ping(self, data: bytes=b'') ->None:
+        awaitable = self.read_queue.get()
+        if callback is not None:
+            self.io_loop.add_future(asyncio.ensure_future(awaitable), callback)
+        return awaitable
+
+    def on_message(self, message: Union[str, bytes]) -> Optional[Awaitable[None]]:
+        return self._on_message(message)
+
+    def _on_message(
+        self, message: Union[None, str, bytes]
+    ) -> Optional[Awaitable[None]]:
+        if self._on_message_callback:
+            self._on_message_callback(message)
+            return None
+        else:
+            return self.read_queue.put(message)
+
+    def ping(self, data: bytes = b"") -> None:
         """Send ping frame to the remote end.

         The data argument allows a small amount of data (up to 125
@@ -710,26 +1551,51 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
         .. versionadded:: 5.1

         """
+        data = utf8(data)
+        if self.protocol is None:
+            raise WebSocketClosedError()
+        self.protocol.write_ping(data)
+
+    def on_pong(self, data: bytes) -> None:
         pass

+    def on_ping(self, data: bytes) -> None:
+        pass
+
+    def get_websocket_protocol(self) -> WebSocketProtocol:
+        return WebSocketProtocol13(self, mask_outgoing=True, params=self.params)
+
     @property
-    def selected_subprotocol(self) ->Optional[str]:
+    def selected_subprotocol(self) -> Optional[str]:
         """The subprotocol selected by the server.

         .. versionadded:: 5.1
         """
-        pass
-
-
-def websocket_connect(url: Union[str, httpclient.HTTPRequest], callback:
-    Optional[Callable[['Future[WebSocketClientConnection]'], None]]=None,
-    connect_timeout: Optional[float]=None, on_message_callback: Optional[
-    Callable[[Union[None, str, bytes]], None]]=None, compression_options:
-    Optional[Dict[str, Any]]=None, ping_interval: Optional[float]=None,
-    ping_timeout: Optional[float]=None, max_message_size: int=
-    _default_max_message_size, subprotocols: Optional[List[str]]=None,
-    resolver: Optional[Resolver]=None
-    ) ->'Awaitable[WebSocketClientConnection]':
+        return self.protocol.selected_subprotocol
+
+    def log_exception(
+        self,
+        typ: "Optional[Type[BaseException]]",
+        value: Optional[BaseException],
+        tb: Optional[TracebackType],
+    ) -> None:
+        assert typ is not None
+        assert value is not None
+        app_log.error("Uncaught exception %s", value, exc_info=(typ, value, tb))
+
+
+def websocket_connect(
+    url: Union[str, httpclient.HTTPRequest],
+    callback: Optional[Callable[["Future[WebSocketClientConnection]"], None]] = None,
+    connect_timeout: Optional[float] = None,
+    on_message_callback: Optional[Callable[[Union[None, str, bytes]], None]] = None,
+    compression_options: Optional[Dict[str, Any]] = None,
+    ping_interval: Optional[float] = None,
+    ping_timeout: Optional[float] = None,
+    max_message_size: int = _default_max_message_size,
+    subprotocols: Optional[List[str]] = None,
+    resolver: Optional[Resolver] = None,
+) -> "Awaitable[WebSocketClientConnection]":
     """Client-side websocket support.

     Takes a url and returns a Future whose result is a
@@ -776,4 +1642,28 @@ def websocket_connect(url: Union[str, httpclient.HTTPRequest], callback:
     .. versionchanged:: 6.3
        Added the ``resolver`` argument.
     """
-    pass
+    if isinstance(url, httpclient.HTTPRequest):
+        assert connect_timeout is None
+        request = url
+        # Copy and convert the headers dict/object (see comments in
+        # AsyncHTTPClient.fetch)
+        request.headers = httputil.HTTPHeaders(request.headers)
+    else:
+        request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
+    request = cast(
+        httpclient.HTTPRequest,
+        httpclient._RequestProxy(request, httpclient.HTTPRequest._DEFAULTS),
+    )
+    conn = WebSocketClientConnection(
+        request,
+        on_message_callback=on_message_callback,
+        compression_options=compression_options,
+        ping_interval=ping_interval,
+        ping_timeout=ping_timeout,
+        max_message_size=max_message_size,
+        subprotocols=subprotocols,
+        resolver=resolver,
+    )
+    if callback is not None:
+        IOLoop.current().add_future(conn.connect_future, callback)
+    return conn.connect_future
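
Taken together, the restored pieces support the usual connect/write/read flow. A rough usage sketch, assuming an echo server at ws://localhost:8888/ws (an illustrative URL, not part of the patch):

    import asyncio

    from tornado.websocket import websocket_connect


    async def main() -> None:
        # websocket_connect returns conn.connect_future; awaiting it yields
        # the WebSocketClientConnection once headers_received resolves it.
        conn = await websocket_connect("ws://localhost:8888/ws", connect_timeout=5)
        try:
            await conn.write_message("hello")
            conn.ping(b"keepalive")  # up to 125 bytes of ping payload
            msg = await conn.read_message()  # None once the connection closes
            print("server said:", msg)
        finally:
            conn.close(code=1000, reason="done")


    if __name__ == "__main__":
        asyncio.run(main())

If on_message_callback is passed to websocket_connect instead, incoming messages are delivered through that callback and read_message should not be awaited, per its docstring above.
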
diff --git a/tornado/wsgi.py b/tornado/wsgi.py
index 227b0d77..32641be3 100644
--- a/tornado/wsgi.py
+++ b/tornado/wsgi.py
@@ -1,3 +1,18 @@
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
 """WSGI support for the Tornado web framework.

 WSGI is the Python standard for web servers, and allows for interoperability
@@ -11,25 +26,38 @@ the Tornado `.HTTPServer` and cannot be used in a generic WSGI
 container.

 """
+
 import concurrent.futures
 from io import BytesIO
 import tornado
 import sys
+
 from tornado.concurrent import dummy_executor
 from tornado import escape
 from tornado import httputil
 from tornado.ioloop import IOLoop
 from tornado.log import access_log
+
 from typing import List, Tuple, Optional, Callable, Any, Dict, Text
 from types import TracebackType
 import typing
+
 if typing.TYPE_CHECKING:
-    from typing import Type
-    from _typeshed.wsgi import WSGIApplication as WSGIAppType
+    from typing import Type  # noqa: F401
+    from _typeshed.wsgi import WSGIApplication as WSGIAppType  # noqa: F401
+
+
+# PEP 3333 specifies that WSGI on python 3 generally deals with byte strings
+# that are smuggled inside objects of type unicode (via the latin1 encoding).
+# This function is like those in the tornado.escape module, but defined
+# here to minimize the temptation to use it in non-wsgi contexts.
+def to_wsgi_str(s: bytes) -> str:
+    assert isinstance(s, bytes)
+    return s.decode("latin1")


 class WSGIContainer(object):
-    """Makes a WSGI-compatible application runnable on Tornado's HTTP server.
+    r"""Makes a WSGI-compatible application runnable on Tornado's HTTP server.

     .. warning::

@@ -56,7 +84,7 @@ class WSGIContainer(object):
             status = "200 OK"
             response_headers = [("Content-type", "text/plain")]
             start_response(status, response_headers)
-            return [b"Hello world!\\n"]
+            return [b"Hello world!\n"]

         async def main():
             container = tornado.wsgi.WSGIContainer(simple_app)
@@ -94,21 +122,147 @@ class WSGIContainer(object):
        is deprecated and will change in Tornado 7.0 to use a thread pool by default.
     """

-    def __init__(self, wsgi_application: 'WSGIAppType', executor: Optional[
-        concurrent.futures.Executor]=None) ->None:
+    def __init__(
+        self,
+        wsgi_application: "WSGIAppType",
+        executor: Optional[concurrent.futures.Executor] = None,
+    ) -> None:
         self.wsgi_application = wsgi_application
         self.executor = dummy_executor if executor is None else executor

-    def __call__(self, request: httputil.HTTPServerRequest) ->None:
+    def __call__(self, request: httputil.HTTPServerRequest) -> None:
         IOLoop.current().spawn_callback(self.handle_request, request)

-    def environ(self, request: httputil.HTTPServerRequest) ->Dict[Text, Any]:
+    async def handle_request(self, request: httputil.HTTPServerRequest) -> None:
+        data = {}  # type: Dict[str, Any]
+        response = []  # type: List[bytes]
+
+        def start_response(
+            status: str,
+            headers: List[Tuple[str, str]],
+            exc_info: Optional[
+                Tuple[
+                    "Optional[Type[BaseException]]",
+                    Optional[BaseException],
+                    Optional[TracebackType],
+                ]
+            ] = None,
+        ) -> Callable[[bytes], Any]:
+            data["status"] = status
+            data["headers"] = headers
+            return response.append
+
+        loop = IOLoop.current()
+        app_response = await loop.run_in_executor(
+            self.executor,
+            self.wsgi_application,
+            self.environ(request),
+            start_response,
+        )
+        try:
+            app_response_iter = iter(app_response)
+
+            def next_chunk() -> Optional[bytes]:
+                try:
+                    return next(app_response_iter)
+                except StopIteration:
+                    # StopIteration is special and is not allowed to pass through
+                    # coroutines normally.
+                    return None
+
+            while True:
+                chunk = await loop.run_in_executor(self.executor, next_chunk)
+                if chunk is None:
+                    break
+                response.append(chunk)
+        finally:
+            if hasattr(app_response, "close"):
+                app_response.close()  # type: ignore
+        body = b"".join(response)
+        if not data:
+            raise Exception("WSGI app did not call start_response")
+
+        status_code_str, reason = data["status"].split(" ", 1)
+        status_code = int(status_code_str)
+        headers = data["headers"]  # type: List[Tuple[str, str]]
+        header_set = set(k.lower() for (k, v) in headers)
+        body = escape.utf8(body)
+        if status_code != 304:
+            if "content-length" not in header_set:
+                headers.append(("Content-Length", str(len(body))))
+            if "content-type" not in header_set:
+                headers.append(("Content-Type", "text/html; charset=UTF-8"))
+        if "server" not in header_set:
+            headers.append(("Server", "TornadoServer/%s" % tornado.version))
+
+        start_line = httputil.ResponseStartLine("HTTP/1.1", status_code, reason)
+        header_obj = httputil.HTTPHeaders()
+        for key, value in headers:
+            header_obj.add(key, value)
+        assert request.connection is not None
+        request.connection.write_headers(start_line, header_obj, chunk=body)
+        request.connection.finish()
+        self._log(status_code, request)
+
+    def environ(self, request: httputil.HTTPServerRequest) -> Dict[Text, Any]:
         """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.

         .. versionchanged:: 6.3
            No longer a static method.
         """
-        pass
+        hostport = request.host.split(":")
+        if len(hostport) == 2:
+            host = hostport[0]
+            port = int(hostport[1])
+        else:
+            host = request.host
+            port = 443 if request.protocol == "https" else 80
+        environ = {
+            "REQUEST_METHOD": request.method,
+            "SCRIPT_NAME": "",
+            "PATH_INFO": to_wsgi_str(
+                escape.url_unescape(request.path, encoding=None, plus=False)
+            ),
+            "QUERY_STRING": request.query,
+            "REMOTE_ADDR": request.remote_ip,
+            "SERVER_NAME": host,
+            "SERVER_PORT": str(port),
+            "SERVER_PROTOCOL": request.version,
+            "wsgi.version": (1, 0),
+            "wsgi.url_scheme": request.protocol,
+            "wsgi.input": BytesIO(escape.utf8(request.body)),
+            "wsgi.errors": sys.stderr,
+            "wsgi.multithread": self.executor is not dummy_executor,
+            "wsgi.multiprocess": True,
+            "wsgi.run_once": False,
+        }
+        if "Content-Type" in request.headers:
+            environ["CONTENT_TYPE"] = request.headers.pop("Content-Type")
+        if "Content-Length" in request.headers:
+            environ["CONTENT_LENGTH"] = request.headers.pop("Content-Length")
+        for key, value in request.headers.items():
+            environ["HTTP_" + key.replace("-", "_").upper()] = value
+        return environ
+
+    def _log(self, status_code: int, request: httputil.HTTPServerRequest) -> None:
+        if status_code < 400:
+            log_method = access_log.info
+        elif status_code < 500:
+            log_method = access_log.warning
+        else:
+            log_method = access_log.error
+        request_time = 1000.0 * request.request_time()
+        assert request.method is not None
+        assert request.uri is not None
+        summary = (
+            request.method  # type: ignore[operator]
+            + " "
+            + request.uri
+            + " ("
+            + request.remote_ip
+            + ")"
+        )
+        log_method("%d %s %.2fms", status_code, summary, request_time)


 HTTPRequest = httputil.HTTPServerRequest
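
A short sketch of running a WSGI app under the restored container, mirroring the simple_app example in the class docstring; the port and thread-pool size are illustrative assumptions:

    import asyncio
    from concurrent.futures import ThreadPoolExecutor

    import tornado.httpserver
    import tornado.wsgi


    def simple_app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [b"Hello world!\n"]


    async def main() -> None:
        # Supplying an executor makes handle_request drive the WSGI app off
        # the IOLoop thread and flips "wsgi.multithread" to True in environ();
        # omitting it keeps the single-threaded dummy_executor default.
        container = tornado.wsgi.WSGIContainer(simple_app, executor=ThreadPoolExecutor(4))
        server = tornado.httpserver.HTTPServer(container)
        server.listen(8888)  # port chosen for illustration
        await asyncio.Event().wait()


    if __name__ == "__main__":
        asyncio.run(main())

Note that handle_request pulls the response iterable chunk by chunk via next_chunk on the executor, so a generator-based WSGI app also runs off the event-loop thread when an executor is given.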