Remove GPT.postman_collection.json and .venv from the repository
@@ -1,222 +0,0 @@
# don't import any costly modules
import sys
import os


is_pypy = '__pypy__' in sys.builtin_module_names


def warn_distutils_present():
    if 'distutils' not in sys.modules:
        return
    if is_pypy and sys.version_info < (3, 7):
        # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
        # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
        return
    import warnings

    warnings.warn(
        "Distutils was imported before Setuptools, but importing Setuptools "
        "also replaces the `distutils` module in `sys.modules`. This may lead "
        "to undesirable behaviors or errors. To avoid these issues, avoid "
        "using distutils directly, ensure that setuptools is installed in the "
        "traditional way (e.g. not an editable install), and/or make sure "
        "that setuptools is always imported before distutils."
    )


def clear_distutils():
    if 'distutils' not in sys.modules:
        return
    import warnings

    warnings.warn("Setuptools is replacing distutils.")
    mods = [
        name
        for name in sys.modules
        if name == "distutils" or name.startswith("distutils.")
    ]
    for name in mods:
        del sys.modules[name]


def enabled():
    """
    Allow selection of distutils by environment variable.
    """
    which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
    return which == 'local'


def ensure_local_distutils():
    import importlib

    clear_distutils()

    # With the DistutilsMetaFinder in place,
    # perform an import to cause distutils to be
    # loaded from setuptools._distutils. Ref #2906.
    with shim():
        importlib.import_module('distutils')

    # check that submodules load as expected
    core = importlib.import_module('distutils.core')
    assert '_distutils' in core.__file__, core.__file__
    assert 'setuptools._distutils.log' not in sys.modules


def do_override():
    """
    Ensure that the local copy of distutils is preferred over stdlib.

    See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
    for more motivation.
    """
    if enabled():
        warn_distutils_present()
        ensure_local_distutils()


class _TrivialRe:
    def __init__(self, *patterns):
        self._patterns = patterns

    def match(self, string):
        return all(pat in string for pat in self._patterns)


class DistutilsMetaFinder:
    def find_spec(self, fullname, path, target=None):
        # optimization: only consider top level modules and those
        # found in the CPython test suite.
        if path is not None and not fullname.startswith('test.'):
            return

        method_name = 'spec_for_{fullname}'.format(**locals())
        method = getattr(self, method_name, lambda: None)
        return method()

    def spec_for_distutils(self):
        if self.is_cpython():
            return

        import importlib
        import importlib.abc
        import importlib.util

        try:
            mod = importlib.import_module('setuptools._distutils')
        except Exception:
            # There are a couple of cases where setuptools._distutils
            # may not be present:
            # - An older Setuptools without a local distutils is
            #   taking precedence. Ref #2957.
            # - Path manipulation during sitecustomize removes
            #   setuptools from the path but only after the hook
            #   has been loaded. Ref #2980.
            # In either case, fall back to stdlib behavior.
            return

        class DistutilsLoader(importlib.abc.Loader):
            def create_module(self, spec):
                mod.__name__ = 'distutils'
                return mod

            def exec_module(self, module):
                pass

        return importlib.util.spec_from_loader(
            'distutils', DistutilsLoader(), origin=mod.__file__
        )

    @staticmethod
    def is_cpython():
        """
        Suppress supplying distutils for CPython (build and tests).
        Ref #2965 and #3007.
        """
        return os.path.isfile('pybuilddir.txt')

    def spec_for_pip(self):
        """
        Ensure stdlib distutils when running under pip.
        See pypa/pip#8761 for rationale.
        """
        if self.pip_imported_during_build():
            return
        clear_distutils()
        self.spec_for_distutils = lambda: None

    @classmethod
    def pip_imported_during_build(cls):
        """
        Detect if pip is being imported in a build script. Ref #2355.
        """
        import traceback

        return any(
            cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
        )

    @staticmethod
    def frame_file_is_setup(frame):
        """
        Return True if the indicated frame suggests a setup.py file.
        """
        # some frames may not have __file__ (#2940)
        return frame.f_globals.get('__file__', '').endswith('setup.py')

    def spec_for_sensitive_tests(self):
        """
        Ensure stdlib distutils when running select tests under CPython.

        python/cpython#91169
        """
        clear_distutils()
        self.spec_for_distutils = lambda: None

    sensitive_tests = (
        [
            'test.test_distutils',
            'test.test_peg_generator',
            'test.test_importlib',
        ]
        if sys.version_info < (3, 10)
        else [
            'test.test_distutils',
        ]
    )


for name in DistutilsMetaFinder.sensitive_tests:
    setattr(
        DistutilsMetaFinder,
        f'spec_for_{name}',
        DistutilsMetaFinder.spec_for_sensitive_tests,
    )


DISTUTILS_FINDER = DistutilsMetaFinder()


def add_shim():
    DISTUTILS_FINDER in sys.meta_path or insert_shim()


class shim:
    def __enter__(self):
        insert_shim()

    def __exit__(self, exc, value, tb):
        remove_shim()


def insert_shim():
    sys.meta_path.insert(0, DISTUTILS_FINDER)


def remove_shim():
    try:
        sys.meta_path.remove(DISTUTILS_FINDER)
    except ValueError:
        pass
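
The module above installs a finder at the front of sys.meta_path so that a later
"import distutils" is answered with setuptools._distutils. As a minimal standalone
sketch of that interception technique (the 'demo' module name and the stdlib 'json'
stand-in are illustrative assumptions, not part of the removed file):

# Sketch: a meta path finder that redirects one import to an existing module.
import importlib
import importlib.abc
import importlib.util
import sys


class RedirectFinder(importlib.abc.MetaPathFinder):
    def find_spec(self, fullname, path, target=None):
        if fullname != 'demo':
            return None  # only intercept the one top-level name
        mod = importlib.import_module('json')  # stand-in replacement module

        class Loader(importlib.abc.Loader):
            def create_module(self, spec):
                return mod  # hand the import system a pre-built module object

            def exec_module(self, module):
                pass  # already initialized, nothing to execute

        return importlib.util.spec_from_loader('demo', Loader(), origin=mod.__file__)


sys.meta_path.insert(0, RedirectFinder())

import demo  # resolved by RedirectFinder; actually the stdlib json module

assert demo.dumps({"ok": True}) == '{"ok": true}'
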
@@ -1 +0,0 @@
__import__('_distutils_hack').do_override()
@@ -1 +0,0 @@
pip
@@ -1,202 +0,0 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "{}"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright {yyyy} {name of copyright owner}

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
@@ -1,239 +0,0 @@
Metadata-Version: 2.1
Name: aiofiles
Version: 0.8.0
Summary: File support for asyncio.
License: Apache-2.0
Author: Tin Tvrtkovic
Author-email: tinchester@gmail.com
Requires-Python: >=3.6,<4.0
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Description-Content-Type: text/x-rst

aiofiles: file support for asyncio
==================================

.. image:: https://img.shields.io/pypi/v/aiofiles.svg
   :target: https://pypi.python.org/pypi/aiofiles

.. image:: https://travis-ci.org/Tinche/aiofiles.svg?branch=master
   :target: https://travis-ci.org/Tinche/aiofiles

.. image:: https://codecov.io/gh/Tinche/aiofiles/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/Tinche/aiofiles

.. image:: https://img.shields.io/pypi/pyversions/aiofiles.svg
   :target: https://github.com/Tinche/aiofiles
   :alt: Supported Python versions

**aiofiles** is an Apache2 licensed library, written in Python, for handling local
disk files in asyncio applications.

Ordinary local file IO is blocking, and cannot easily and portably be made
asynchronous. This means doing file IO may interfere with asyncio applications,
which shouldn't block the executing thread. aiofiles helps with this by
introducing asynchronous versions of files that support delegating operations to
a separate thread pool.

.. code-block:: python

    async with aiofiles.open('filename', mode='r') as f:
        contents = await f.read()
    print(contents)
    'My file contents'

Asynchronous iteration is also supported.

.. code-block:: python

    async with aiofiles.open('filename') as f:
        async for line in f:
            ...

An asynchronous interface to the tempfile module is provided as well.

.. code-block:: python

    async with aiofiles.tempfile.TemporaryFile('wb') as f:
        await f.write(b'Hello, World!')


Features
--------

- a file API very similar to Python's standard, blocking API
- support for buffered and unbuffered binary files, and buffered text files
- support for ``async``/``await`` (:PEP:`492`) constructs
- async interface to the tempfile module


Installation
------------

To install aiofiles, simply:

.. code-block:: bash

    $ pip install aiofiles

Usage
-----

Files are opened using the ``aiofiles.open()`` coroutine, which in addition to
mirroring the builtin ``open`` accepts optional ``loop`` and ``executor``
arguments. If ``loop`` is absent, the default loop will be used, as per the
set asyncio policy. If ``executor`` is not specified, the default event loop
executor will be used.
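
For example, file IO can be confined to a dedicated thread pool by passing the
executor explicitly (a minimal sketch; the pool size here is an arbitrary
choice, not something this README prescribes):

.. code-block:: python

    import asyncio
    from concurrent.futures import ThreadPoolExecutor

    import aiofiles

    async def main():
        pool = ThreadPoolExecutor(max_workers=2)  # dedicated pool for file IO
        async with aiofiles.open('filename', mode='r', executor=pool) as f:
            contents = await f.read()
        print(contents)

    asyncio.run(main())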

In case of success, an asynchronous file object is returned with an
API identical to an ordinary file, except the following methods are coroutines
and delegate to an executor:

* ``close``
* ``flush``
* ``isatty``
* ``read``
* ``readall``
* ``read1``
* ``readinto``
* ``readline``
* ``readlines``
* ``seek``
* ``seekable``
* ``tell``
* ``truncate``
* ``writable``
* ``write``
* ``writelines``

In case of failure, one of the usual exceptions will be raised.

The ``aiofiles.os`` module contains executor-enabled coroutine versions of
several useful ``os`` functions that deal with files:

* ``stat``
* ``sendfile``
* ``rename``
* ``replace``
* ``remove``
* ``mkdir``
* ``makedirs``
* ``rmdir``
* ``removedirs``
* ``path.exists``
* ``path.isfile``
* ``path.isdir``
* ``path.getsize``
* ``path.getatime``
* ``path.getctime``
* ``path.samefile``
* ``path.sameopenfile``
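
A small usage sketch of these wrappers (assuming only the public API listed
above; the file name is hypothetical):

.. code-block:: python

    import asyncio

    import aiofiles.os

    async def main():
        if await aiofiles.os.path.exists('filename'):
            print(await aiofiles.os.path.getsize('filename'))

    asyncio.run(main())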

Tempfile
~~~~~~~~

**aiofiles.tempfile** implements the following interfaces:

- TemporaryFile
- NamedTemporaryFile
- SpooledTemporaryFile
- TemporaryDirectory

Results are returned wrapped in a context manager, allowing use with ``async with`` and ``async for``.

.. code-block:: python

    async with aiofiles.tempfile.NamedTemporaryFile('wb+') as f:
        await f.write(b'Line1\n Line2')
        await f.seek(0)
        async for line in f:
            print(line)

    async with aiofiles.tempfile.TemporaryDirectory() as d:
        filename = os.path.join(d, "file.ext")


Writing tests for aiofiles
~~~~~~~~~~~~~~~~~~~~~~~~~~

Real file IO can be mocked by patching ``aiofiles.threadpool.sync_open``
as desired. The return type also needs to be registered with the
``aiofiles.threadpool.wrap`` dispatcher:

.. code-block:: python

    aiofiles.threadpool.wrap.register(mock.MagicMock)(
        lambda *args, **kwargs: threadpool.AsyncBufferedIOBase(*args, **kwargs))

    async def test_stuff():
        data = 'data'
        mock_file = mock.MagicMock()

        with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file) as mock_open:
            async with aiofiles.open('filename', 'w') as f:
                await f.write(data)

            mock_file.write.assert_called_once_with(data)

History
~~~~~~~
0.8.0 (2021-11-27)
``````````````````
* aiofiles is now tested on Python 3.10.
* Added ``aiofiles.os.replace``.
  `#107 <https://github.com/Tinche/aiofiles/pull/107>`_
* Added ``aiofiles.os.{makedirs, removedirs}``.
* Added ``aiofiles.os.path.{exists, isfile, isdir, getsize, getatime, getctime, samefile, sameopenfile}``.
  `#63 <https://github.com/Tinche/aiofiles/pull/63>`_
* Added `suffix`, `prefix`, `dir` args to ``aiofiles.tempfile.TemporaryDirectory``.
  `#116 <https://github.com/Tinche/aiofiles/pull/116>`_

0.7.0 (2021-05-17)
``````````````````
- Added the ``aiofiles.tempfile`` module for async temporary files.
  `#56 <https://github.com/Tinche/aiofiles/pull/56>`_
- Switched to Poetry and GitHub actions.
- Dropped 3.5 support.

0.6.0 (2020-10-27)
``````````````````
- `aiofiles` is now tested on ppc64le.
- Added `name` and `mode` properties to async file objects.
  `#82 <https://github.com/Tinche/aiofiles/pull/82>`_
- Fixed a DeprecationWarning internally.
  `#75 <https://github.com/Tinche/aiofiles/pull/75>`_
- Python 3.9 support and tests.

0.5.0 (2020-04-12)
``````````````````
- Python 3.8 support. Code base modernization (using ``async/await`` instead of ``asyncio.coroutine``/``yield from``).
- Added ``aiofiles.os.remove``, ``aiofiles.os.rename``, ``aiofiles.os.mkdir``, ``aiofiles.os.rmdir``.
  `#62 <https://github.com/Tinche/aiofiles/pull/62>`_


0.4.0 (2018-08-11)
``````````````````
- Python 3.7 support.
- Removed Python 3.3/3.4 support. If you use these versions, stick to aiofiles 0.3.x.

0.3.2 (2017-09-23)
``````````````````
- The LICENSE is now included in the sdist.
  `#31 <https://github.com/Tinche/aiofiles/pull/31>`_

0.3.1 (2017-03-10)
``````````````````

- Introduced a changelog.
- ``aiofiles.os.sendfile`` will now work if the standard ``os`` module contains a ``sendfile`` function.

Contributing
~~~~~~~~~~~~
Contributions are very welcome. Tests can be run with ``tox``; please ensure
the coverage at least stays the same before you submit a pull request.

@@ -1,26 +0,0 @@
aiofiles-0.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
aiofiles-0.8.0.dist-info/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325
aiofiles-0.8.0.dist-info/METADATA,sha256=9WSIVaJhAbMi93mj4ir3932H5Je3JxAcUvH5hgyD0qU,7022
aiofiles-0.8.0.dist-info/RECORD,,
aiofiles-0.8.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
aiofiles-0.8.0.dist-info/WHEEL,sha256=FMw6u7Kp3jrdDDtJRZ3hkPV-9adLQT8pbM4iifKUGfw,85
aiofiles/__init__.py,sha256=6fPaAw6PXV8rszheTedkYcoLHC3KbbPH--eMODJ_-IE,136
aiofiles/__pycache__/__init__.cpython-310.pyc,,
aiofiles/__pycache__/base.cpython-310.pyc,,
aiofiles/__pycache__/os.cpython-310.pyc,,
aiofiles/__pycache__/ospath.cpython-310.pyc,,
aiofiles/base.py,sha256=_ntjFm4olO13y9JEQchLudmgPJ4mAFug5MRi6Efk7g0,2114
aiofiles/os.py,sha256=A8vZ0paqH14JaepvEdfSCv6BAFoV4D1LVmnKfkEG5tQ,719
aiofiles/ospath.py,sha256=cIyDMoGVHTqKoKFXfXVzuIROJQQRmeNqVuNuc9QgPY0,387
aiofiles/tempfile/__init__.py,sha256=y6siScSCxNkydsoAN0OlxSwvgqpk1XoVTvJ0gTvAmTk,7234
aiofiles/tempfile/__pycache__/__init__.cpython-310.pyc,,
aiofiles/tempfile/__pycache__/temptypes.cpython-310.pyc,,
aiofiles/tempfile/temptypes.py,sha256=ZwWLAV3eiWWW0QoRDPHN2U7XPf0DUQFYPRid7o4Sk9s,2169
aiofiles/threadpool/__init__.py,sha256=JaRiXZRaVvx32RvrUXwgZxAByrzqdmDcxi6t_P2iP6w,2273
aiofiles/threadpool/__pycache__/__init__.cpython-310.pyc,,
aiofiles/threadpool/__pycache__/binary.cpython-310.pyc,,
aiofiles/threadpool/__pycache__/text.cpython-310.pyc,,
aiofiles/threadpool/__pycache__/utils.cpython-310.pyc,,
aiofiles/threadpool/binary.py,sha256=tRdJnH6ragF5Kr13oIBPJrljgTl3hWSOaHSXfHESRBk,1167
aiofiles/threadpool/text.py,sha256=dNweKCxpwRgfqA6XCiWYDDddOTa0lbzH-Fh5o3rho-8,665
aiofiles/threadpool/utils.py,sha256=fcqvRBrcIk2qP-rOdm92zDHyCfgD6itS621V6oOLJwk,1912
@@ -1,4 +0,0 @@
Wheel-Version: 1.0
Generator: poetry 1.1.0a6
Root-Is-Purelib: true
Tag: py3-none-any
@@ -1,5 +0,0 @@
"""Utilities for asyncio-friendly file handling."""
from .threadpool import open
from . import tempfile

__all__ = ["open", "tempfile"]
@@ -1,91 +0,0 @@
"""Various base classes."""
from types import coroutine
from collections.abc import Coroutine


class AsyncBase:
    def __init__(self, file, loop, executor):
        self._file = file
        self._loop = loop
        self._executor = executor

    def __aiter__(self):
        """We are our own iterator."""
        return self

    def __repr__(self):
        return super().__repr__() + " wrapping " + repr(self._file)

    async def __anext__(self):
        """Simulate normal file iteration."""
        line = await self.readline()
        if line:
            return line
        else:
            raise StopAsyncIteration


class _ContextManager(Coroutine):
    __slots__ = ("_coro", "_obj")

    def __init__(self, coro):
        self._coro = coro
        self._obj = None

    def send(self, value):
        return self._coro.send(value)

    def throw(self, typ, val=None, tb=None):
        if val is None:
            return self._coro.throw(typ)
        elif tb is None:
            return self._coro.throw(typ, val)
        else:
            return self._coro.throw(typ, val, tb)

    def close(self):
        return self._coro.close()

    @property
    def gi_frame(self):
        return self._coro.gi_frame

    @property
    def gi_running(self):
        return self._coro.gi_running

    @property
    def gi_code(self):
        return self._coro.gi_code

    def __next__(self):
        return self.send(None)

    @coroutine
    def __iter__(self):
        resp = yield from self._coro
        return resp

    def __await__(self):
        resp = yield from self._coro
        return resp

    async def __anext__(self):
        resp = await self._coro
        return resp

    async def __aenter__(self):
        self._obj = await self._coro
        return self._obj

    async def __aexit__(self, exc_type, exc, tb):
        self._obj.close()
        self._obj = None


class AiofilesContextManager(_ContextManager):
    """An adjusted async context manager for aiofiles."""

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self._obj.close()
        self._obj = None
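
Because _ContextManager implements both the coroutine protocol (send/throw/__await__)
and the async context manager protocol (__aenter__/__aexit__), a single call such as
aiofiles.open(...) can either be awaited directly or used in an "async with" block.
A usage sketch of both styles (the file name is hypothetical):

# Sketch: the two call styles enabled by AiofilesContextManager.
import asyncio

import aiofiles


async def main():
    # Style 1: await the call; the caller closes the file explicitly.
    f = await aiofiles.open('example.txt', mode='w')
    await f.write('hello')
    await f.close()

    # Style 2: async with; __aexit__ awaits close() automatically.
    async with aiofiles.open('example.txt', mode='r') as f:
        print(await f.read())


asyncio.run(main())
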
@@ -1,31 +0,0 @@
"""Async executor versions of file functions from the os module."""
import asyncio
from functools import partial, wraps
import os


def wrap(func):
    @wraps(func)
    async def run(*args, loop=None, executor=None, **kwargs):
        if loop is None:
            loop = asyncio.get_event_loop()
        pfunc = partial(func, *args, **kwargs)
        return await loop.run_in_executor(executor, pfunc)

    return run


from . import ospath as path


stat = wrap(os.stat)
rename = wrap(os.rename)
replace = wrap(os.replace)
remove = wrap(os.remove)
mkdir = wrap(os.mkdir)
makedirs = wrap(os.makedirs)
rmdir = wrap(os.rmdir)
removedirs = wrap(os.removedirs)

if hasattr(os, "sendfile"):
    sendfile = wrap(os.sendfile)
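
wrap() turns any blocking callable into a coroutine function that runs in the
loop's executor, which is how every function above becomes awaitable. The same
pattern extends to other blocking calls; a sketch (wrapping os.listdir here is
an arbitrary illustration, not part of the module):

# Sketch: reusing the executor-wrapping pattern on another blocking os function.
import asyncio
import os

import aiofiles.os


async def main():
    listdir = aiofiles.os.wrap(os.listdir)
    print(await listdir('.'))  # os.listdir runs in the default executor


asyncio.run(main())
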
@@ -1,14 +0,0 @@
"""Async executor versions of file functions from the os.path module."""

from .os import wrap
from os import path

exists = wrap(path.exists)
isfile = wrap(path.isfile)
isdir = wrap(path.isdir)
getsize = wrap(path.getsize)
getmtime = wrap(path.getmtime)
getatime = wrap(path.getatime)
getctime = wrap(path.getctime)
samefile = wrap(path.samefile)
sameopenfile = wrap(path.sameopenfile)
@@ -1,263 +0,0 @@
# Imports
import asyncio
from tempfile import (
    TemporaryFile as syncTemporaryFile,
    NamedTemporaryFile as syncNamedTemporaryFile,
    SpooledTemporaryFile as syncSpooledTemporaryFile,
    TemporaryDirectory as syncTemporaryDirectory,
    _TemporaryFileWrapper as syncTemporaryFileWrapper,
)
from io import FileIO, TextIOBase, BufferedReader, BufferedWriter, BufferedRandom
from functools import partial, singledispatch
from ..base import AiofilesContextManager
from ..threadpool.text import AsyncTextIOWrapper
from ..threadpool.binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO
from .temptypes import AsyncSpooledTemporaryFile, AsyncTemporaryDirectory

__all__ = [
    "NamedTemporaryFile",
    "TemporaryFile",
    "SpooledTemporaryFile",
    "TemporaryDirectory",
]


# ================================================================
# Public methods for async open and return of temp file/directory
# objects with async interface
# ================================================================
def NamedTemporaryFile(
    mode="w+b",
    buffering=-1,
    encoding=None,
    newline=None,
    suffix=None,
    prefix=None,
    dir=None,
    delete=True,
    loop=None,
    executor=None,
):
    """Async open a named temporary file"""
    return AiofilesContextManager(
        _temporary_file(
            named=True,
            mode=mode,
            buffering=buffering,
            encoding=encoding,
            newline=newline,
            suffix=suffix,
            prefix=prefix,
            dir=dir,
            delete=delete,
            loop=loop,
            executor=executor,
        )
    )


def TemporaryFile(
    mode="w+b",
    buffering=-1,
    encoding=None,
    newline=None,
    suffix=None,
    prefix=None,
    dir=None,
    loop=None,
    executor=None,
):
    """Async open an unnamed temporary file"""
    return AiofilesContextManager(
        _temporary_file(
            named=False,
            mode=mode,
            buffering=buffering,
            encoding=encoding,
            newline=newline,
            suffix=suffix,
            prefix=prefix,
            dir=dir,
            loop=loop,
            executor=executor,
        )
    )


def SpooledTemporaryFile(
    max_size=0,
    mode="w+b",
    buffering=-1,
    encoding=None,
    newline=None,
    suffix=None,
    prefix=None,
    dir=None,
    loop=None,
    executor=None,
):
    """Async open a spooled temporary file"""
    return AiofilesContextManager(
        _spooled_temporary_file(
            max_size=max_size,
            mode=mode,
            buffering=buffering,
            encoding=encoding,
            newline=newline,
            suffix=suffix,
            prefix=prefix,
            dir=dir,
            loop=loop,
            executor=executor,
        )
    )


def TemporaryDirectory(suffix=None, prefix=None, dir=None, loop=None, executor=None):
    """Async open a temporary directory"""
    return AiofilesContextManagerTempDir(
        _temporary_directory(
            suffix=suffix, prefix=prefix, dir=dir, loop=loop, executor=executor
        )
    )


# =========================================================
# Internal coroutines to open new temp files/directories
# =========================================================
async def _temporary_file(
    named=True,
    mode="w+b",
    buffering=-1,
    encoding=None,
    newline=None,
    suffix=None,
    prefix=None,
    dir=None,
    delete=True,
    loop=None,
    executor=None,
    max_size=0,
):
    """Async method to open a temporary file with async interface"""
    if loop is None:
        loop = asyncio.get_event_loop()

    if named:
        cb = partial(
            syncNamedTemporaryFile,
            mode=mode,
            buffering=buffering,
            encoding=encoding,
            newline=newline,
            suffix=suffix,
            prefix=prefix,
            dir=dir,
            delete=delete,
        )
    else:
        cb = partial(
            syncTemporaryFile,
            mode=mode,
            buffering=buffering,
            encoding=encoding,
            newline=newline,
            suffix=suffix,
            prefix=prefix,
            dir=dir,
        )

    f = await loop.run_in_executor(executor, cb)

    # Wrap based on type of underlying IO object
    if type(f) is syncTemporaryFileWrapper:
        # _TemporaryFileWrapper was used (named files)
        result = wrap(f.file, f, loop=loop, executor=executor)
        # add delete property
        result.delete = f.delete
        return result
    else:
        # IO object was returned directly without wrapper
        return wrap(f, f, loop=loop, executor=executor)


async def _spooled_temporary_file(
    max_size=0,
    mode="w+b",
    buffering=-1,
    encoding=None,
    newline=None,
    suffix=None,
    prefix=None,
    dir=None,
    loop=None,
    executor=None,
):
    """Open a spooled temporary file with async interface"""
    if loop is None:
        loop = asyncio.get_event_loop()

    cb = partial(
        syncSpooledTemporaryFile,
        max_size=max_size,
        mode=mode,
        buffering=buffering,
        encoding=encoding,
        newline=newline,
        suffix=suffix,
        prefix=prefix,
        dir=dir,
    )

    f = await loop.run_in_executor(executor, cb)

    # Single interface provided by SpooledTemporaryFile for all modes
    return AsyncSpooledTemporaryFile(f, loop=loop, executor=executor)


async def _temporary_directory(
    suffix=None, prefix=None, dir=None, loop=None, executor=None
):
    """Async method to open a temporary directory with async interface"""
    if loop is None:
        loop = asyncio.get_event_loop()

    cb = partial(syncTemporaryDirectory, suffix, prefix, dir)
    f = await loop.run_in_executor(executor, cb)

    return AsyncTemporaryDirectory(f, loop=loop, executor=executor)


class AiofilesContextManagerTempDir(AiofilesContextManager):
    """With returns the directory location, not the object (matching sync lib)"""

    async def __aenter__(self):
        self._obj = await self._coro
        return self._obj.name


@singledispatch
def wrap(base_io_obj, file, *, loop=None, executor=None):
    """Wrap the object with interface based on type of underlying IO"""
    raise TypeError("Unsupported IO type: {}".format(base_io_obj))


@wrap.register(TextIOBase)
def _(base_io_obj, file, *, loop=None, executor=None):
    return AsyncTextIOWrapper(file, loop=loop, executor=executor)


@wrap.register(BufferedWriter)
def _(base_io_obj, file, *, loop=None, executor=None):
    return AsyncBufferedIOBase(file, loop=loop, executor=executor)


@wrap.register(BufferedReader)
@wrap.register(BufferedRandom)
def _(base_io_obj, file, *, loop=None, executor=None):
    return AsyncBufferedReader(file, loop=loop, executor=executor)


@wrap.register(FileIO)
def _(base_io_obj, file, *, loop=None, executor=None):
    return AsyncFileIO(file, loop=loop, executor=executor)
@@ -1,74 +0,0 @@
"""Async wrappers for spooled temp files and temp directory objects"""

# Imports
import asyncio
from types import coroutine

from ..base import AsyncBase
from ..threadpool.utils import (
    delegate_to_executor,
    proxy_property_directly,
    cond_delegate_to_executor,
)
from functools import partial


@delegate_to_executor("fileno", "rollover")
@cond_delegate_to_executor(
    "close",
    "flush",
    "isatty",
    "newlines",
    "read",
    "readline",
    "readlines",
    "seek",
    "tell",
    "truncate",
)
@proxy_property_directly("closed", "encoding", "mode", "name", "softspace")
class AsyncSpooledTemporaryFile(AsyncBase):
    """Async wrapper for SpooledTemporaryFile class"""

    async def _check(self):
        if self._file._rolled:
            return
        max_size = self._file._max_size
        if max_size and self._file.tell() > max_size:
            await self.rollover()

    async def write(self, s):
        """Implementation to anticipate rollover"""
        if self._file._rolled:
            cb = partial(self._file.write, s)
            return await self._loop.run_in_executor(self._executor, cb)
        else:
            file = self._file._file  # reference underlying base IO object
            rv = file.write(s)
            await self._check()
            return rv

    async def writelines(self, iterable):
        """Implementation to anticipate rollover"""
        if self._file._rolled:
            cb = partial(self._file.writelines, iterable)
            return await self._loop.run_in_executor(self._executor, cb)
        else:
            file = self._file._file  # reference underlying base IO object
            rv = file.writelines(iterable)
            await self._check()
            return rv


@delegate_to_executor("cleanup")
@proxy_property_directly("name")
class AsyncTemporaryDirectory:
    """Async wrapper for TemporaryDirectory class"""

    def __init__(self, file, loop, executor):
        self._file = file
        self._loop = loop
        self._executor = executor

    async def close(self):
        await self.cleanup()
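
The overridden write/writelines above bypass the executor while the data is
still spooled in memory, then call _check() so the file still rolls over to
disk once max_size is exceeded. A usage sketch (the size values are arbitrary;
_file._rolled is an internal attribute, inspected here only for illustration):

# Sketch: a spooled temp file rolling over to a real on-disk file.
import asyncio

import aiofiles.tempfile


async def main():
    async with aiofiles.tempfile.SpooledTemporaryFile(max_size=16) as f:
        await f.write(b'x' * 32)  # exceeds max_size, so _check() triggers rollover
        print(f._file._rolled)    # True once backed by a real file


asyncio.run(main())
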
@@ -1,108 +0,0 @@
"""Handle files using a thread pool executor."""
import asyncio
from types import coroutine

from io import (
    FileIO,
    TextIOBase,
    BufferedReader,
    BufferedWriter,
    BufferedRandom,
)
from functools import partial, singledispatch

from .binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO
from .text import AsyncTextIOWrapper
from ..base import AiofilesContextManager

sync_open = open

__all__ = ("open",)


def open(
    file,
    mode="r",
    buffering=-1,
    encoding=None,
    errors=None,
    newline=None,
    closefd=True,
    opener=None,
    *,
    loop=None,
    executor=None
):
    return AiofilesContextManager(
        _open(
            file,
            mode=mode,
            buffering=buffering,
            encoding=encoding,
            errors=errors,
            newline=newline,
            closefd=closefd,
            opener=opener,
            loop=loop,
            executor=executor,
        )
    )


@coroutine
def _open(
    file,
    mode="r",
    buffering=-1,
    encoding=None,
    errors=None,
    newline=None,
    closefd=True,
    opener=None,
    *,
    loop=None,
    executor=None
):
    """Open an asyncio file."""
    if loop is None:
        loop = asyncio.get_event_loop()
    cb = partial(
        sync_open,
        file,
        mode=mode,
        buffering=buffering,
        encoding=encoding,
        errors=errors,
        newline=newline,
        closefd=closefd,
        opener=opener,
    )
    f = yield from loop.run_in_executor(executor, cb)

    return wrap(f, loop=loop, executor=executor)


@singledispatch
def wrap(file, *, loop=None, executor=None):
    raise TypeError("Unsupported io type: {}.".format(file))


@wrap.register(TextIOBase)
def _(file, *, loop=None, executor=None):
    return AsyncTextIOWrapper(file, loop=loop, executor=executor)


@wrap.register(BufferedWriter)
def _(file, *, loop=None, executor=None):
    return AsyncBufferedIOBase(file, loop=loop, executor=executor)


@wrap.register(BufferedReader)
@wrap.register(BufferedRandom)
def _(file, *, loop=None, executor=None):
    return AsyncBufferedReader(file, loop=loop, executor=executor)


@wrap.register(FileIO)
def _(file, *, loop=None, executor=None):
    return AsyncFileIO(file, loop, executor)
@@ -1,57 +0,0 @@
from ..base import AsyncBase
from .utils import (
    delegate_to_executor,
    proxy_method_directly,
    proxy_property_directly,
)


@delegate_to_executor(
    "close",
    "flush",
    "isatty",
    "read",
    "read1",
    "readinto",
    "readline",
    "readlines",
    "seek",
    "seekable",
    "tell",
    "truncate",
    "writable",
    "write",
    "writelines",
)
@proxy_method_directly("detach", "fileno", "readable")
@proxy_property_directly("closed", "raw", "name", "mode")
class AsyncBufferedIOBase(AsyncBase):
    """The asyncio executor version of io.BufferedWriter."""


@delegate_to_executor("peek")
class AsyncBufferedReader(AsyncBufferedIOBase):
    """The asyncio executor version of io.BufferedReader and Random."""


@delegate_to_executor(
    "close",
    "flush",
    "isatty",
    "read",
    "readall",
    "readinto",
    "readline",
    "readlines",
    "seek",
    "seekable",
    "tell",
    "truncate",
    "writable",
    "write",
    "writelines",
)
@proxy_method_directly("fileno", "readable")
@proxy_property_directly("closed", "name", "mode")
class AsyncFileIO(AsyncBase):
    """The asyncio executor version of io.FileIO."""
@@ -1,37 +0,0 @@
from ..base import AsyncBase
from .utils import (
    delegate_to_executor,
    proxy_method_directly,
    proxy_property_directly,
)


@delegate_to_executor(
    "close",
    "flush",
    "isatty",
    "read",
    "readable",
    "readline",
    "readlines",
    "seek",
    "seekable",
    "tell",
    "truncate",
    "write",
    "writable",
    "writelines",
)
@proxy_method_directly("detach", "fileno", "readable")
@proxy_property_directly(
    "buffer",
    "closed",
    "encoding",
    "errors",
    "line_buffering",
    "newlines",
    "name",
    "mode",
)
class AsyncTextIOWrapper(AsyncBase):
    """The asyncio executor version of io.TextIOWrapper."""
@@ -1,74 +0,0 @@
import functools
from types import coroutine


def delegate_to_executor(*attrs):
    def cls_builder(cls):
        for attr_name in attrs:
            setattr(cls, attr_name, _make_delegate_method(attr_name))
        return cls

    return cls_builder


def proxy_method_directly(*attrs):
    def cls_builder(cls):
        for attr_name in attrs:
            setattr(cls, attr_name, _make_proxy_method(attr_name))
        return cls

    return cls_builder


def proxy_property_directly(*attrs):
    def cls_builder(cls):
        for attr_name in attrs:
            setattr(cls, attr_name, _make_proxy_property(attr_name))
        return cls

    return cls_builder


def cond_delegate_to_executor(*attrs):
    def cls_builder(cls):
        for attr_name in attrs:
            setattr(cls, attr_name, _make_cond_delegate_method(attr_name))
        return cls

    return cls_builder


def _make_delegate_method(attr_name):
    @coroutine
    def method(self, *args, **kwargs):
        cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs)
        return (yield from self._loop.run_in_executor(self._executor, cb))

    return method


def _make_proxy_method(attr_name):
    def method(self, *args, **kwargs):
        return getattr(self._file, attr_name)(*args, **kwargs)

    return method


def _make_proxy_property(attr_name):
    def proxy_property(self):
        return getattr(self._file, attr_name)

    return property(proxy_property)


def _make_cond_delegate_method(attr_name):
    """For spooled temp files, delegate only if rolled to file object"""

    async def method(self, *args, **kwargs):
        if self._file._rolled:
            cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs)
            return await self._loop.run_in_executor(self._executor, cb)
        else:
            return getattr(self._file, attr_name)(*args, **kwargs)

    return method
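
These class decorators stamp generated methods onto the wrapper classes at
definition time; _make_delegate_method is the piece that ships each call to the
executor. A self-contained sketch of the same technique (the Sleeper class and
its use of time.sleep are hypothetical illustrations, not part of the module):

# Sketch: the delegate-to-executor class-decorator technique, standalone.
import asyncio
import functools
import time


def _make_delegate_method(attr_name):
    async def method(self, *args, **kwargs):
        cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs)
        return await self._loop.run_in_executor(self._executor, cb)

    return method


def delegate_to_executor(*attrs):
    def cls_builder(cls):
        for attr_name in attrs:
            setattr(cls, attr_name, _make_delegate_method(attr_name))
        return cls

    return cls_builder


@delegate_to_executor('sleep')
class Sleeper:
    def __init__(self):
        self._file = time  # any object exposing the delegated attribute
        self._loop = asyncio.get_event_loop()
        self._executor = None  # default executor


async def main():
    await Sleeper().sleep(0.05)  # time.sleep(0.05) runs in a worker thread


asyncio.run(main())
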
@@ -1 +0,0 @@
pip
@@ -1,279 +0,0 @@
A. HISTORY OF THE SOFTWARE
==========================

Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
as a successor of a language called ABC.  Guido remains Python's
principal author, although it includes many contributions from others.

In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.

In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
year, the PythonLabs team moved to Digital Creations, which became
Zope Corporation.  In 2001, the Python Software Foundation (PSF, see
https://www.python.org/psf/) was formed, a non-profit organization
created specifically to own Python-related Intellectual Property.
Zope Corporation was a sponsoring member of the PSF.

All Python releases are Open Source (see https://opensource.org for
the Open Source Definition).  Historically, most, but not all, Python
releases have also been GPL-compatible; the table below summarizes
the various releases.

    Release         Derived     Year        Owner       GPL-
                    from                                compatible? (1)

    0.9.0 thru 1.2              1991-1995   CWI         yes
    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
    1.6             1.5.2       2000        CNRI        no
    2.0             1.6         2000        BeOpen.com  no
    1.6.1           1.6         2001        CNRI        yes (2)
    2.1             2.0+1.6.1   2001        PSF         no
    2.0.1           2.0+1.6.1   2001        PSF         yes
    2.1.1           2.1+2.0.1   2001        PSF         yes
    2.1.2           2.1.1       2002        PSF         yes
    2.1.3           2.1.2       2002        PSF         yes
    2.2 and above   2.1.1       2001-now    PSF         yes

Footnotes:

(1) GPL-compatible doesn't mean that we're distributing Python under
    the GPL.  All Python licenses, unlike the GPL, let you distribute
    a modified version without making your changes open source.  The
    GPL-compatible licenses make it possible to combine Python with
    other software that is released under the GPL; the others don't.

(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
    because its license has a choice of law clause.  According to
    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
    is "not incompatible" with the GPL.

Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.


B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================

Python software and documentation are licensed under the
Python Software Foundation License Version 2.

Starting with Python 3.8.6, examples, recipes, and other code in
the documentation are dual licensed under the PSF License Version 2
and the Zero-Clause BSD license.

Some software incorporated into Python is under different licenses.
The licenses are listed with code falling under that license.


PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------

1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.

2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
All Rights Reserved" are retained in Python alone or in any derivative version
prepared by Licensee.

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.

4. PSF is making Python available to Licensee on an "AS IS"
basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee.  This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.

8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.


BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------

BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1

1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").

2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.

3. BeOpen is making the Software available to Licensee on an "AS IS"
basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions.  Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee.  This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party.  As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.

7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.


CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------

1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.

2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee.  Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement.  This Agreement together with
Python 1.6.1 may be located on the internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013.  This
Agreement may also be obtained from a proxy server on the internet
using the following URL: http://hdl.handle.net/1895.22/1013".

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.

4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee.  This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.

8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.

        ACCEPT


CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------

Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands.  All rights reserved.

Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.

STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
----------------------------------------------------------------------

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
@@ -1,126 +0,0 @@
Metadata-Version: 2.1
Name: aiohappyeyeballs
Version: 2.4.3
Summary: Happy Eyeballs for asyncio
Home-page: https://github.com/aio-libs/aiohappyeyeballs
License: PSF-2.0
Author: J. Nick Koston
Author-email: nick@koston.org
Requires-Python: >=3.8
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Python Software Foundation License
Classifier: License :: Other/Proprietary License
Classifier: Natural Language :: English
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Topic :: Software Development :: Libraries
Project-URL: Bug Tracker, https://github.com/aio-libs/aiohappyeyeballs/issues
Project-URL: Changelog, https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md
Project-URL: Documentation, https://aiohappyeyeballs.readthedocs.io
Project-URL: Repository, https://github.com/aio-libs/aiohappyeyeballs
Description-Content-Type: text/markdown

# aiohappyeyeballs

<p align="center">
  <a href="https://github.com/aio-libs/aiohappyeyeballs/actions/workflows/ci.yml?query=branch%3Amain">
    <img src="https://img.shields.io/github/actions/workflow/status/aio-libs/aiohappyeyeballs/ci-cd.yml?branch=main&label=CI&logo=github&style=flat-square" alt="CI Status" >
  </a>
  <a href="https://aiohappyeyeballs.readthedocs.io">
    <img src="https://img.shields.io/readthedocs/aiohappyeyeballs.svg?logo=read-the-docs&logoColor=fff&style=flat-square" alt="Documentation Status">
  </a>
  <a href="https://codecov.io/gh/aio-libs/aiohappyeyeballs">
    <img src="https://img.shields.io/codecov/c/github/aio-libs/aiohappyeyeballs.svg?logo=codecov&logoColor=fff&style=flat-square" alt="Test coverage percentage">
  </a>
</p>
<p align="center">
  <a href="https://python-poetry.org/">
<img src="https://img.shields.io/badge/packaging-poetry-299bd7?style=flat-square&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA4AAAASCAYAAABrXO8xAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAJJSURBVHgBfZLPa1NBEMe/s7tNXoxW1KJQKaUHkXhQvHgW6UHQQ09CBS/6V3hKc/AP8CqCrUcpmop3Cx48eDB4yEECjVQrlZb80CRN8t6OM/teagVxYZi38+Yz853dJbzoMV3MM8cJUcLMSUKIE8AzQ2PieZzFxEJOHMOgMQQ+dUgSAckNXhapU/NMhDSWLs1B24A8sO1xrN4NECkcAC9ASkiIJc6k5TRiUDPhnyMMdhKc+Zx19l6SgyeW76BEONY9exVQMzKExGKwwPsCzza7KGSSWRWEQhyEaDXp6ZHEr416ygbiKYOd7TEWvvcQIeusHYMJGhTwF9y7sGnSwaWyFAiyoxzqW0PM/RjghPxF2pWReAowTEXnDh0xgcLs8l2YQmOrj3N7ByiqEoH0cARs4u78WgAVkoEDIDoOi3AkcLOHU60RIg5wC4ZuTC7FaHKQm8Hq1fQuSOBvX/sodmNJSB5geaF5CPIkUeecdMxieoRO5jz9bheL6/tXjrwCyX/UYBUcjCaWHljx1xiX6z9xEjkYAzbGVnB8pvLmyXm9ep+W8CmsSHQQY77Zx1zboxAV0w7ybMhQmfqdmmw3nEp1I0Z+FGO6M8LZdoyZnuzzBdjISicKRnpxzI9fPb+0oYXsNdyi+d3h9bm9MWYHFtPeIZfLwzmFDKy1ai3p+PDls1Llz4yyFpferxjnyjJDSEy9CaCx5m2cJPerq6Xm34eTrZt3PqxYO1XOwDYZrFlH1fWnpU38Y9HRze3lj0vOujZcXKuuXm3jP+s3KbZVra7y2EAAAAAASUVORK5CYII=" alt="Poetry">
  </a>
  <a href="https://github.com/astral-sh/ruff">
    <img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff">
  </a>
  <a href="https://github.com/pre-commit/pre-commit">
    <img src="https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white&style=flat-square" alt="pre-commit">
  </a>
</p>
<p align="center">
  <a href="https://pypi.org/project/aiohappyeyeballs/">
    <img src="https://img.shields.io/pypi/v/aiohappyeyeballs.svg?logo=python&logoColor=fff&style=flat-square" alt="PyPI Version">
  </a>
  <img src="https://img.shields.io/pypi/pyversions/aiohappyeyeballs.svg?style=flat-square&logo=python&logoColor=fff" alt="Supported Python versions">
  <img src="https://img.shields.io/pypi/l/aiohappyeyeballs.svg?style=flat-square" alt="License">
</p>

---

**Documentation**: <a href="https://aiohappyeyeballs.readthedocs.io" target="_blank">https://aiohappyeyeballs.readthedocs.io</a>

**Source Code**: <a href="https://github.com/aio-libs/aiohappyeyeballs" target="_blank">https://github.com/aio-libs/aiohappyeyeballs</a>

---

[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs)
([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html))

## Use case

This library exists to allow connecting with
[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs)
([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html))
when you
already have a list of addrinfo and not a DNS name.

The stdlib version of `loop.create_connection()`
will only work when you pass in an unresolved name, which
is not a good fit when using DNS caching or resolving
names via another method such as `zeroconf`.

## Installation

Install this via pip (or your favourite package manager):

`pip install aiohappyeyeballs`

## License

[aiohappyeyeballs is licensed under the same terms as cpython itself.](https://github.com/python/cpython/blob/main/LICENSE)

## Example usage

```python

addr_infos = await loop.getaddrinfo("example.org", 80)

socket = await start_connection(addr_infos)
socket = await start_connection(addr_infos, local_addr_infos=local_addr_infos, happy_eyeballs_delay=0.2)

transport, protocol = await loop.create_connection(
    MyProtocol, sock=socket, ...)

# Remove the first address for each family from addr_infos
pop_addr_infos_interleave(addr_infos, 1)

# Remove all matching addresses from addr_infos
remove_addr_infos(addr_infos, ("dead::beef", 80, 0, 0))

# Convert a local_addr to local_addr_infos
local_addr_infos = addr_to_addr_infos(("127.0.0.1", 0))
```

## Credits

This package contains code from cpython and is licensed under the same terms as cpython itself.

This package was created with
[Copier](https://copier.readthedocs.io/) and the
[browniebroke/pypackage-template](https://github.com/browniebroke/pypackage-template)
project template.
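For completeness, here is a self-contained version of the snippet above (a sketch: it picks `asyncio.Protocol` as a stand-in protocol and assumes `example.org` is reachable):

```python
import asyncio

from aiohappyeyeballs import start_connection


async def main() -> None:
    loop = asyncio.get_running_loop()
    addr_infos = await loop.getaddrinfo("example.org", 80)

    # Race the resolved addresses Happy Eyeballs style, interleaved by family.
    sock = await start_connection(addr_infos, happy_eyeballs_delay=0.25)

    # Hand the already-connected socket to the event loop as usual.
    transport, protocol = await loop.create_connection(asyncio.Protocol, sock=sock)
    transport.close()


asyncio.run(main())
```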
@@ -1,17 +0,0 @@
aiohappyeyeballs-2.4.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
aiohappyeyeballs-2.4.3.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936
aiohappyeyeballs-2.4.3.dist-info/METADATA,sha256=jHyieOqom9bAU9J4pjpCJ2JC8_m2101Xi4cKdZJgV8o,6070
aiohappyeyeballs-2.4.3.dist-info/RECORD,,
aiohappyeyeballs-2.4.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
aiohappyeyeballs-2.4.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
aiohappyeyeballs/__init__.py,sha256=OI4WTw8PkRXyNHOk73ykmxdWltT-D3YIdM0xWSGNA4M,317
aiohappyeyeballs/__pycache__/__init__.cpython-310.pyc,,
aiohappyeyeballs/__pycache__/_staggered.cpython-310.pyc,,
aiohappyeyeballs/__pycache__/impl.cpython-310.pyc,,
aiohappyeyeballs/__pycache__/types.cpython-310.pyc,,
aiohappyeyeballs/__pycache__/utils.cpython-310.pyc,,
aiohappyeyeballs/_staggered.py,sha256=LbTGSjib2cb11QDE4RlSVQNUauK3X9p1avCR9YuJF7s,6723
aiohappyeyeballs/impl.py,sha256=Nd7D1WGCw5S_YAMGOpW8hx8d2VpflNsgi_GUZ2JbiTs,7877
aiohappyeyeballs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
aiohappyeyeballs/types.py,sha256=iYPiBTl5J7YEjnIqEOVUTRPzz2DwqSHBRhvbAlM0zv0,234
aiohappyeyeballs/utils.py,sha256=on9GxIR0LhEfZu8P6Twi9hepX9zDanuZM20MWsb3xlQ,3028
@@ -1,4 +0,0 @@
Wheel-Version: 1.0
Generator: poetry-core 1.9.0
Root-Is-Purelib: true
Tag: py3-none-any
@@ -1,13 +0,0 @@
__version__ = "2.4.3"

from .impl import start_connection
from .types import AddrInfoType
from .utils import addr_to_addr_infos, pop_addr_infos_interleave, remove_addr_infos

__all__ = (
    "start_connection",
    "AddrInfoType",
    "remove_addr_infos",
    "pop_addr_infos_interleave",
    "addr_to_addr_infos",
)
@@ -1,202 +0,0 @@
import asyncio
import contextlib
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Iterable,
    List,
    Optional,
    Set,
    Tuple,
    TypeVar,
    Union,
)

_T = TypeVar("_T")


def _set_result(wait_next: "asyncio.Future[None]") -> None:
    """Set the result of a future if it is not already done."""
    if not wait_next.done():
        wait_next.set_result(None)


async def _wait_one(
    futures: "Iterable[asyncio.Future[Any]]",
    loop: asyncio.AbstractEventLoop,
) -> _T:
    """Wait for the first future to complete."""
    wait_next = loop.create_future()

    def _on_completion(fut: "asyncio.Future[Any]") -> None:
        if not wait_next.done():
            wait_next.set_result(fut)

    for f in futures:
        f.add_done_callback(_on_completion)

    try:
        return await wait_next
    finally:
        for f in futures:
            f.remove_done_callback(_on_completion)


async def staggered_race(
    coro_fns: Iterable[Callable[[], Awaitable[_T]]],
    delay: Optional[float],
    *,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> Tuple[Optional[_T], Optional[int], List[Optional[BaseException]]]:
    """
    Run coroutines with staggered start times and take the first to finish.

    This method takes an iterable of coroutine functions. The first one is
    started immediately. From then on, whenever the immediately preceding one
    fails (raises an exception), or when *delay* seconds have passed, the next
    coroutine is started. This continues until one of the coroutines completes
    successfully, in which case all others are cancelled, or until all
    coroutines fail.

    The coroutines provided should be well-behaved in the following way:

    * They should only ``return`` if completed successfully.

    * They should always raise an exception if they did not complete
      successfully. In particular, if they handle cancellation, they should
      probably reraise, like this::

        try:
            # do work
        except asyncio.CancelledError:
            # undo partially completed work
            raise

    Args:
    ----
        coro_fns: an iterable of coroutine functions, i.e. callables that
            return a coroutine object when called. Use ``functools.partial`` or
            lambdas to pass arguments.

        delay: amount of time, in seconds, between starting coroutines. If
            ``None``, the coroutines will run sequentially.

        loop: the event loop to use. If ``None``, the running loop is used.

    Returns:
    -------
        tuple *(winner_result, winner_index, exceptions)* where

        - *winner_result*: the result of the winning coroutine, or ``None``
          if no coroutines won.

        - *winner_index*: the index of the winning coroutine in
          ``coro_fns``, or ``None`` if no coroutines won. If the winning
          coroutine may return None on success, *winner_index* can be used
          to definitively determine whether any coroutine won.

        - *exceptions*: list of exceptions returned by the coroutines.
          ``len(exceptions)`` is equal to the number of coroutines actually
          started, and the order is the same as in ``coro_fns``. The winning
          coroutine's entry is ``None``.

    """
    loop = loop or asyncio.get_running_loop()
    exceptions: List[Optional[BaseException]] = []
    tasks: Set[asyncio.Task[Optional[Tuple[_T, int]]]] = set()

    async def run_one_coro(
        coro_fn: Callable[[], Awaitable[_T]],
        this_index: int,
        start_next: "asyncio.Future[None]",
    ) -> Optional[Tuple[_T, int]]:
        """
        Run a single coroutine.

        If the coroutine fails, set the exception in the exceptions list and
        start the next coroutine by setting the result of the start_next future.

        If the coroutine succeeds, return the result and the index of the
        coroutine in the coro_fns list.

        If SystemExit or KeyboardInterrupt is raised, re-raise it.
        """
        try:
            result = await coro_fn()
        except (SystemExit, KeyboardInterrupt):
            raise
        except BaseException as e:
            exceptions[this_index] = e
            _set_result(start_next)  # Kickstart the next coroutine
            return None

        return result, this_index

    start_next_timer: Optional[asyncio.TimerHandle] = None
    start_next: Optional[asyncio.Future[None]]
    task: asyncio.Task[Optional[Tuple[_T, int]]]
    done: Union[asyncio.Future[None], asyncio.Task[Optional[Tuple[_T, int]]]]
    coro_iter = iter(coro_fns)
    this_index = -1
    try:
        while True:
            if coro_fn := next(coro_iter, None):
                this_index += 1
                exceptions.append(None)
                start_next = loop.create_future()
                task = loop.create_task(run_one_coro(coro_fn, this_index, start_next))
                tasks.add(task)
                start_next_timer = (
                    loop.call_later(delay, _set_result, start_next) if delay else None
                )
            elif not tasks:
                # We exhausted the coro_fns list and no tasks are running
                # so we have no winner and all coroutines failed.
                break

            while tasks:
                done = await _wait_one(
                    [*tasks, start_next] if start_next else tasks, loop
                )
                if done is start_next:
                    # The current task has failed or the timer has expired
                    # so we need to start the next task.
                    start_next = None
                    if start_next_timer:
                        start_next_timer.cancel()
                        start_next_timer = None

                    # Break out of the task waiting loop to start the next
                    # task.
                    break

                if TYPE_CHECKING:
                    assert isinstance(done, asyncio.Task)

                tasks.remove(done)
                if winner := done.result():
                    return *winner, exceptions
    finally:
        # We either have:
        #  - a winner
        #  - all tasks failed
        #  - a KeyboardInterrupt or SystemExit.

        #
        # If the timer is still running, cancel it.
        #
        if start_next_timer:
            start_next_timer.cancel()

        #
        # If there are any tasks left, cancel them and then
        # await them so they fill the exceptions list.
        #
        for task in tasks:
            task.cancel()
            with contextlib.suppress(asyncio.CancelledError):
                await task

    return None, None, exceptions
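A minimal sketch of the semantics documented above (it imports the private `_staggered` module directly, which is done here purely for illustration; the host names and delays are made up):

```python
import asyncio
import functools

from aiohappyeyeballs._staggered import staggered_race


async def connect(host: str) -> str:
    await asyncio.sleep(0.05)
    if host.startswith("bad"):
        raise OSError(f"cannot reach {host}")
    return f"connected to {host}"


async def main() -> None:
    # The first attempt fails quickly, which kickstarts the second attempt
    # before the 0.3 s stagger delay even elapses.
    winner, index, excs = await staggered_race(
        [functools.partial(connect, h) for h in ("bad-host", "good-host")],
        delay=0.3,
    )
    print(winner)  # connected to good-host
    print(index)   # 1
    print(excs)    # [OSError('cannot reach bad-host'), None]


asyncio.run(main())
```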
@@ -1,213 +0,0 @@
"""Base implementation."""

import asyncio
import collections
import functools
import itertools
import socket
import sys
from typing import List, Optional, Sequence, Union

from . import _staggered
from .types import AddrInfoType

if sys.version_info < (3, 8, 2):  # noqa: UP036
    # asyncio.staggered is broken in Python 3.8.0 and 3.8.1
    # so it must be patched:
    # https://github.com/aio-libs/aiohttp/issues/8556
    # https://bugs.python.org/issue39129
    # https://github.com/python/cpython/pull/17693
    import asyncio.futures

    asyncio.futures.TimeoutError = asyncio.TimeoutError  # type: ignore[attr-defined]


async def start_connection(
    addr_infos: Sequence[AddrInfoType],
    *,
    local_addr_infos: Optional[Sequence[AddrInfoType]] = None,
    happy_eyeballs_delay: Optional[float] = None,
    interleave: Optional[int] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> socket.socket:
    """
    Connect to a TCP server.

    Create a socket connection to a specified destination. The
    destination is specified as a list of AddrInfoType tuples as
    returned from getaddrinfo().

    The arguments are, in order:

    * ``family``: the address family, e.g. ``socket.AF_INET`` or
        ``socket.AF_INET6``.
    * ``type``: the socket type, e.g. ``socket.SOCK_STREAM`` or
        ``socket.SOCK_DGRAM``.
    * ``proto``: the protocol, e.g. ``socket.IPPROTO_TCP`` or
        ``socket.IPPROTO_UDP``.
    * ``canonname``: the canonical name of the address, e.g.
        ``"www.python.org"``.
    * ``sockaddr``: the socket address

    This method is a coroutine which will try to establish the connection
    in the background. When successful, the coroutine returns a
    socket.

    The expected use case is to use this method in conjunction with
    loop.create_connection() to establish a connection to a server::

        socket = await start_connection(addr_infos)
        transport, protocol = await loop.create_connection(
            MyProtocol, sock=socket, ...)
    """
    if not (current_loop := loop):
        current_loop = asyncio.get_running_loop()

    single_addr_info = len(addr_infos) == 1

    if happy_eyeballs_delay is not None and interleave is None:
        # If using happy eyeballs, default to interleave addresses by family
        interleave = 1

    if interleave and not single_addr_info:
        addr_infos = _interleave_addrinfos(addr_infos, interleave)

    sock: Optional[socket.socket] = None
    # uvloop can raise RuntimeError instead of OSError
    exceptions: List[List[Union[OSError, RuntimeError]]] = []
    if happy_eyeballs_delay is None or single_addr_info:
        # not using happy eyeballs
        for addrinfo in addr_infos:
            try:
                sock = await _connect_sock(
                    current_loop, exceptions, addrinfo, local_addr_infos
                )
                break
            except (RuntimeError, OSError):
                continue
    else:  # using happy eyeballs
        sock, _, _ = await _staggered.staggered_race(
            (
                functools.partial(
                    _connect_sock, current_loop, exceptions, addrinfo, local_addr_infos
                )
                for addrinfo in addr_infos
            ),
            happy_eyeballs_delay,
        )

    if sock is None:
        all_exceptions = [exc for sub in exceptions for exc in sub]
        try:
            first_exception = all_exceptions[0]
            if len(all_exceptions) == 1:
                raise first_exception
            else:
                # If they all have the same str(), raise one.
                model = str(first_exception)
                if all(str(exc) == model for exc in all_exceptions):
                    raise first_exception
                # Raise a combined exception so the user can see all
                # the various error messages.
                msg = "Multiple exceptions: {}".format(
                    ", ".join(str(exc) for exc in all_exceptions)
                )
                # If the errno is the same for all exceptions, raise
                # an OSError with that errno.
                if isinstance(first_exception, OSError):
                    first_errno = first_exception.errno
                    if all(
                        isinstance(exc, OSError) and exc.errno == first_errno
                        for exc in all_exceptions
                    ):
                        raise OSError(first_errno, msg)
                elif isinstance(first_exception, RuntimeError) and all(
                    isinstance(exc, RuntimeError) for exc in all_exceptions
                ):
                    raise RuntimeError(msg)
                # We have a mix of OSError and RuntimeError,
                # so we have to pick which one to raise;
                # we raise OSError for compatibility.
                raise OSError(msg)
        finally:
            all_exceptions = None  # type: ignore[assignment]
            exceptions = None  # type: ignore[assignment]

    return sock


async def _connect_sock(
    loop: asyncio.AbstractEventLoop,
    exceptions: List[List[Union[OSError, RuntimeError]]],
    addr_info: AddrInfoType,
    local_addr_infos: Optional[Sequence[AddrInfoType]] = None,
) -> socket.socket:
    """Create, bind and connect one socket."""
    my_exceptions: List[Union[OSError, RuntimeError]] = []
    exceptions.append(my_exceptions)
    family, type_, proto, _, address = addr_info
    sock = None
    try:
        sock = socket.socket(family=family, type=type_, proto=proto)
        sock.setblocking(False)
        if local_addr_infos is not None:
            for lfamily, _, _, _, laddr in local_addr_infos:
                # skip local addresses of different family
                if lfamily != family:
                    continue
                try:
                    sock.bind(laddr)
                    break
                except OSError as exc:
                    msg = (
                        f"error while attempting to bind on "
                        f"address {laddr!r}: "
                        f"{exc.strerror.lower()}"
                    )
                    exc = OSError(exc.errno, msg)
                    my_exceptions.append(exc)
            else:  # all bind attempts failed
                if my_exceptions:
                    raise my_exceptions.pop()
                else:
                    raise OSError(f"no matching local address with {family=} found")
        await loop.sock_connect(sock, address)
        return sock
    except (RuntimeError, OSError) as exc:
        my_exceptions.append(exc)
        if sock is not None:
            sock.close()
        raise
    except:
        if sock is not None:
            sock.close()
        raise
    finally:
        exceptions = my_exceptions = None  # type: ignore[assignment]


def _interleave_addrinfos(
    addrinfos: Sequence[AddrInfoType], first_address_family_count: int = 1
) -> List[AddrInfoType]:
    """Interleave list of addrinfo tuples by family."""
    # Group addresses by family
    addrinfos_by_family: collections.OrderedDict[int, List[AddrInfoType]] = (
        collections.OrderedDict()
    )
    for addr in addrinfos:
        family = addr[0]
        if family not in addrinfos_by_family:
            addrinfos_by_family[family] = []
        addrinfos_by_family[family].append(addr)
    addrinfos_lists = list(addrinfos_by_family.values())

    reordered: List[AddrInfoType] = []
    if first_address_family_count > 1:
        reordered.extend(addrinfos_lists[0][: first_address_family_count - 1])
        del addrinfos_lists[0][: first_address_family_count - 1]
    reordered.extend(
        a
        for a in itertools.chain.from_iterable(itertools.zip_longest(*addrinfos_lists))
        if a is not None
    )
    return reordered
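To make the reordering concrete, here is what `_interleave_addrinfos` (a private helper, imported here only for illustration) does to a family-grouped list; the sockaddrs use documentation-range addresses:

```python
import socket

from aiohappyeyeballs.impl import _interleave_addrinfos

# Three IPv6 entries followed by two IPv4 entries.
v6 = [
    (socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", (f"2001:db8::{i}", 80, 0, 0))
    for i in (1, 2, 3)
]
v4 = [
    (socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", (f"192.0.2.{i}", 80))
    for i in (1, 2)
]

reordered = _interleave_addrinfos(v6 + v4)
print([ai[4][0] for ai in reordered])
# ['2001:db8::1', '192.0.2.1', '2001:db8::2', '192.0.2.2', '2001:db8::3']
```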
@@ -1,12 +0,0 @@
"""Types for aiohappyeyeballs."""

import socket
from typing import Tuple, Union

AddrInfoType = Tuple[
    Union[int, socket.AddressFamily],
    Union[int, socket.SocketKind],
    int,
    str,
    Tuple,  # type: ignore[type-arg]
]
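An `AddrInfoType` has the same shape as one element of the list returned by `socket.getaddrinfo()`. A hand-built example (the sockaddr uses a documentation-range IP, purely for illustration):

```python
import socket

# (family, type, proto, canonname, sockaddr)
example_addr_info = (
    socket.AF_INET,
    socket.SOCK_STREAM,
    socket.IPPROTO_TCP,
    "",
    ("192.0.2.1", 80),
)
```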
@@ -1,97 +0,0 @@
"""Utility functions for aiohappyeyeballs."""

import ipaddress
import socket
from typing import Dict, List, Optional, Tuple, Union

from .types import AddrInfoType


def addr_to_addr_infos(
    addr: Optional[
        Union[Tuple[str, int, int, int], Tuple[str, int, int], Tuple[str, int]]
    ],
) -> Optional[List[AddrInfoType]]:
    """Convert an address tuple to a list of addr_info tuples."""
    if addr is None:
        return None
    host = addr[0]
    port = addr[1]
    is_ipv6 = ":" in host
    if is_ipv6:
        flowinfo = 0
        scopeid = 0
        addr_len = len(addr)
        if addr_len >= 4:
            scopeid = addr[3]  # type: ignore[misc]
        if addr_len >= 3:
            flowinfo = addr[2]  # type: ignore[misc]
        addr = (host, port, flowinfo, scopeid)
        family = socket.AF_INET6
    else:
        addr = (host, port)
        family = socket.AF_INET
    return [(family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr)]


def pop_addr_infos_interleave(
    addr_infos: List[AddrInfoType], interleave: Optional[int] = None
) -> None:
    """
    Pop addr_info from the list of addr_infos by family up to interleave times.

    The interleave parameter is used to know how many addr_infos for
    each family should be popped off the top of the list.
    """
    seen: Dict[int, int] = {}
    if interleave is None:
        interleave = 1
    to_remove: List[AddrInfoType] = []
    for addr_info in addr_infos:
        family = addr_info[0]
        if family not in seen:
            seen[family] = 0
        if seen[family] < interleave:
            to_remove.append(addr_info)
            seen[family] += 1
    for addr_info in to_remove:
        addr_infos.remove(addr_info)


def _addr_tuple_to_ip_address(
    addr: Union[Tuple[str, int], Tuple[str, int, int, int]],
) -> Union[
    Tuple[ipaddress.IPv4Address, int], Tuple[ipaddress.IPv6Address, int, int, int]
]:
    """Convert an address tuple to one whose host is an ipaddress object."""
    return (ipaddress.ip_address(addr[0]), *addr[1:])


def remove_addr_infos(
    addr_infos: List[AddrInfoType],
    addr: Union[Tuple[str, int], Tuple[str, int, int, int]],
) -> None:
    """
    Remove an address from the list of addr_infos.

    The addr value is typically the return value of
    sock.getpeername().
    """
    bad_addrs_infos: List[AddrInfoType] = []
    for addr_info in addr_infos:
        if addr_info[-1] == addr:
            bad_addrs_infos.append(addr_info)
    if bad_addrs_infos:
        for bad_addr_info in bad_addrs_infos:
            addr_infos.remove(bad_addr_info)
        return
    # Slow path in case addr is formatted differently
    match_addr = _addr_tuple_to_ip_address(addr)
    for addr_info in addr_infos:
        if match_addr == _addr_tuple_to_ip_address(addr_info[-1]):
            bad_addrs_infos.append(addr_info)
    if bad_addrs_infos:
        for bad_addr_info in bad_addrs_infos:
            addr_infos.remove(bad_addr_info)
        return
    raise ValueError(f"Address {addr} not found in addr_infos")
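A short sketch of the three public helpers in action, run against hand-built addrinfo lists (no network access needed; the addresses are illustrative):

```python
import socket

from aiohappyeyeballs.utils import (
    addr_to_addr_infos,
    pop_addr_infos_interleave,
    remove_addr_infos,
)

# Build a single-entry addrinfo list from a plain (host, port) tuple.
infos = addr_to_addr_infos(("127.0.0.1", 8080))
assert infos == [
    (socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", ("127.0.0.1", 8080))
]

# Drop the entry matching a peer address (as returned by sock.getpeername()).
remove_addr_infos(infos, ("127.0.0.1", 8080))
assert infos == []

# pop_addr_infos_interleave removes the first N entries per family, in place.
infos = addr_to_addr_infos(("127.0.0.1", 8080))
pop_addr_infos_interleave(infos, 1)
assert infos == []
```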
@@ -1 +0,0 @@
pip
@@ -1,13 +0,0 @@
Copyright aio-libs contributors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -1,243 +0,0 @@
Metadata-Version: 2.1
Name: aiohttp
Version: 3.9.0
Summary: Async http client/server framework (asyncio)
Home-page: https://github.com/aio-libs/aiohttp
Maintainer: aiohttp team <team@aiohttp.org>
Maintainer-email: team@aiohttp.org
License: Apache 2
Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html
Project-URL: Docs: RTD, https://docs.aiohttp.org
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
Classifier: Development Status :: 5 - Production/Stable
Classifier: Framework :: AsyncIO
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: POSIX
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Topic :: Internet :: WWW/HTTP
Requires-Python: >=3.8
Description-Content-Type: text/x-rst
License-File: LICENSE.txt
Requires-Dist: attrs >=17.3.0
Requires-Dist: multidict <7.0,>=4.5
Requires-Dist: yarl <2.0,>=1.0
Requires-Dist: frozenlist >=1.1.1
Requires-Dist: aiosignal >=1.1.2
Requires-Dist: async-timeout <5.0,>=4.0 ; python_version < "3.11"
Provides-Extra: speedups
Requires-Dist: brotlicffi ; (platform_python_implementation != "CPython") and extra == 'speedups'
Requires-Dist: Brotli ; (platform_python_implementation == "CPython") and extra == 'speedups'
Requires-Dist: aiodns ; (sys_platform == "linux" or sys_platform == "darwin") and extra == 'speedups'

==================================
Async http client/server framework
==================================

.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg
   :height: 64px
   :width: 64px
   :alt: aiohttp logo

|

.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
   :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
   :alt: GitHub Actions status for master branch

.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/aio-libs/aiohttp
   :alt: codecov.io status for master branch

.. image:: https://badge.fury.io/py/aiohttp.svg
   :target: https://pypi.org/project/aiohttp
   :alt: Latest PyPI package version

.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
   :target: https://docs.aiohttp.org/
   :alt: Latest Read The Docs

.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
   :target: https://matrix.to/#/%23aio-libs:matrix.org
   :alt: Matrix Room — #aio-libs:matrix.org

.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
   :target: https://matrix.to/#/%23aio-libs-space:matrix.org
   :alt: Matrix Space — #aio-libs-space:matrix.org


Key Features
============

- Supports both client and server side of HTTP protocol.
- Supports both client and server Web-Sockets out-of-the-box and avoids
  Callback Hell.
- Provides Web-server with middleware and pluggable routing.


Getting started
===============

Client
------

To get something from the web:

.. code-block:: python

  import aiohttp
  import asyncio

  async def main():

      async with aiohttp.ClientSession() as session:
          async with session.get('http://python.org') as response:

              print("Status:", response.status)
              print("Content-type:", response.headers['content-type'])

              html = await response.text()
              print("Body:", html[:15], "...")

  asyncio.run(main())

This prints:

.. code-block::

    Status: 200
    Content-type: text/html; charset=utf-8
    Body: <!doctype html> ...

Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.

Server
------

An example using a simple server:

.. code-block:: python

    # examples/server_simple.py
    from aiohttp import web

    async def handle(request):
        name = request.match_info.get('name', "Anonymous")
        text = "Hello, " + name
        return web.Response(text=text)

    async def wshandle(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)

        async for msg in ws:
            if msg.type == web.WSMsgType.text:
                await ws.send_str("Hello, {}".format(msg.data))
            elif msg.type == web.WSMsgType.binary:
                await ws.send_bytes(msg.data)
            elif msg.type == web.WSMsgType.close:
                break

        return ws


    app = web.Application()
    app.add_routes([web.get('/', handle),
                    web.get('/echo', wshandle),
                    web.get('/{name}', handle)])

    if __name__ == '__main__':
        web.run_app(app)


Documentation
=============

https://aiohttp.readthedocs.io/


Demos
=====

https://github.com/aio-libs/aiohttp-demos


External links
==============

* `Third party libraries
  <http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
* `Built with aiohttp
  <http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
* `Powered by aiohttp
  <http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_

Feel free to make a Pull Request for adding your link to these pages!


Communication channels
======================

*aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions

*gitter chat* https://gitter.im/aio-libs/Lobby

We support `Stack Overflow
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
Please add the *aiohttp* tag to your question there.

Requirements
============

- async-timeout_
- attrs_
- multidict_
- yarl_
- frozenlist_

Optionally you may install the aiodns_ library (highly recommended for the sake of speed).

.. _aiodns: https://pypi.python.org/pypi/aiodns
.. _attrs: https://github.com/python-attrs/attrs
.. _multidict: https://pypi.python.org/pypi/multidict
.. _frozenlist: https://pypi.org/project/frozenlist/
.. _yarl: https://pypi.python.org/pypi/yarl
.. _async-timeout: https://pypi.python.org/pypi/async_timeout

License
=======

``aiohttp`` is offered under the Apache 2 license.


Keepsafe
========

The aiohttp community would like to thank Keepsafe
(https://www.getkeepsafe.com) for its support in the early days of
the project.


Source code
===========

The latest developer version is available in a GitHub repository:
https://github.com/aio-libs/aiohttp

Benchmarks
==========

If you are interested in efficiency, the AsyncIO community maintains a
list of benchmarks on the official wiki:
https://github.com/python/asyncio/wiki/Benchmarks
@@ -1,120 +0,0 @@
aiohttp-3.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
aiohttp-3.9.0.dist-info/LICENSE.txt,sha256=wUk-nxDVnR-6n53ygAjhVX4zz5-6yM4SY6ozk5goA94,601
aiohttp-3.9.0.dist-info/METADATA,sha256=LL4HwGz8bBOFohDXsw7RdYkMxUq0Ryj3FpjAULqYVmY,7600
aiohttp-3.9.0.dist-info/RECORD,,
aiohttp-3.9.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
aiohttp-3.9.0.dist-info/WHEEL,sha256=QZRFZfINHXboTe9fmBqbFIViCGEqzQrgdxTyahla7mM,102
aiohttp-3.9.0.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
aiohttp/.hash/_cparser.pxd.hash,sha256=dVGMrCmyJM_owqoRLPezK095md0X5R319koTuhUN6DQ,64
aiohttp/.hash/_find_header.pxd.hash,sha256=W5qRPWDc55gArGZkriI5tztmQHkrdwR6NdQfRQfTxIg,64
aiohttp/.hash/_helpers.pyi.hash,sha256=bAsxbXsjcZ5gbj1c561GYcRtQ5REXxrCihR-HN0XKPk,64
aiohttp/.hash/_helpers.pyx.hash,sha256=-DfrN0XUqBhyb8bp2fJQVb1Lo9S1S-psob-7MJBM18c,64
aiohttp/.hash/_http_parser.pyx.hash,sha256=8fpadR-Mg6RMlNNLGfIYRh56a6SedGbXY5x9A40ciRs,64
aiohttp/.hash/_http_writer.pyx.hash,sha256=z39c0hUcdud-ZCon2d9bWpxrFMVdW1dvjtCgxW4RDnI,64
aiohttp/.hash/_websocket.pyx.hash,sha256=90x5ulhWiFtw2wAri2_82Zas5i3iEkJ-flYJK9Xx-SY,64
aiohttp/.hash/hdrs.py.hash,sha256=QBHPUkJcp8iPZv3ENUbevgpJzljxoP2qwkBeX3nQ82o,64
aiohttp/__init__.py,sha256=b5WSJZnV8-2Kvt6w-9UEiDUqhWrE0iyxzjrcPdPco-A,8022
aiohttp/__pycache__/__init__.cpython-310.pyc,,
aiohttp/__pycache__/abc.cpython-310.pyc,,
aiohttp/__pycache__/base_protocol.cpython-310.pyc,,
aiohttp/__pycache__/client.cpython-310.pyc,,
aiohttp/__pycache__/client_exceptions.cpython-310.pyc,,
aiohttp/__pycache__/client_proto.cpython-310.pyc,,
aiohttp/__pycache__/client_reqrep.cpython-310.pyc,,
aiohttp/__pycache__/client_ws.cpython-310.pyc,,
aiohttp/__pycache__/compression_utils.cpython-310.pyc,,
aiohttp/__pycache__/connector.cpython-310.pyc,,
aiohttp/__pycache__/cookiejar.cpython-310.pyc,,
aiohttp/__pycache__/formdata.cpython-310.pyc,,
aiohttp/__pycache__/hdrs.cpython-310.pyc,,
aiohttp/__pycache__/helpers.cpython-310.pyc,,
aiohttp/__pycache__/http.cpython-310.pyc,,
aiohttp/__pycache__/http_exceptions.cpython-310.pyc,,
aiohttp/__pycache__/http_parser.cpython-310.pyc,,
aiohttp/__pycache__/http_websocket.cpython-310.pyc,,
aiohttp/__pycache__/http_writer.cpython-310.pyc,,
aiohttp/__pycache__/locks.cpython-310.pyc,,
aiohttp/__pycache__/log.cpython-310.pyc,,
aiohttp/__pycache__/multipart.cpython-310.pyc,,
aiohttp/__pycache__/payload.cpython-310.pyc,,
aiohttp/__pycache__/payload_streamer.cpython-310.pyc,,
aiohttp/__pycache__/pytest_plugin.cpython-310.pyc,,
aiohttp/__pycache__/resolver.cpython-310.pyc,,
aiohttp/__pycache__/streams.cpython-310.pyc,,
aiohttp/__pycache__/tcp_helpers.cpython-310.pyc,,
aiohttp/__pycache__/test_utils.cpython-310.pyc,,
aiohttp/__pycache__/tracing.cpython-310.pyc,,
aiohttp/__pycache__/typedefs.cpython-310.pyc,,
aiohttp/__pycache__/web.cpython-310.pyc,,
aiohttp/__pycache__/web_app.cpython-310.pyc,,
aiohttp/__pycache__/web_exceptions.cpython-310.pyc,,
aiohttp/__pycache__/web_fileresponse.cpython-310.pyc,,
aiohttp/__pycache__/web_log.cpython-310.pyc,,
aiohttp/__pycache__/web_middlewares.cpython-310.pyc,,
aiohttp/__pycache__/web_protocol.cpython-310.pyc,,
aiohttp/__pycache__/web_request.cpython-310.pyc,,
aiohttp/__pycache__/web_response.cpython-310.pyc,,
aiohttp/__pycache__/web_routedef.cpython-310.pyc,,
aiohttp/__pycache__/web_runner.cpython-310.pyc,,
aiohttp/__pycache__/web_server.cpython-310.pyc,,
aiohttp/__pycache__/web_urldispatcher.cpython-310.pyc,,
aiohttp/__pycache__/web_ws.cpython-310.pyc,,
aiohttp/__pycache__/worker.cpython-310.pyc,,
aiohttp/_cparser.pxd,sha256=W6-cu0SyHhOEPeb475NvxagQ1Jz9pWqyZJvwEqTLNs0,4476
aiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70
aiohttp/_headers.pxi,sha256=1MhCe6Un_KI1tpO85HnDfzVO94BhcirLanAOys5FIHA,2090
aiohttp/_helpers.cp310-win_amd64.pyd,sha256=-Wtb-eLvZG3iHaCn7Q3iSP_1P5JWR1vXgPRQT6V4pQo,54272
aiohttp/_helpers.pyi,sha256=2Hd5IC0Zf4YTEJ412suyyhsh1kVyVDv5g4stgyo2Ksc,208
aiohttp/_helpers.pyx,sha256=tgl7fZh0QMT6cjf4jSJ8iaO6DdQD3GON2-SH4N5_ETg,1084
aiohttp/_http_parser.cp310-win_amd64.pyd,sha256=CiY4zJfltYg2Y83WGSBgXJ2S38qaKs0Laqagd2WTVXY,254464
aiohttp/_http_parser.pyx,sha256=OcojgALQ4EHKQ4VnTud53Wd2t7cH1KCDfBESMqwdO7o,28894
aiohttp/_http_writer.cp310-win_amd64.pyd,sha256=ih1Ob7aXxphON9irDY6694ZH0iE0U8A78LTxk-y2U_w,49152
aiohttp/_http_writer.pyx,sha256=8CBLytO2rx1kdpWe9HYSznhLXdeZWyE-3xI7jaGasag,4738
aiohttp/_websocket.cp310-win_amd64.pyd,sha256=wVbqnKqNk4goI6ubSlRbgGLnu1B5QTi5BocLI4AFimY,36352
aiohttp/_websocket.pyx,sha256=o9J7yi9c2-jTBjE3dUkXxhDWKvRWJz5GZfyLsgJQa38,1617
aiohttp/abc.py,sha256=ldgKHjMqAyuaN9-abjUAjyU6wX8tjBCK9pU_2P4KUkQ,5749
aiohttp/base_protocol.py,sha256=u4ITEnXHJ88gNDngHxiU01ZPQhMy_m2eQTJx0cqwvXA,2831
aiohttp/client.py,sha256=xq3sM8GMpVyOROT4GJfZmCU7edBQbkq2BVcU2u7cwg8,48274
aiohttp/client_exceptions.py,sha256=ZS-W_vhJr29PS1YXhV4AA6X34Vte6KlEs5-CifUVWSQ,9791
aiohttp/client_proto.py,sha256=Sthd7znJfMDqsX3ZKg-TI3HCbZzHuv7yQv28WeJsNWw,8888
aiohttp/client_reqrep.py,sha256=Pi9PWKGxX_3VNIZVvBz5RMz2Nf4uw6IgXRaPzcfYjJU,40849
aiohttp/client_ws.py,sha256=mi8iVYQR25Hi20AQing6T1BZBcO24NsQIKubhsR8izM,11325
aiohttp/compression_utils.py,sha256=Y9WQM4MGTqEMlu-Abnhaz40eRtXjZps05jfoezkTtGs,4838
aiohttp/connector.py,sha256=Ry3DsH2AKqvsyPCkV3SIJ3dnBm1LWWD4p5su__3BKBw,54185
aiohttp/cookiejar.py,sha256=Q1u0mx96qXDo75n5ymrcZUWQr4RaqgBkFkl9tYFEC4s,14352
aiohttp/formdata.py,sha256=iySnD63XJwo4l1TT_KZiJGNzzmn0RtvvF2_FnJM32Uo,6278
aiohttp/hdrs.py,sha256=_JN4MBE-UoBXGWGoSCKhIviTRc2IXS4fyk5nnuox0Ak,4721
aiohttp/helpers.py,sha256=or4ZTfv64_hNgSygxXCWU5sbvsM1YvP6M4S4c-eCLeU,31254
aiohttp/http.py,sha256=DGKcwDbgIMpasv7s2jeKCRuixyj7W-RIrihRFjj0xcY,1914
aiohttp/http_exceptions.py,sha256=GJYn38j4sI4KdUh993VnZlbgVHOUNI_Z9-ASDTjl5aU,2822
aiohttp/http_parser.py,sha256=izUsiFn1xLsEW-xZnbvrb3dEu6aFt-XquJ53wTqhX3E,36057
aiohttp/http_websocket.py,sha256=eJb1OTXfYu-V2xg4tRC4t2o-P_7LqfRddr-m6T1DQtA,27089
aiohttp/http_writer.py,sha256=p8H39HhtilQEE90njvtJHc94Am95zjHNoS8T1JcNXJc,6131
aiohttp/locks.py,sha256=vp1Z4zx0SvooSffw88dkZ-7qpk2CqRf5vWh2dpKagTA,1177
aiohttp/log.py,sha256=zYUTvXsMQ9Sz1yNN8kXwd5Qxu49a1FzjZ_wQqriEc8M,333
aiohttp/multipart.py,sha256=wLIcJk6q6U7mCcia4hlqKHRbUMFaPzaiXkKAKWeXJvg,33461
aiohttp/payload.py,sha256=w2kRU8QVQegEoWkC-XFQRenlMUfj2twnlgCCYfhi8WM,14045
aiohttp/payload_streamer.py,sha256=rBb3jAFcwAK1QOgbhya2y4zGjhT11oQrepdcffA1_jM,2162
aiohttp/py.typed,sha256=3VVwXUAWVEVX7sDwyYDnW5ZdBC9_Z9AJAFfLCleUW0k,8
aiohttp/pytest_plugin.py,sha256=fJxoTu3NI1wDjFIV1FDmx0oGdRU7r6baovTwJneR6J4,11986
aiohttp/resolver.py,sha256=k5cVNWiiCHqKDGko7UZNu2y-j6SrU7vQBx-omwHPhso,5230
aiohttp/streams.py,sha256=Yy5Sj8ewDvbQTKe2BGnUGmCcj0bbl0EeKBT3uMK4K6M,21502
aiohttp/tcp_helpers.py,sha256=K-hhGh3jd6qCEnHJo8LvFyfJwBjh99UKI7A0aSRVhj4,998
aiohttp/test_utils.py,sha256=5lve_wZyTUljwNzdYx72UVr7o4n_rYry5Er2uGvJwPA,20880
aiohttp/tracing.py,sha256=6WifsvgLXEHksxaDnoP3EgWk5ttrTSyGsn4cq6FPO_M,15623
aiohttp/typedefs.py,sha256=WEEyATjc7mAxTnXeai8ihaAJeI_dASs_n0KSqwkGdGo,1545
aiohttp/web.py,sha256=8rghTkpERz14vRQA4oyXfGZLWMIBMrrHXxcvtZZ_fAU,19879
aiohttp/web_app.py,sha256=43RQib4RKrGyqjV04JmtbZZKo9sXME_0C3E70Y6yhOk,18947
aiohttp/web_exceptions.py,sha256=itNRhCMDJFhnMWftr5SyTsoqh-i0n9rzTj0sjcAEUjo,10812
aiohttp/web_fileresponse.py,sha256=PBtymygkisslfA_vZ8QxkR062trsUc_hGgOuV_L2ByQ,10990
aiohttp/web_log.py,sha256=w81HIudhfSxfodo2Fjkok7jWT56XXIrVMJN6ihYnLo0,8014
aiohttp/web_middlewares.py,sha256=PDLC_h2xS5oY_0O5Nlwa1q71RfhTYM_-i_PmBR4MCUg,4168
aiohttp/web_protocol.py,sha256=-1pSEgI7vsb-B9Go39XssTRcZvIKv2gBHYpH7HqHwcQ,23762
aiohttp/web_request.py,sha256=HM9YrGPx79LIw478yW77Sq_lcIwaA7CiSA7Bcc0TOlI,29674
aiohttp/web_response.py,sha256=e8qfIRCx69d41b18vvj9dh4ZfMzaR9dLAWtyl1HhLDg,28566
aiohttp/web_routedef.py,sha256=KrQtG7OzlE1ylPZf54yDJjBLl36nGvYl8Z9mA6SWayk,6368
aiohttp/web_runner.py,sha256=PUmmc3deHj1Iae2PYlPvSedD2dLTXgbumRjRl8QGLOc,12142
aiohttp/web_server.py,sha256=kOlImrScEbvkGHG7i-N-7eqf55f2zC_J2BZcJanGGmU,2664
aiohttp/web_urldispatcher.py,sha256=tFZ7O1wuMGw9-besWqDrPM42N4RYHptsaxjQMpbK-kQ,40779
aiohttp/web_ws.py,sha256=CWrWdRN5pH55en22Gt0HZ0CleO8PBfP8ucI5-dv9Oe4,18554
aiohttp/worker.py,sha256=vDMxlk-Mo3rzN4yubw2-c8T6yg7PRY8Mv0NLuRm8lWw,8212
@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.41.3)
Root-Is-Purelib: false
Tag: cp310-cp310-win_amd64

@@ -1 +0,0 @@
aiohttp
@@ -1 +0,0 @@
5baf9cbb44b21e13843de6f8ef936fc5a810d49cfda56ab2649bf012a4cb36cd
@@ -1 +0,0 @@
0455129b185e981b5b96ac738f31f7c74dc57f1696953cae0083b3f18679fe73
@@ -1 +0,0 @@
d87779202d197f8613109e35dacbb2ca1b21d64572543bf9838b2d832a362ac7
@@ -1 +0,0 @@
b6097b7d987440c4fa7237f88d227c89a3ba0dd403dc638ddbe487e0de7f1138
@@ -1 +0,0 @@
39ca238002d0e041ca4385674ee779dd6776b7b707d4a0837c111232ac1d3bba
@@ -1 +0,0 @@
f0204bcad3b6af1d6476959ef47612ce784b5dd7995b213edf123b8da19ab1a8
@@ -1 +0,0 @@
a3d27bca2f5cdbe8d3063137754917c610d62af456273e4665fc8bb202506b7f
@@ -1 +0,0 @@
fc937830113e5280571961a84822a122f89345cd885d2e1fca4e679eea31d009
@@ -1,240 +0,0 @@
__version__ = "3.9.0"

from typing import TYPE_CHECKING, Tuple

from . import hdrs as hdrs
from .client import (
    BaseConnector as BaseConnector,
    ClientConnectionError as ClientConnectionError,
    ClientConnectorCertificateError as ClientConnectorCertificateError,
    ClientConnectorError as ClientConnectorError,
    ClientConnectorSSLError as ClientConnectorSSLError,
    ClientError as ClientError,
    ClientHttpProxyError as ClientHttpProxyError,
    ClientOSError as ClientOSError,
    ClientPayloadError as ClientPayloadError,
    ClientProxyConnectionError as ClientProxyConnectionError,
    ClientRequest as ClientRequest,
    ClientResponse as ClientResponse,
    ClientResponseError as ClientResponseError,
    ClientSession as ClientSession,
    ClientSSLError as ClientSSLError,
    ClientTimeout as ClientTimeout,
    ClientWebSocketResponse as ClientWebSocketResponse,
    ContentTypeError as ContentTypeError,
    Fingerprint as Fingerprint,
    InvalidURL as InvalidURL,
    NamedPipeConnector as NamedPipeConnector,
    RequestInfo as RequestInfo,
    ServerConnectionError as ServerConnectionError,
    ServerDisconnectedError as ServerDisconnectedError,
    ServerFingerprintMismatch as ServerFingerprintMismatch,
    ServerTimeoutError as ServerTimeoutError,
    TCPConnector as TCPConnector,
    TooManyRedirects as TooManyRedirects,
    UnixConnector as UnixConnector,
    WSServerHandshakeError as WSServerHandshakeError,
    request as request,
)
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
from .formdata import FormData as FormData
from .helpers import BasicAuth, ChainMapProxy, ETag
from .http import (
    HttpVersion as HttpVersion,
    HttpVersion10 as HttpVersion10,
    HttpVersion11 as HttpVersion11,
    WebSocketError as WebSocketError,
    WSCloseCode as WSCloseCode,
    WSMessage as WSMessage,
    WSMsgType as WSMsgType,
)
from .multipart import (
    BadContentDispositionHeader as BadContentDispositionHeader,
    BadContentDispositionParam as BadContentDispositionParam,
    BodyPartReader as BodyPartReader,
    MultipartReader as MultipartReader,
    MultipartWriter as MultipartWriter,
    content_disposition_filename as content_disposition_filename,
    parse_content_disposition as parse_content_disposition,
)
from .payload import (
    PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
    AsyncIterablePayload as AsyncIterablePayload,
    BufferedReaderPayload as BufferedReaderPayload,
    BytesIOPayload as BytesIOPayload,
    BytesPayload as BytesPayload,
    IOBasePayload as IOBasePayload,
    JsonPayload as JsonPayload,
    Payload as Payload,
    StringIOPayload as StringIOPayload,
    StringPayload as StringPayload,
    TextIOPayload as TextIOPayload,
    get_payload as get_payload,
    payload_type as payload_type,
)
from .payload_streamer import streamer as streamer
from .resolver import (
    AsyncResolver as AsyncResolver,
    DefaultResolver as DefaultResolver,
    ThreadedResolver as ThreadedResolver,
)
from .streams import (
    EMPTY_PAYLOAD as EMPTY_PAYLOAD,
    DataQueue as DataQueue,
    EofStream as EofStream,
    FlowControlDataQueue as FlowControlDataQueue,
    StreamReader as StreamReader,
)
from .tracing import (
    TraceConfig as TraceConfig,
    TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
    TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
    TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
    TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
    TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
    TraceDnsCacheHitParams as TraceDnsCacheHitParams,
    TraceDnsCacheMissParams as TraceDnsCacheMissParams,
    TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
    TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
    TraceRequestChunkSentParams as TraceRequestChunkSentParams,
    TraceRequestEndParams as TraceRequestEndParams,
    TraceRequestExceptionParams as TraceRequestExceptionParams,
    TraceRequestRedirectParams as TraceRequestRedirectParams,
    TraceRequestStartParams as TraceRequestStartParams,
    TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
)

if TYPE_CHECKING:  # pragma: no cover
    # At runtime these are lazy-loaded at the bottom of the file.
    from .worker import (
        GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
        GunicornWebWorker as GunicornWebWorker,
    )

__all__: Tuple[str, ...] = (
    "hdrs",
    # client
    "BaseConnector",
    "ClientConnectionError",
    "ClientConnectorCertificateError",
    "ClientConnectorError",
    "ClientConnectorSSLError",
    "ClientError",
    "ClientHttpProxyError",
    "ClientOSError",
    "ClientPayloadError",
    "ClientProxyConnectionError",
    "ClientResponse",
    "ClientRequest",
    "ClientResponseError",
    "ClientSSLError",
    "ClientSession",
    "ClientTimeout",
    "ClientWebSocketResponse",
    "ContentTypeError",
    "Fingerprint",
    "InvalidURL",
    "RequestInfo",
    "ServerConnectionError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ServerTimeoutError",
    "TCPConnector",
    "TooManyRedirects",
    "UnixConnector",
    "NamedPipeConnector",
    "WSServerHandshakeError",
    "request",
    # cookiejar
    "CookieJar",
    "DummyCookieJar",
    # formdata
    "FormData",
    # helpers
    "BasicAuth",
    "ChainMapProxy",
    "ETag",
    # http
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    "WSMsgType",
    "WSCloseCode",
    "WSMessage",
    "WebSocketError",
    # multipart
    "BadContentDispositionHeader",
    "BadContentDispositionParam",
    "BodyPartReader",
    "MultipartReader",
    "MultipartWriter",
    "content_disposition_filename",
    "parse_content_disposition",
    # payload
    "AsyncIterablePayload",
    "BufferedReaderPayload",
    "BytesIOPayload",
    "BytesPayload",
    "IOBasePayload",
    "JsonPayload",
    "PAYLOAD_REGISTRY",
    "Payload",
    "StringIOPayload",
    "StringPayload",
    "TextIOPayload",
    "get_payload",
    "payload_type",
    # payload_streamer
    "streamer",
    # resolver
    "AsyncResolver",
    "DefaultResolver",
    "ThreadedResolver",
    # streams
    "DataQueue",
    "EMPTY_PAYLOAD",
    "EofStream",
    "FlowControlDataQueue",
    "StreamReader",
    # tracing
    "TraceConfig",
    "TraceConnectionCreateEndParams",
    "TraceConnectionCreateStartParams",
    "TraceConnectionQueuedEndParams",
    "TraceConnectionQueuedStartParams",
    "TraceConnectionReuseconnParams",
    "TraceDnsCacheHitParams",
    "TraceDnsCacheMissParams",
    "TraceDnsResolveHostEndParams",
    "TraceDnsResolveHostStartParams",
    "TraceRequestChunkSentParams",
    "TraceRequestEndParams",
    "TraceRequestExceptionParams",
    "TraceRequestRedirectParams",
    "TraceRequestStartParams",
    "TraceResponseChunkReceivedParams",
    # workers (imported lazily with __getattr__)
    "GunicornUVLoopWebWorker",
    "GunicornWebWorker",
)


def __dir__() -> Tuple[str, ...]:
    return __all__ + ("__author__", "__doc__")


def __getattr__(name: str) -> object:
    global GunicornUVLoopWebWorker, GunicornWebWorker

    # Importing gunicorn takes a long time (>100ms), so only import if actually needed.
    if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
        try:
            from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
        except ImportError:
            return None

        GunicornUVLoopWebWorker = guv  # type: ignore[misc]
        GunicornWebWorker = gw  # type: ignore[misc]
        return guv if name == "GunicornUVLoopWebWorker" else gw

    raise AttributeError(f"module {__name__} has no attribute {name}")
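
The module-level __getattr__/__dir__ pair above is the PEP 562 lazy-import
idiom. A standalone sketch of the same pattern follows; the module contents
are illustrative ("json" merely stands in for a costly import such as
gunicorn):

    # lazy_mod.py -- minimal sketch of the PEP 562 pattern used above.
    from typing import Any

    def __getattr__(name: str) -> Any:
        if name == "json":
            import json  # deferred until first attribute access
            globals()["json"] = json  # cache so __getattr__ is not hit again
            return json
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")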
@@ -1,158 +0,0 @@
from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t


cdef extern from "../vendor/llhttp/build/llhttp.h":

    struct llhttp__internal_s:
        int32_t _index
        void* _span_pos0
        void* _span_cb0
        int32_t error
        const char* reason
        const char* error_pos
        void* data
        void* _current
        uint64_t content_length
        uint8_t type
        uint8_t method
        uint8_t http_major
        uint8_t http_minor
        uint8_t header_state
        uint8_t lenient_flags
        uint8_t upgrade
        uint8_t finish
        uint16_t flags
        uint16_t status_code
        void* settings

    ctypedef llhttp__internal_s llhttp__internal_t
    ctypedef llhttp__internal_t llhttp_t

    ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
    ctypedef int (*llhttp_cb)(llhttp_t*) except -1

    struct llhttp_settings_s:
        llhttp_cb on_message_begin
        llhttp_data_cb on_url
        llhttp_data_cb on_status
        llhttp_data_cb on_header_field
        llhttp_data_cb on_header_value
        llhttp_cb on_headers_complete
        llhttp_data_cb on_body
        llhttp_cb on_message_complete
        llhttp_cb on_chunk_header
        llhttp_cb on_chunk_complete

        llhttp_cb on_url_complete
        llhttp_cb on_status_complete
        llhttp_cb on_header_field_complete
        llhttp_cb on_header_value_complete

    ctypedef llhttp_settings_s llhttp_settings_t

    enum llhttp_errno:
        HPE_OK,
        HPE_INTERNAL,
        HPE_STRICT,
        HPE_LF_EXPECTED,
        HPE_UNEXPECTED_CONTENT_LENGTH,
        HPE_CLOSED_CONNECTION,
        HPE_INVALID_METHOD,
        HPE_INVALID_URL,
        HPE_INVALID_CONSTANT,
        HPE_INVALID_VERSION,
        HPE_INVALID_HEADER_TOKEN,
        HPE_INVALID_CONTENT_LENGTH,
        HPE_INVALID_CHUNK_SIZE,
        HPE_INVALID_STATUS,
        HPE_INVALID_EOF_STATE,
        HPE_INVALID_TRANSFER_ENCODING,
        HPE_CB_MESSAGE_BEGIN,
        HPE_CB_HEADERS_COMPLETE,
        HPE_CB_MESSAGE_COMPLETE,
        HPE_CB_CHUNK_HEADER,
        HPE_CB_CHUNK_COMPLETE,
        HPE_PAUSED,
        HPE_PAUSED_UPGRADE,
        HPE_USER

    ctypedef llhttp_errno llhttp_errno_t

    enum llhttp_flags:
        F_CHUNKED,
        F_CONTENT_LENGTH

    enum llhttp_type:
        HTTP_REQUEST,
        HTTP_RESPONSE,
        HTTP_BOTH

    enum llhttp_method:
        HTTP_DELETE,
        HTTP_GET,
        HTTP_HEAD,
        HTTP_POST,
        HTTP_PUT,
        HTTP_CONNECT,
        HTTP_OPTIONS,
        HTTP_TRACE,
        HTTP_COPY,
        HTTP_LOCK,
        HTTP_MKCOL,
        HTTP_MOVE,
        HTTP_PROPFIND,
        HTTP_PROPPATCH,
        HTTP_SEARCH,
        HTTP_UNLOCK,
        HTTP_BIND,
        HTTP_REBIND,
        HTTP_UNBIND,
        HTTP_ACL,
        HTTP_REPORT,
        HTTP_MKACTIVITY,
        HTTP_CHECKOUT,
        HTTP_MERGE,
        HTTP_MSEARCH,
        HTTP_NOTIFY,
        HTTP_SUBSCRIBE,
        HTTP_UNSUBSCRIBE,
        HTTP_PATCH,
        HTTP_PURGE,
        HTTP_MKCALENDAR,
        HTTP_LINK,
        HTTP_UNLINK,
        HTTP_SOURCE,
        HTTP_PRI,
        HTTP_DESCRIBE,
        HTTP_ANNOUNCE,
        HTTP_SETUP,
        HTTP_PLAY,
        HTTP_PAUSE,
        HTTP_TEARDOWN,
        HTTP_GET_PARAMETER,
        HTTP_SET_PARAMETER,
        HTTP_REDIRECT,
        HTTP_RECORD,
        HTTP_FLUSH

    ctypedef llhttp_method llhttp_method_t

    void llhttp_settings_init(llhttp_settings_t* settings)
    void llhttp_init(llhttp_t* parser, llhttp_type type,
                     const llhttp_settings_t* settings)

    llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)

    int llhttp_should_keep_alive(const llhttp_t* parser)

    void llhttp_resume_after_upgrade(llhttp_t* parser)

    llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
    const char* llhttp_get_error_reason(const llhttp_t* parser)
    const char* llhttp_get_error_pos(const llhttp_t* parser)

    const char* llhttp_method_name(llhttp_method_t method)

    void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
    void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
    void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
@@ -1,2 +0,0 @@
cdef extern from "_find_header.h":
    int find_header(char *, int)
@@ -1,83 +0,0 @@
# This file is autogenerated from aiohttp/hdrs.py
# Run ./tools/gen.py to update it after the origin changes.

from . import hdrs

cdef tuple headers = (
    hdrs.ACCEPT,
    hdrs.ACCEPT_CHARSET,
    hdrs.ACCEPT_ENCODING,
    hdrs.ACCEPT_LANGUAGE,
    hdrs.ACCEPT_RANGES,
    hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
    hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
    hdrs.ACCESS_CONTROL_ALLOW_METHODS,
    hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
    hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
    hdrs.ACCESS_CONTROL_MAX_AGE,
    hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
    hdrs.ACCESS_CONTROL_REQUEST_METHOD,
    hdrs.AGE,
    hdrs.ALLOW,
    hdrs.AUTHORIZATION,
    hdrs.CACHE_CONTROL,
    hdrs.CONNECTION,
    hdrs.CONTENT_DISPOSITION,
    hdrs.CONTENT_ENCODING,
    hdrs.CONTENT_LANGUAGE,
    hdrs.CONTENT_LENGTH,
    hdrs.CONTENT_LOCATION,
    hdrs.CONTENT_MD5,
    hdrs.CONTENT_RANGE,
    hdrs.CONTENT_TRANSFER_ENCODING,
    hdrs.CONTENT_TYPE,
    hdrs.COOKIE,
    hdrs.DATE,
    hdrs.DESTINATION,
    hdrs.DIGEST,
    hdrs.ETAG,
    hdrs.EXPECT,
    hdrs.EXPIRES,
    hdrs.FORWARDED,
    hdrs.FROM,
    hdrs.HOST,
    hdrs.IF_MATCH,
    hdrs.IF_MODIFIED_SINCE,
    hdrs.IF_NONE_MATCH,
    hdrs.IF_RANGE,
    hdrs.IF_UNMODIFIED_SINCE,
    hdrs.KEEP_ALIVE,
    hdrs.LAST_EVENT_ID,
    hdrs.LAST_MODIFIED,
    hdrs.LINK,
    hdrs.LOCATION,
    hdrs.MAX_FORWARDS,
    hdrs.ORIGIN,
    hdrs.PRAGMA,
    hdrs.PROXY_AUTHENTICATE,
    hdrs.PROXY_AUTHORIZATION,
    hdrs.RANGE,
    hdrs.REFERER,
    hdrs.RETRY_AFTER,
    hdrs.SEC_WEBSOCKET_ACCEPT,
    hdrs.SEC_WEBSOCKET_EXTENSIONS,
    hdrs.SEC_WEBSOCKET_KEY,
    hdrs.SEC_WEBSOCKET_KEY1,
    hdrs.SEC_WEBSOCKET_PROTOCOL,
    hdrs.SEC_WEBSOCKET_VERSION,
    hdrs.SERVER,
    hdrs.SET_COOKIE,
    hdrs.TE,
    hdrs.TRAILER,
    hdrs.TRANSFER_ENCODING,
    hdrs.URI,
    hdrs.UPGRADE,
    hdrs.USER_AGENT,
    hdrs.VARY,
    hdrs.VIA,
    hdrs.WWW_AUTHENTICATE,
    hdrs.WANT_DIGEST,
    hdrs.WARNING,
    hdrs.X_FORWARDED_FOR,
    hdrs.X_FORWARDED_HOST,
    hdrs.X_FORWARDED_PROTO,
)
Binary file not shown.
@@ -1,6 +0,0 @@
from typing import Any

class reify:
    def __init__(self, wrapped: Any) -> None: ...
    def __get__(self, inst: Any, owner: Any) -> Any: ...
    def __set__(self, inst: Any, value: Any) -> None: ...
@@ -1,35 +0,0 @@
cdef class reify:
    """Use as a class method decorator.  It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable.  It is, in Python parlance, a data descriptor.

    """

    cdef object wrapped
    cdef object name

    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.name = wrapped.__name__

    @property
    def __doc__(self):
        return self.wrapped.__doc__

    def __get__(self, inst, owner):
        try:
            try:
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            if inst is None:
                return self
            raise

    def __set__(self, inst, value):
        raise AttributeError("reified property is read-only")
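
A brief usage sketch of the reify descriptor above. aiohttp.helpers exposes
a pure-Python reify with the same contract; the Request class here is
illustrative, and the only requirement is an instance _cache dict:

    from aiohttp.helpers import reify  # pure-Python fallback, same contract

    class Request:
        def __init__(self) -> None:
            self._cache = {}  # reify stores computed values here

        @reify
        def headers(self):
            print("parsing headers...")  # runs only on the first access
            return {"Host": "example.com"}

    req = Request()
    req.headers  # prints once, then caches the result into req._cache
    req.headers  # second access is served straight from the cache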
Binary file not shown.
@@ -1,836 +0,0 @@
#cython: language_level=3
#
# Based on https://github.com/MagicStack/httptools
#

from cpython cimport (
    Py_buffer,
    PyBUF_SIMPLE,
    PyBuffer_Release,
    PyBytes_AsString,
    PyBytes_AsStringAndSize,
    PyObject_GetBuffer,
)
from cpython.mem cimport PyMem_Free, PyMem_Malloc
from libc.limits cimport ULLONG_MAX
from libc.string cimport memcpy

from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
from yarl import URL as _URL

from aiohttp import hdrs
from aiohttp.helpers import DEBUG

from .http_exceptions import (
    BadHttpMessage,
    BadStatusLine,
    ContentLengthError,
    InvalidHeader,
    InvalidURLError,
    LineTooLong,
    PayloadEncodingError,
    TransferEncodingError,
)
from .http_parser import DeflateBuffer as _DeflateBuffer
from .http_writer import (
    HttpVersion as _HttpVersion,
    HttpVersion10 as _HttpVersion10,
    HttpVersion11 as _HttpVersion11,
)
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader

cimport cython

from aiohttp cimport _cparser as cparser

include "_headers.pxi"

from aiohttp cimport _find_header

DEF DEFAULT_FREELIST_SIZE = 250

cdef extern from "Python.h":
    int PyByteArray_Resize(object, Py_ssize_t) except -1
    Py_ssize_t PyByteArray_Size(object) except -1
    char* PyByteArray_AsString(object)

__all__ = ('HttpRequestParser', 'HttpResponseParser',
           'RawRequestMessage', 'RawResponseMessage')

cdef object URL = _URL
cdef object URL_build = URL.build
cdef object CIMultiDict = _CIMultiDict
cdef object CIMultiDictProxy = _CIMultiDictProxy
cdef object HttpVersion = _HttpVersion
cdef object HttpVersion10 = _HttpVersion10
cdef object HttpVersion11 = _HttpVersion11
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
cdef object StreamReader = _StreamReader
cdef object DeflateBuffer = _DeflateBuffer


cdef inline object extend(object buf, const char* at, size_t length):
    cdef Py_ssize_t s
    cdef char* ptr
    s = PyByteArray_Size(buf)
    PyByteArray_Resize(buf, s + length)
    ptr = PyByteArray_AsString(buf)
    memcpy(ptr + s, at, length)


DEF METHODS_COUNT = 46;

cdef list _http_method = []

for i in range(METHODS_COUNT):
    _http_method.append(
        cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))


cdef inline str http_method_str(int i):
    if i < METHODS_COUNT:
        return <str>_http_method[i]
    else:
        return "<unknown>"

cdef inline object find_header(bytes raw_header):
    cdef Py_ssize_t size
    cdef char *buf
    cdef int idx
    PyBytes_AsStringAndSize(raw_header, &buf, &size)
    idx = _find_header.find_header(buf, size)
    if idx == -1:
        return raw_header.decode('utf-8', 'surrogateescape')
    return headers[idx]
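
find_header above maps a raw header name to an index into the headers tuple
generated in _headers.pxi, falling back to decoding unknown names. A
pure-Python model of that lookup; the table and tuple below are illustrative
stand-ins, not the real generated data:

    # Illustrative model: a name table gives an index into the canonical
    # header tuple, and -1 falls back to decoding the raw bytes.
    _TABLE = {b"content-type": 0, b"content-length": 1}  # stand-in for the C table
    _HEADERS = ("Content-Type", "Content-Length")        # stand-in for _headers.pxi

    def find_header_py(raw: bytes) -> str:
        idx = _TABLE.get(raw.lower(), -1)
        if idx == -1:
            return raw.decode("utf-8", "surrogateescape")
        return _HEADERS[idx]

    assert find_header_py(b"Content-Type") == "Content-Type"
    assert find_header_py(b"X-Custom") == "X-Custom"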


@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawRequestMessage:
    cdef readonly str method
    cdef readonly str path
    cdef readonly object version  # HttpVersion
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked
    cdef readonly object url  # yarl.URL

    def __init__(self, method, path, version, headers, raw_headers,
                 should_close, compression, upgrade, chunked, url):
        self.method = method
        self.path = path
        self.version = version
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked
        self.url = url

    def __repr__(self):
        info = []
        info.append(("method", self.method))
        info.append(("path", self.path))
        info.append(("version", self.version))
        info.append(("headers", self.headers))
        info.append(("raw_headers", self.raw_headers))
        info.append(("should_close", self.should_close))
        info.append(("compression", self.compression))
        info.append(("upgrade", self.upgrade))
        info.append(("chunked", self.chunked))
        info.append(("url", self.url))
        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
        return '<RawRequestMessage(' + sinfo + ')>'

    def _replace(self, **dct):
        cdef RawRequestMessage ret
        ret = _new_request_message(self.method,
                                   self.path,
                                   self.version,
                                   self.headers,
                                   self.raw_headers,
                                   self.should_close,
                                   self.compression,
                                   self.upgrade,
                                   self.chunked,
                                   self.url)
        if "method" in dct:
            ret.method = dct["method"]
        if "path" in dct:
            ret.path = dct["path"]
        if "version" in dct:
            ret.version = dct["version"]
        if "headers" in dct:
            ret.headers = dct["headers"]
        if "raw_headers" in dct:
            ret.raw_headers = dct["raw_headers"]
        if "should_close" in dct:
            ret.should_close = dct["should_close"]
        if "compression" in dct:
            ret.compression = dct["compression"]
        if "upgrade" in dct:
            ret.upgrade = dct["upgrade"]
        if "chunked" in dct:
            ret.chunked = dct["chunked"]
        if "url" in dct:
            ret.url = dct["url"]
        return ret

cdef _new_request_message(str method,
                          str path,
                          object version,
                          object headers,
                          object raw_headers,
                          bint should_close,
                          object compression,
                          bint upgrade,
                          bint chunked,
                          object url):
    cdef RawRequestMessage ret
    ret = RawRequestMessage.__new__(RawRequestMessage)
    ret.method = method
    ret.path = path
    ret.version = version
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    ret.url = url
    return ret


@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawResponseMessage:
    cdef readonly object version  # HttpVersion
    cdef readonly int code
    cdef readonly str reason
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked

    def __init__(self, version, code, reason, headers, raw_headers,
                 should_close, compression, upgrade, chunked):
        self.version = version
        self.code = code
        self.reason = reason
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked

    def __repr__(self):
        info = []
        info.append(("version", self.version))
        info.append(("code", self.code))
        info.append(("reason", self.reason))
        info.append(("headers", self.headers))
        info.append(("raw_headers", self.raw_headers))
        info.append(("should_close", self.should_close))
        info.append(("compression", self.compression))
        info.append(("upgrade", self.upgrade))
        info.append(("chunked", self.chunked))
        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
        return '<RawResponseMessage(' + sinfo + ')>'


cdef _new_response_message(object version,
                           int code,
                           str reason,
                           object headers,
                           object raw_headers,
                           bint should_close,
                           object compression,
                           bint upgrade,
                           bint chunked):
    cdef RawResponseMessage ret
    ret = RawResponseMessage.__new__(RawResponseMessage)
    ret.version = version
    ret.code = code
    ret.reason = reason
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    return ret


@cython.internal
cdef class HttpParser:

    cdef:
        cparser.llhttp_t* _cparser
        cparser.llhttp_settings_t* _csettings

        bytearray _raw_name
        bytearray _raw_value
        bint _has_value

        object _protocol
        object _loop
        object _timer

        size_t _max_line_size
        size_t _max_field_size
        size_t _max_headers
        bint _response_with_body
        bint _read_until_eof

        bint _started
        object _url
        bytearray _buf
        str _path
        str _reason
        object _headers
        list _raw_headers
        bint _upgraded
        list _messages
        object _payload
        bint _payload_error
        object _payload_exception
        object _last_error
        bint _auto_decompress
        int _limit

        str _content_encoding

        Py_buffer py_buf

    def __cinit__(self):
        self._cparser = <cparser.llhttp_t*> \
            PyMem_Malloc(sizeof(cparser.llhttp_t))
        if self._cparser is NULL:
            raise MemoryError()

        self._csettings = <cparser.llhttp_settings_t*> \
            PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
        if self._csettings is NULL:
            raise MemoryError()

    def __dealloc__(self):
        PyMem_Free(self._cparser)
        PyMem_Free(self._csettings)

    cdef _init(
        self, cparser.llhttp_type mode,
        object protocol, object loop, int limit,
        object timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        cparser.llhttp_settings_init(self._csettings)
        cparser.llhttp_init(self._cparser, mode, self._csettings)
        self._cparser.data = <void*>self
        self._cparser.content_length = 0

        self._protocol = protocol
        self._loop = loop
        self._timer = timer

        self._buf = bytearray()
        self._payload = None
        self._payload_error = 0
        self._payload_exception = payload_exception
        self._messages = []

        self._raw_name = bytearray()
        self._raw_value = bytearray()
        self._has_value = False

        self._max_line_size = max_line_size
        self._max_headers = max_headers
        self._max_field_size = max_field_size
        self._response_with_body = response_with_body
        self._read_until_eof = read_until_eof
        self._upgraded = False
        self._auto_decompress = auto_decompress
        self._content_encoding = None

        self._csettings.on_url = cb_on_url
        self._csettings.on_status = cb_on_status
        self._csettings.on_header_field = cb_on_header_field
        self._csettings.on_header_value = cb_on_header_value
        self._csettings.on_headers_complete = cb_on_headers_complete
        self._csettings.on_body = cb_on_body
        self._csettings.on_message_begin = cb_on_message_begin
        self._csettings.on_message_complete = cb_on_message_complete
        self._csettings.on_chunk_header = cb_on_chunk_header
        self._csettings.on_chunk_complete = cb_on_chunk_complete

        self._last_error = None
        self._limit = limit

    cdef _process_header(self):
        if self._raw_name:
            raw_name = bytes(self._raw_name)
            raw_value = bytes(self._raw_value)

            name = find_header(raw_name)
            value = raw_value.decode('utf-8', 'surrogateescape')

            self._headers.add(name, value)

            if name is CONTENT_ENCODING:
                self._content_encoding = value

            PyByteArray_Resize(self._raw_name, 0)
            PyByteArray_Resize(self._raw_value, 0)
            self._has_value = False
            self._raw_headers.append((raw_name, raw_value))

    cdef _on_header_field(self, char* at, size_t length):
        cdef Py_ssize_t size
        cdef char *buf
        if self._has_value:
            self._process_header()

        size = PyByteArray_Size(self._raw_name)
        PyByteArray_Resize(self._raw_name, size + length)
        buf = PyByteArray_AsString(self._raw_name)
        memcpy(buf + size, at, length)

    cdef _on_header_value(self, char* at, size_t length):
        cdef Py_ssize_t size
        cdef char *buf

        size = PyByteArray_Size(self._raw_value)
        PyByteArray_Resize(self._raw_value, size + length)
        buf = PyByteArray_AsString(self._raw_value)
        memcpy(buf + size, at, length)
        self._has_value = True

    cdef _on_headers_complete(self):
        self._process_header()

        method = http_method_str(self._cparser.method)
        should_close = not cparser.llhttp_should_keep_alive(self._cparser)
        upgrade = self._cparser.upgrade
        chunked = self._cparser.flags & cparser.F_CHUNKED

        raw_headers = tuple(self._raw_headers)
        headers = CIMultiDictProxy(self._headers)

        if upgrade or self._cparser.method == cparser.HTTP_CONNECT:
            self._upgraded = True

        # do not support old websocket spec
        if SEC_WEBSOCKET_KEY1 in headers:
            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

        encoding = None
        enc = self._content_encoding
        if enc is not None:
            self._content_encoding = None
            enc = enc.lower()
            if enc in ('gzip', 'deflate', 'br'):
                encoding = enc

        if self._cparser.type == cparser.HTTP_REQUEST:
            msg = _new_request_message(
                method, self._path,
                self.http_version(), headers, raw_headers,
                should_close, encoding, upgrade, chunked, self._url)
        else:
            msg = _new_response_message(
                self.http_version(), self._cparser.status_code, self._reason,
                headers, raw_headers, should_close, encoding,
                upgrade, chunked)

        if (
            ULLONG_MAX > self._cparser.content_length > 0 or chunked or
            self._cparser.method == cparser.HTTP_CONNECT or
            (self._cparser.status_code >= 199 and
             self._cparser.content_length == 0 and
             self._read_until_eof)
        ):
            payload = StreamReader(
                self._protocol, timer=self._timer, loop=self._loop,
                limit=self._limit)
        else:
            payload = EMPTY_PAYLOAD

        self._payload = payload
        if encoding is not None and self._auto_decompress:
            self._payload = DeflateBuffer(payload, encoding)

        if not self._response_with_body:
            payload = EMPTY_PAYLOAD

        self._messages.append((msg, payload))

    cdef _on_message_complete(self):
        self._payload.feed_eof()
        self._payload = None

    cdef _on_chunk_header(self):
        self._payload.begin_http_chunk_receiving()

    cdef _on_chunk_complete(self):
        self._payload.end_http_chunk_receiving()

    cdef object _on_status_complete(self):
        pass

    cdef inline http_version(self):
        cdef cparser.llhttp_t* parser = self._cparser

        if parser.http_major == 1:
            if parser.http_minor == 0:
                return HttpVersion10
            elif parser.http_minor == 1:
                return HttpVersion11

        return HttpVersion(parser.http_major, parser.http_minor)

    ### Public API ###

    def feed_eof(self):
        cdef bytes desc

        if self._payload is not None:
            if self._cparser.flags & cparser.F_CHUNKED:
                raise TransferEncodingError(
                    "Not enough data to satisfy the transfer length header.")
            elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
                raise ContentLengthError(
                    "Not enough data to satisfy the content length header.")
            elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
                desc = cparser.llhttp_get_error_reason(self._cparser)
                raise PayloadEncodingError(desc.decode('latin-1'))
            else:
                self._payload.feed_eof()
        elif self._started:
            self._on_headers_complete()
            if self._messages:
                return self._messages[-1][0]

    def feed_data(self, data):
        cdef:
            size_t data_len
            size_t nb
        cdef cparser.llhttp_errno_t errno

        PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
        data_len = <size_t>self.py_buf.len

        errno = cparser.llhttp_execute(
            self._cparser,
            <char*>self.py_buf.buf,
            data_len)

        if errno is cparser.HPE_PAUSED_UPGRADE:
            cparser.llhttp_resume_after_upgrade(self._cparser)

            nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf

        PyBuffer_Release(&self.py_buf)

        if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
            if self._payload_error == 0:
                if self._last_error is not None:
                    ex = self._last_error
                    self._last_error = None
                else:
                    after = cparser.llhttp_get_error_pos(self._cparser)
                    before = data[:after - <char*>self.py_buf.buf]
                    after_b = after.split(b"\r\n", 1)[0]
                    before = before.rsplit(b"\r\n", 1)[-1]
                    data = before + after_b
                    pointer = " " * (len(repr(before))-1) + "^"
                    ex = parser_error_from_errno(self._cparser, data, pointer)
                self._payload = None
                raise ex

        if self._messages:
            messages = self._messages
            self._messages = []
        else:
            messages = ()

        if self._upgraded:
            return messages, True, data[nb:]
        else:
            return messages, False, b''

    def set_upgraded(self, val):
        self._upgraded = val
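
A rough sketch of the feed_data contract, exercised here through the
pure-Python aiohttp.http_parser.HttpRequestParser, which mirrors this Cython
class's API; the mock protocol and positional arguments are assumptions
modeled on how aiohttp's own tests construct parsers:

    import asyncio
    from unittest import mock

    from aiohttp.http_parser import HttpRequestParser  # pure-Python parser

    async def main() -> None:
        loop = asyncio.get_running_loop()
        parser = HttpRequestParser(mock.Mock(), loop, 2 ** 16)
        raw = b"GET /index?x=1 HTTP/1.1\r\nHost: example.com\r\n\r\n"
        # feed_data returns (messages, upgraded, tail); each message is a
        # (RawRequestMessage, payload) pair.
        messages, upgraded, tail = parser.feed_data(raw)
        msg, payload = messages[0]
        print(msg.method, msg.path, upgraded, tail)  # GET /index?x=1 False b''

    asyncio.run(main())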


cdef class HttpRequestParser(HttpParser):

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)

    cdef object _on_status_complete(self):
        cdef int idx1, idx2
        if not self._buf:
            return
        self._path = self._buf.decode('utf-8', 'surrogateescape')
        try:
            idx3 = len(self._path)
            if self._cparser.method == cparser.HTTP_CONNECT:
                # authority-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
                self._url = URL.build(authority=self._path, encoded=True)
            elif idx3 > 1 and self._path[0] == '/':
                # origin-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
                idx1 = self._path.find("?")
                if idx1 == -1:
                    query = ""
                    idx2 = self._path.find("#")
                    if idx2 == -1:
                        path = self._path
                        fragment = ""
                    else:
                        path = self._path[0: idx2]
                        fragment = self._path[idx2+1:]
                else:
                    path = self._path[0:idx1]
                    idx1 += 1
                    idx2 = self._path.find("#", idx1+1)
                    if idx2 == -1:
                        query = self._path[idx1:]
                        fragment = ""
                    else:
                        query = self._path[idx1: idx2]
                        fragment = self._path[idx2+1:]

                self._url = URL.build(
                    path=path,
                    query_string=query,
                    fragment=fragment,
                    encoded=True,
                )
            else:
                # absolute-form for proxy maybe,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
                self._url = URL(self._path, encoded=True)
        finally:
            PyByteArray_Resize(self._buf, 0)
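
The origin-form branch above splits the request target on the first "?" and
the following "#" before handing the pieces to yarl.URL.build. A pure-Python
sketch of the same split; the helper name is illustrative:

    def split_target(target: str) -> tuple[str, str, str]:
        # Split an origin-form target into (path, query, fragment).
        path, sep, rest = target.partition("?")
        if sep:
            query, _, fragment = rest.partition("#")
        else:
            query = ""
            path, _, fragment = target.partition("#")
        return path, query, fragment

    assert split_target("/a/b?x=1#top") == ("/a/b", "x=1", "top")
    assert split_target("/a/b") == ("/a/b", "", "")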
cdef class HttpResponseParser(HttpParser):
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, protocol, loop, int limit, timer=None,
|
|
||||||
size_t max_line_size=8190, size_t max_headers=32768,
|
|
||||||
size_t max_field_size=8190, payload_exception=None,
|
|
||||||
bint response_with_body=True, bint read_until_eof=False,
|
|
||||||
bint auto_decompress=True
|
|
||||||
):
|
|
||||||
self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
|
|
||||||
max_line_size, max_headers, max_field_size,
|
|
||||||
payload_exception, response_with_body, read_until_eof,
|
|
||||||
auto_decompress)
|
|
||||||
# Use strict parsing on dev mode, so users are warned about broken servers.
|
|
||||||
if not DEBUG:
|
|
||||||
cparser.llhttp_set_lenient_headers(self._cparser, 1)
|
|
||||||
cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
|
|
||||||
cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)
|
|
||||||
|
|
||||||
cdef object _on_status_complete(self):
|
|
||||||
if self._buf:
|
|
||||||
self._reason = self._buf.decode('utf-8', 'surrogateescape')
|
|
||||||
PyByteArray_Resize(self._buf, 0)
|
|
||||||
else:
|
|
||||||
self._reason = self._reason or ''
|
|
||||||
|
|
||||||
cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
|
|
||||||
pyparser._started = True
|
|
||||||
pyparser._headers = CIMultiDict()
|
|
||||||
pyparser._raw_headers = []
|
|
||||||
PyByteArray_Resize(pyparser._buf, 0)
|
|
||||||
pyparser._path = None
|
|
||||||
pyparser._reason = None
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_url(cparser.llhttp_t* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
try:
|
|
||||||
if length > pyparser._max_line_size:
|
|
||||||
raise LineTooLong(
|
|
||||||
'Status line is too long', pyparser._max_line_size, length)
|
|
||||||
extend(pyparser._buf, at, length)
|
|
||||||
except BaseException as ex:
|
|
||||||
pyparser._last_error = ex
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_status(cparser.llhttp_t* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
cdef str reason
|
|
||||||
try:
|
|
||||||
if length > pyparser._max_line_size:
|
|
||||||
raise LineTooLong(
|
|
||||||
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_header_field(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        pyparser._on_status_complete()
        size = len(pyparser._raw_name) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header name is too long', pyparser._max_field_size, size)
        pyparser._on_header_field(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_header_value(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        size = len(pyparser._raw_value) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header value is too long', pyparser._max_field_size, size)
        pyparser._on_header_value(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0


cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_status_complete()
        pyparser._on_headers_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        if (
            pyparser._cparser.upgrade or
            pyparser._cparser.method == cparser.HTTP_CONNECT
        ):
            return 2
        else:
            return 0


cdef int cb_on_body(cparser.llhttp_t* parser,
                    const char *at, size_t length) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef bytes body = at[:length]
    try:
        pyparser._payload.feed_data(body, length)
    except BaseException as exc:
        if pyparser._payload_exception is not None:
            pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
        else:
            pyparser._payload.set_exception(exc)
        pyparser._payload_error = 1
        return -1
    else:
        return 0


cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._started = False
        pyparser._on_message_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_header()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0


cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
    cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
    cdef bytes desc = cparser.llhttp_get_error_reason(parser)

    err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer)

    if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
                 cparser.HPE_CB_HEADERS_COMPLETE,
                 cparser.HPE_CB_MESSAGE_COMPLETE,
                 cparser.HPE_CB_CHUNK_HEADER,
                 cparser.HPE_CB_CHUNK_COMPLETE,
                 cparser.HPE_INVALID_CONSTANT,
                 cparser.HPE_INVALID_HEADER_TOKEN,
                 cparser.HPE_INVALID_CONTENT_LENGTH,
                 cparser.HPE_INVALID_CHUNK_SIZE,
                 cparser.HPE_INVALID_EOF_STATE,
                 cparser.HPE_INVALID_TRANSFER_ENCODING}:
        return BadHttpMessage(err_msg)
    elif errno in {cparser.HPE_INVALID_STATUS,
                   cparser.HPE_INVALID_METHOD,
                   cparser.HPE_INVALID_VERSION}:
        return BadStatusLine(error=err_msg)
    elif errno == cparser.HPE_INVALID_URL:
        return InvalidURLError(err_msg)

    return BadHttpMessage(err_msg)
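Every callback above follows the same llhttp contract: return 0 to keep parsing, return -1 to abort after parking the Python exception on `pyparser._last_error` (the C layer cannot propagate Python exceptions directly), and return 2 from `cb_on_headers_complete` to tell llhttp to skip the body for upgrade/CONNECT responses. A minimal pure-Python sketch of that convention (names hypothetical, not part of aiohttp):

class CallbackDemo:
    """Hypothetical pure-Python analogue of the llhttp callback contract."""

    def __init__(self, max_field_size=8190):
        self._max_field_size = max_field_size
        self._last_error = None  # parked exception, re-raised by the caller

    def on_header_field(self, data: bytes) -> int:
        try:
            if len(data) > self._max_field_size:
                raise ValueError("Header name is too long")
        except BaseException as exc:
            self._last_error = exc  # C code cannot raise; caller re-raises
            return -1               # abort parsing
        return 0                    # continue parsing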
Binary file not shown.
@@ -1,163 +0,0 @@
from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
from cpython.object cimport PyObject_Str
from libc.stdint cimport uint8_t, uint64_t
from libc.string cimport memcpy

from multidict import istr

DEF BUF_SIZE = 16 * 1024  # 16KiB
cdef char BUFFER[BUF_SIZE]

cdef object _istr = istr


# ----------------- writer ---------------------------

cdef struct Writer:
    char *buf
    Py_ssize_t size
    Py_ssize_t pos


cdef inline void _init_writer(Writer* writer):
    writer.buf = &BUFFER[0]
    writer.size = BUF_SIZE
    writer.pos = 0


cdef inline void _release_writer(Writer* writer):
    if writer.buf != BUFFER:
        PyMem_Free(writer.buf)


cdef inline int _write_byte(Writer* writer, uint8_t ch):
    cdef char * buf
    cdef Py_ssize_t size

    if writer.pos == writer.size:
        # reallocate
        size = writer.size + BUF_SIZE
        if writer.buf == BUFFER:
            buf = <char*>PyMem_Malloc(size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
            memcpy(buf, writer.buf, writer.size)
        else:
            buf = <char*>PyMem_Realloc(writer.buf, size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
        writer.buf = buf
        writer.size = size
    writer.buf[writer.pos] = <char>ch
    writer.pos += 1
    return 0


cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
    cdef uint64_t utf = <uint64_t> symbol

    if utf < 0x80:
        return _write_byte(writer, <uint8_t>utf)
    elif utf < 0x800:
        if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif 0xD800 <= utf <= 0xDFFF:
        # surrogate pair, ignored
        return 0
    elif utf < 0x10000:
        if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
            return -1
        if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif utf > 0x10FFFF:
        # symbol is too large
        return 0
    else:
        if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))


cdef inline int _write_str(Writer* writer, str s):
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1


# --------------- _serialize_headers ----------------------

cdef str to_str(object s):
    typ = type(s)
    if typ is str:
        return <str>s
    elif typ is _istr:
        return PyObject_Str(s)
    elif not isinstance(s, str):
        raise TypeError("Cannot serialize non-str key {!r}".format(s))
    else:
        return str(s)


cdef void _safe_header(str string) except *:
    if "\r" in string or "\n" in string:
        raise ValueError(
            "Newline or carriage return character detected in HTTP status message or "
            "header. This is a potential security issue."
        )


def _serialize_headers(str status_line, headers):
    cdef Writer writer
    cdef object key
    cdef object val
    cdef bytes ret

    _init_writer(&writer)

    for key, val in headers.items():
        _safe_header(to_str(key))
        _safe_header(to_str(val))

    try:
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for key, val in headers.items():
            if _write_str(&writer, to_str(key)) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str(&writer, to_str(val)) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        _release_writer(&writer)
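For orientation, `_serialize_headers` emits the status line, one `Name: value` pair per header, and a terminating blank line, all CRLF-delimited. A rough pure-Python equivalent that skips the manual buffer management (an illustrative sketch, not the shipped code path):

def serialize_headers_py(status_line: str, headers: dict) -> bytes:
    # Same wire format as the Cython writer, minus the C buffer handling.
    lines = [status_line]
    lines.extend(f"{key}: {value}" for key, value in headers.items())
    return ("\r\n".join(lines) + "\r\n\r\n").encode("utf-8")

# serialize_headers_py("GET / HTTP/1.1", {"Host": "example.com"})
# -> b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n"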
Binary file not shown.
@@ -1,56 +0,0 @@
from cpython cimport PyBytes_AsString


#from cpython cimport PyByteArray_AsString  # cython still does not export that
cdef extern from "Python.h":
    char* PyByteArray_AsString(bytearray ba) except NULL

from libc.stdint cimport uint32_t, uint64_t, uintmax_t


def _websocket_mask_cython(object mask, object data):
    """Note, this function mutates its `data` argument
    """
    cdef:
        Py_ssize_t data_len, i
        # bit operations on signed integers are implementation-specific
        unsigned char * in_buf
        const unsigned char * mask_buf
        uint32_t uint32_msk
        uint64_t uint64_msk

    assert len(mask) == 4

    if not isinstance(mask, bytes):
        mask = bytes(mask)

    if isinstance(data, bytearray):
        data = <bytearray>data
    else:
        data = bytearray(data)

    data_len = len(data)
    in_buf = <unsigned char*>PyByteArray_AsString(data)
    mask_buf = <const unsigned char*>PyBytes_AsString(mask)
    uint32_msk = (<uint32_t*>mask_buf)[0]

    # TODO: align in_data ptr to achieve even faster speeds
    # is this needed in Python?! malloc() always aligns to sizeof(long) bytes

    if sizeof(size_t) >= 8:
        uint64_msk = uint32_msk
        uint64_msk = (uint64_msk << 32) | uint32_msk

        while data_len >= 8:
            (<uint64_t*>in_buf)[0] ^= uint64_msk
            in_buf += 8
            data_len -= 8

    while data_len >= 4:
        (<uint32_t*>in_buf)[0] ^= uint32_msk
        in_buf += 4
        data_len -= 4

    for i in range(0, data_len):
        in_buf[i] ^= mask_buf[i]
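The word-sized loops above are purely a speed optimization; the observable behavior is RFC 6455 masking, where payload byte i is XOR-ed with mask byte i % 4. An equivalent reference version in plain Python:

def websocket_mask_py(mask: bytes, data: bytearray) -> None:
    # Mutates `data` in place, matching the Cython function's contract.
    assert len(mask) == 4
    for i in range(len(data)):
        data[i] ^= mask[i % 4]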
@@ -1,209 +0,0 @@
import asyncio
import logging
from abc import ABC, abstractmethod
from collections.abc import Sized
from http.cookies import BaseCookie, Morsel
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Dict,
    Generator,
    Iterable,
    List,
    Optional,
    Tuple,
)

from multidict import CIMultiDict
from yarl import URL

from .helpers import get_running_loop
from .typedefs import LooseCookies

if TYPE_CHECKING:  # pragma: no cover
    from .web_app import Application
    from .web_exceptions import HTTPException
    from .web_request import BaseRequest, Request
    from .web_response import StreamResponse
else:
    BaseRequest = Request = Application = StreamResponse = None
    HTTPException = None


class AbstractRouter(ABC):
    def __init__(self) -> None:
        self._frozen = False

    def post_init(self, app: Application) -> None:
        """Post init stage.

        Not an abstract method for sake of backward compatibility,
        but if the router wants to be aware of the application
        it can override this.
        """

    @property
    def frozen(self) -> bool:
        return self._frozen

    def freeze(self) -> None:
        """Freeze router."""
        self._frozen = True

    @abstractmethod
    async def resolve(self, request: Request) -> "AbstractMatchInfo":
        """Return MATCH_INFO for given request"""


class AbstractMatchInfo(ABC):
    @property  # pragma: no branch
    @abstractmethod
    def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
        """Execute matched request handler"""

    @property
    @abstractmethod
    def expect_handler(
        self,
    ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
        """Expect handler for 100-continue processing"""

    @property  # pragma: no branch
    @abstractmethod
    def http_exception(self) -> Optional[HTTPException]:
        """HTTPException instance raised on router's resolving, or None"""

    @abstractmethod  # pragma: no branch
    def get_info(self) -> Dict[str, Any]:
        """Return a dict with additional info useful for introspection"""

    @property  # pragma: no branch
    @abstractmethod
    def apps(self) -> Tuple[Application, ...]:
        """Stack of nested applications.

        Top level application is left-most element.
        """

    @abstractmethod
    def add_app(self, app: Application) -> None:
        """Add application to the nested apps stack."""

    @abstractmethod
    def freeze(self) -> None:
        """Freeze the match info.

        The method is called after route resolution.

        After the call .add_app() is forbidden.
        """


class AbstractView(ABC):
    """Abstract class based view."""

    def __init__(self, request: Request) -> None:
        self._request = request

    @property
    def request(self) -> Request:
        """Request instance."""
        return self._request

    @abstractmethod
    def __await__(self) -> Generator[Any, None, StreamResponse]:
        """Execute the view handler."""


class AbstractResolver(ABC):
    """Abstract DNS resolver."""

    @abstractmethod
    async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
        """Return IP address for given hostname"""

    @abstractmethod
    async def close(self) -> None:
        """Release resolver"""


if TYPE_CHECKING:  # pragma: no cover
    IterableBase = Iterable[Morsel[str]]
else:
    IterableBase = Iterable


ClearCookiePredicate = Callable[["Morsel[str]"], bool]


class AbstractCookieJar(Sized, IterableBase):
    """Abstract Cookie Jar."""

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        self._loop = get_running_loop(loop)

    @abstractmethod
    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Clear all cookies if no predicate is passed."""

    @abstractmethod
    def clear_domain(self, domain: str) -> None:
        """Clear all cookies for domain and all subdomains."""

    @abstractmethod
    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""

    @abstractmethod
    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Return the jar's cookies filtered by their attributes."""


class AbstractStreamWriter(ABC):
    """Abstract stream writer."""

    buffer_size = 0
    output_size = 0
    length: Optional[int] = 0

    @abstractmethod
    async def write(self, chunk: bytes) -> None:
        """Write chunk into stream."""

    @abstractmethod
    async def write_eof(self, chunk: bytes = b"") -> None:
        """Write last chunk."""

    @abstractmethod
    async def drain(self) -> None:
        """Flush the write buffer."""

    @abstractmethod
    def enable_compression(self, encoding: str = "deflate") -> None:
        """Enable HTTP body compression"""

    @abstractmethod
    def enable_chunking(self) -> None:
        """Enable HTTP chunked mode"""

    @abstractmethod
    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write HTTP headers"""


class AbstractAccessLogger(ABC):
    """Abstract writer to access log."""

    def __init__(self, logger: logging.Logger, log_format: str) -> None:
        self.logger = logger
        self.log_format = log_format

    @abstractmethod
    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
        """Emit log to logger."""
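As a usage sketch, a concrete `AbstractAccessLogger` only needs to implement `log()`; for example (an illustrative subclass, not shipped with the library):

class SimpleAccessLogger(AbstractAccessLogger):
    def log(self, request, response, time):
        # `self.logger` and `self.log_format` come from the base __init__.
        self.logger.info(
            "%s %s -> %d (%.3fs)",
            request.method, request.path, response.status, time,
        )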
@@ -1,90 +0,0 @@
import asyncio
from typing import Optional, cast

from .tcp_helpers import tcp_nodelay


class BaseProtocol(asyncio.Protocol):
    __slots__ = (
        "_loop",
        "_paused",
        "_drain_waiter",
        "_connection_lost",
        "_reading_paused",
        "transport",
    )

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop: asyncio.AbstractEventLoop = loop
        self._paused = False
        self._drain_waiter: Optional[asyncio.Future[None]] = None
        self._reading_paused = False

        self.transport: Optional[asyncio.Transport] = None

    @property
    def connected(self) -> bool:
        """Return True if the connection is open."""
        return self.transport is not None

    def pause_writing(self) -> None:
        assert not self._paused
        self._paused = True

    def resume_writing(self) -> None:
        assert self._paused
        self._paused = False

        waiter = self._drain_waiter
        if waiter is not None:
            self._drain_waiter = None
            if not waiter.done():
                waiter.set_result(None)

    def pause_reading(self) -> None:
        if not self._reading_paused and self.transport is not None:
            try:
                self.transport.pause_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = True

    def resume_reading(self) -> None:
        if self._reading_paused and self.transport is not None:
            try:
                self.transport.resume_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = False

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        tr = cast(asyncio.Transport, transport)
        tcp_nodelay(tr, True)
        self.transport = tr

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        # Wake up the writer if currently paused.
        self.transport = None
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            return
        self._drain_waiter = None
        if waiter.done():
            return
        if exc is None:
            waiter.set_result(None)
        else:
            waiter.set_exception(exc)

    async def _drain_helper(self) -> None:
        if not self.connected:
            raise ConnectionResetError("Connection lost")
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            waiter = self._loop.create_future()
            self._drain_waiter = waiter
        await asyncio.shield(waiter)
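`_drain_helper()` is the piece that makes awaiting a drain respect flow control: once `pause_writing()` fires, every drainer blocks on one shared future that `resume_writing()` resolves. A toy driver of that handshake (it pokes a private method and uses a stand-in transport, so treat it as a sketch only):

import asyncio

async def demo() -> None:
    proto = BaseProtocol(asyncio.get_running_loop())
    proto.transport = asyncio.Transport()  # stand-in so `connected` is True
    proto.pause_writing()                  # pretend the send buffer filled up
    drained = asyncio.ensure_future(proto._drain_helper())
    await asyncio.sleep(0)                 # let the drainer park on the future
    proto.resume_writing()                 # wakes every parked drainer
    await drained

asyncio.run(demo())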
File diff suppressed because it is too large
@@ -1,346 +0,0 @@
"""HTTP related errors."""

import asyncio
import warnings
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union

from .http_parser import RawResponseMessage
from .typedefs import LooseHeaders

try:
    import ssl

    SSLContext = ssl.SSLContext
except ImportError:  # pragma: no cover
    ssl = SSLContext = None  # type: ignore[assignment]


if TYPE_CHECKING:  # pragma: no cover
    from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
else:
    RequestInfo = ClientResponse = ConnectionKey = None

__all__ = (
    "ClientError",
    "ClientConnectionError",
    "ClientOSError",
    "ClientConnectorError",
    "ClientProxyConnectionError",
    "ClientSSLError",
    "ClientConnectorSSLError",
    "ClientConnectorCertificateError",
    "ServerConnectionError",
    "ServerTimeoutError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ClientResponseError",
    "ClientHttpProxyError",
    "WSServerHandshakeError",
    "ContentTypeError",
    "ClientPayloadError",
    "InvalidURL",
)


class ClientError(Exception):
    """Base class for client connection errors."""


class ClientResponseError(ClientError):
    """Base class for exceptions that occur after getting a response.

    request_info: An instance of RequestInfo.
    history: A sequence of responses, if redirects occurred.
    status: HTTP status code.
    message: Error message.
    headers: Response headers.
    """

    def __init__(
        self,
        request_info: RequestInfo,
        history: Tuple[ClientResponse, ...],
        *,
        code: Optional[int] = None,
        status: Optional[int] = None,
        message: str = "",
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        self.request_info = request_info
        if code is not None:
            if status is not None:
                raise ValueError(
                    "Both code and status arguments are provided; "
                    "code is deprecated, use status instead"
                )
            warnings.warn(
                "code argument is deprecated, use status instead",
                DeprecationWarning,
                stacklevel=2,
            )
        if status is not None:
            self.status = status
        elif code is not None:
            self.status = code
        else:
            self.status = 0
        self.message = message
        self.headers = headers
        self.history = history
        self.args = (request_info, history)

    def __str__(self) -> str:
        return "{}, message={!r}, url={!r}".format(
            self.status,
            self.message,
            self.request_info.real_url,
        )

    def __repr__(self) -> str:
        args = f"{self.request_info!r}, {self.history!r}"
        if self.status != 0:
            args += f", status={self.status!r}"
        if self.message != "":
            args += f", message={self.message!r}"
        if self.headers is not None:
            args += f", headers={self.headers!r}"
        return f"{type(self).__name__}({args})"

    @property
    def code(self) -> int:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.status

    @code.setter
    def code(self, value: int) -> None:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.status = value


class ContentTypeError(ClientResponseError):
    """ContentType found is not valid."""


class WSServerHandshakeError(ClientResponseError):
    """websocket server handshake error."""


class ClientHttpProxyError(ClientResponseError):
    """HTTP proxy error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    proxy responds with status other than ``200 OK``
    on ``CONNECT`` request.
    """


class TooManyRedirects(ClientResponseError):
    """Client was redirected too many times."""


class ClientConnectionError(ClientError):
    """Base class for client socket errors."""


class ClientOSError(ClientConnectionError, OSError):
    """OSError error."""


class ClientConnectorError(ClientOSError):
    """Client connector error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    a connection can not be established.
    """

    def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
        self._conn_key = connection_key
        self._os_error = os_error
        super().__init__(os_error.errno, os_error.strerror)
        self.args = (connection_key, os_error)

    @property
    def os_error(self) -> OSError:
        return self._os_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
        return self._conn_key.ssl

    def __str__(self) -> str:
        return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
            self, self.ssl if self.ssl is not None else "default", self.strerror
        )

    # OSError.__reduce__ does too much black magic
    __reduce__ = BaseException.__reduce__


class ClientProxyConnectionError(ClientConnectorError):
    """Proxy connection error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """


class UnixClientConnectorError(ClientConnectorError):
    """Unix connector error.

    Raised in :py:class:`aiohttp.connector.UnixConnector`
    if connection to unix socket can not be established.
    """

    def __init__(
        self, path: str, connection_key: ConnectionKey, os_error: OSError
    ) -> None:
        self._path = path
        super().__init__(connection_key, os_error)

    @property
    def path(self) -> str:
        return self._path

    def __str__(self) -> str:
        return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
            self, self.ssl if self.ssl is not None else "default", self.strerror
        )


class ServerConnectionError(ClientConnectionError):
    """Server connection errors."""


class ServerDisconnectedError(ServerConnectionError):
    """Server disconnected."""

    def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
        if message is None:
            message = "Server disconnected"

        self.args = (message,)
        self.message = message


class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
    """Server timeout error."""


class ServerFingerprintMismatch(ServerConnectionError):
    """SSL certificate does not match expected fingerprint."""

    def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
        self.expected = expected
        self.got = got
        self.host = host
        self.port = port
        self.args = (expected, got, host, port)

    def __repr__(self) -> str:
        return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
            self.__class__.__name__, self.expected, self.got, self.host, self.port
        )


class ClientPayloadError(ClientError):
    """Response payload error."""


class InvalidURL(ClientError, ValueError):
    """Invalid URL.

    URL used for fetching is malformed, e.g. it doesn't contain the host
    part.
    """

    # Derive from ValueError for backward compatibility

    def __init__(self, url: Any) -> None:
        # The type of url is not yarl.URL because the exception can be raised
        # on URL(url) call
        super().__init__(url)

    @property
    def url(self) -> Any:
        return self.args[0]

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self.url}>"


class ClientSSLError(ClientConnectorError):
    """Base error for ssl.*Errors."""


if ssl is not None:
    cert_errors = (ssl.CertificateError,)
    cert_errors_bases = (
        ClientSSLError,
        ssl.CertificateError,
    )

    ssl_errors = (ssl.SSLError,)
    ssl_error_bases = (ClientSSLError, ssl.SSLError)
else:  # pragma: no cover
    cert_errors = tuple()
    cert_errors_bases = (
        ClientSSLError,
        ValueError,
    )

    ssl_errors = tuple()
    ssl_error_bases = (ClientSSLError,)


class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore[misc]
    """Response ssl error."""


class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore[misc]
    """Response certificate error."""

    def __init__(
        self, connection_key: ConnectionKey, certificate_error: Exception
    ) -> None:
        self._conn_key = connection_key
        self._certificate_error = certificate_error
        self.args = (connection_key, certificate_error)

    @property
    def certificate_error(self) -> Exception:
        return self._certificate_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> bool:
        return self._conn_key.is_ssl

    def __str__(self) -> str:
        return (
            "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
            "[{0.certificate_error.__class__.__name__}: "
            "{0.certificate_error.args}]".format(self)
        )
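In application code this hierarchy means a single `except aiohttp.ClientError` catches every client failure, while narrower classes allow special-casing, e.g.:

import aiohttp

async def fetch(url: str) -> str:
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                resp.raise_for_status()  # ClientResponseError on 4xx/5xx
                return await resp.text()
    except aiohttp.ClientConnectorError as exc:
        raise RuntimeError(f"cannot reach {exc.host}:{exc.port}") from exc
    except aiohttp.ClientResponseError as exc:
        raise RuntimeError(f"HTTP {exc.status}: {exc.message}") from exc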
@@ -1,264 +0,0 @@
import asyncio
from contextlib import suppress
from typing import Any, Optional, Tuple

from .base_protocol import BaseProtocol
from .client_exceptions import (
    ClientOSError,
    ClientPayloadError,
    ServerDisconnectedError,
    ServerTimeoutError,
)
from .helpers import BaseTimerContext, status_code_must_be_empty_body
from .http import HttpResponseParser, RawResponseMessage
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader


class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
    """Helper class to adapt between Protocol and StreamReader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        BaseProtocol.__init__(self, loop=loop)
        DataQueue.__init__(self, loop)

        self._should_close = False

        self._payload: Optional[StreamReader] = None
        self._skip_payload = False
        self._payload_parser = None

        self._timer = None

        self._tail = b""
        self._upgraded = False
        self._parser: Optional[HttpResponseParser] = None

        self._read_timeout: Optional[float] = None
        self._read_timeout_handle: Optional[asyncio.TimerHandle] = None

        self._timeout_ceil_threshold: Optional[float] = 5

    @property
    def upgraded(self) -> bool:
        return self._upgraded

    @property
    def should_close(self) -> bool:
        if self._payload is not None and not self._payload.is_eof() or self._upgraded:
            return True

        return (
            self._should_close
            or self._upgraded
            or self.exception() is not None
            or self._payload_parser is not None
            or len(self) > 0
            or bool(self._tail)
        )

    def force_close(self) -> None:
        self._should_close = True

    def close(self) -> None:
        transport = self.transport
        if transport is not None:
            transport.close()
            self.transport = None
            self._payload = None
            self._drop_timeout()

    def is_connected(self) -> bool:
        return self.transport is not None and not self.transport.is_closing()

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self._drop_timeout()

        if self._payload_parser is not None:
            with suppress(Exception):
                self._payload_parser.feed_eof()

        uncompleted = None
        if self._parser is not None:
            try:
                uncompleted = self._parser.feed_eof()
            except Exception:
                if self._payload is not None:
                    self._payload.set_exception(
                        ClientPayloadError("Response payload is not completed")
                    )

        if not self.is_eof():
            if isinstance(exc, OSError):
                exc = ClientOSError(*exc.args)
            if exc is None:
                exc = ServerDisconnectedError(uncompleted)
            # assigns self._should_close to True as side effect,
            # we do it anyway below
            self.set_exception(exc)

        self._should_close = True
        self._parser = None
        self._payload = None
        self._payload_parser = None
        self._reading_paused = False

        super().connection_lost(exc)

    def eof_received(self) -> None:
        # should call parser.feed_eof() most likely
        self._drop_timeout()

    def pause_reading(self) -> None:
        super().pause_reading()
        self._drop_timeout()

    def resume_reading(self) -> None:
        super().resume_reading()
        self._reschedule_timeout()

    def set_exception(self, exc: BaseException) -> None:
        self._should_close = True
        self._drop_timeout()
        super().set_exception(exc)

    def set_parser(self, parser: Any, payload: Any) -> None:
        # TODO: actual types are:
        #   parser: WebSocketReader
        #   payload: FlowControlDataQueue
        # but they are not generic enough
        # Need an ABC for both types
        self._payload = payload
        self._payload_parser = parser

        self._drop_timeout()

        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def set_response_params(
        self,
        *,
        timer: Optional[BaseTimerContext] = None,
        skip_payload: bool = False,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
        read_timeout: Optional[float] = None,
        read_bufsize: int = 2**16,
        timeout_ceil_threshold: float = 5,
        max_line_size: int = 8190,
        max_field_size: int = 8190,
    ) -> None:
        self._skip_payload = skip_payload

        self._read_timeout = read_timeout

        self._timeout_ceil_threshold = timeout_ceil_threshold

        self._parser = HttpResponseParser(
            self,
            self._loop,
            read_bufsize,
            timer=timer,
            payload_exception=ClientPayloadError,
            response_with_body=not skip_payload,
            read_until_eof=read_until_eof,
            auto_decompress=auto_decompress,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
        )

        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def _drop_timeout(self) -> None:
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()
            self._read_timeout_handle = None

    def _reschedule_timeout(self) -> None:
        timeout = self._read_timeout
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()

        if timeout:
            self._read_timeout_handle = self._loop.call_later(
                timeout, self._on_read_timeout
            )
        else:
            self._read_timeout_handle = None

    def start_timeout(self) -> None:
        self._reschedule_timeout()

    def _on_read_timeout(self) -> None:
        exc = ServerTimeoutError("Timeout on reading data from socket")
        self.set_exception(exc)
        if self._payload is not None:
            self._payload.set_exception(exc)

    def data_received(self, data: bytes) -> None:
        self._reschedule_timeout()

        if not data:
            return

        # custom payload parser
        if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload = None
                self._payload_parser = None

                if tail:
                    self.data_received(tail)
            return
        else:
            if self._upgraded or self._parser is None:
                # i.e. websocket connection, websocket parser is not set yet
                self._tail += data
            else:
                # parse http messages
                try:
                    messages, upgraded, tail = self._parser.feed_data(data)
                except BaseException as exc:
                    if self.transport is not None:
                        # connection.release() could be called BEFORE
                        # data_received(), the transport is already
                        # closed in this case
                        self.transport.close()
                    # should_close is True after the call
                    self.set_exception(exc)
                    return

                self._upgraded = upgraded

                payload: Optional[StreamReader] = None
                for message, payload in messages:
                    if message.should_close:
                        self._should_close = True

                    self._payload = payload

                    if self._skip_payload or status_code_must_be_empty_body(
                        message.code
                    ):
                        self.feed_data((message, EMPTY_PAYLOAD), 0)
                    else:
                        self.feed_data((message, payload), 0)
                if payload is not None:
                    # new message(s) was processed
                    # register timeout handler unsubscribing
                    # either on end-of-stream or immediately for
                    # EMPTY_PAYLOAD
                    if payload is not EMPTY_PAYLOAD:
                        payload.on_eof(self._drop_timeout)
                    else:
                        self._drop_timeout()

                if tail:
                    if upgraded:
                        self.data_received(tail)
                    else:
                        self._tail = tail
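The read-timeout handling above is a plain "reschedule on activity" pattern: each `data_received()` cancels the previous `call_later` handle and arms a fresh one, and `_drop_timeout()` disarms it at end of payload. Reduced to its essence (a standalone sketch, names illustrative):

import asyncio
from typing import Callable, Optional

class IdleTimeout:
    def __init__(self, loop: asyncio.AbstractEventLoop,
                 timeout: float, on_timeout: Callable[[], None]) -> None:
        self._loop, self._timeout, self._on_timeout = loop, timeout, on_timeout
        self._handle: Optional[asyncio.TimerHandle] = None

    def touch(self) -> None:
        # Called for every received chunk: restart the countdown.
        if self._handle is not None:
            self._handle.cancel()
        self._handle = self._loop.call_later(self._timeout, self._on_timeout)

    def drop(self) -> None:
        if self._handle is not None:
            self._handle.cancel()
            self._handle = None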
File diff suppressed because it is too large
@@ -1,315 +0,0 @@
"""WebSocket client for asyncio."""

import asyncio
import sys
from typing import Any, Optional, cast

from .client_exceptions import ClientError
from .client_reqrep import ClientResponse
from .helpers import call_later, set_result
from .http import (
    WS_CLOSED_MESSAGE,
    WS_CLOSING_MESSAGE,
    WebSocketError,
    WSCloseCode,
    WSMessage,
    WSMsgType,
)
from .http_websocket import WebSocketWriter  # WSMessage
from .streams import EofStream, FlowControlDataQueue
from .typedefs import (
    DEFAULT_JSON_DECODER,
    DEFAULT_JSON_ENCODER,
    JSONDecoder,
    JSONEncoder,
)

if sys.version_info >= (3, 11):
    import asyncio as async_timeout
else:
    import async_timeout


class ClientWebSocketResponse:
    def __init__(
        self,
        reader: "FlowControlDataQueue[WSMessage]",
        writer: WebSocketWriter,
        protocol: Optional[str],
        response: ClientResponse,
        timeout: float,
        autoclose: bool,
        autoping: bool,
        loop: asyncio.AbstractEventLoop,
        *,
        receive_timeout: Optional[float] = None,
        heartbeat: Optional[float] = None,
        compress: int = 0,
        client_notakeover: bool = False,
    ) -> None:
        self._response = response
        self._conn = response.connection

        self._writer = writer
        self._reader = reader
        self._protocol = protocol
        self._closed = False
        self._closing = False
        self._close_code: Optional[int] = None
        self._timeout = timeout
        self._receive_timeout = receive_timeout
        self._autoclose = autoclose
        self._autoping = autoping
        self._heartbeat = heartbeat
        self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
        if heartbeat is not None:
            self._pong_heartbeat = heartbeat / 2.0
        self._pong_response_cb: Optional[asyncio.TimerHandle] = None
        self._loop = loop
        self._waiting: Optional[asyncio.Future[bool]] = None
        self._exception: Optional[BaseException] = None
        self._compress = compress
        self._client_notakeover = client_notakeover

        self._reset_heartbeat()

    def _cancel_heartbeat(self) -> None:
        if self._pong_response_cb is not None:
            self._pong_response_cb.cancel()
            self._pong_response_cb = None

        if self._heartbeat_cb is not None:
            self._heartbeat_cb.cancel()
            self._heartbeat_cb = None

    def _reset_heartbeat(self) -> None:
        self._cancel_heartbeat()

        if self._heartbeat is not None:
            self._heartbeat_cb = call_later(
                self._send_heartbeat,
                self._heartbeat,
                self._loop,
                timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold
                if self._conn is not None
                else 5,
            )

    def _send_heartbeat(self) -> None:
        if self._heartbeat is not None and not self._closed:
            # Fire-and-forget is not perfect for sending a ping but may be
            # good enough; otherwise we would need a long-living heartbeat
            # task in the class.
            self._loop.create_task(self._writer.ping())

            if self._pong_response_cb is not None:
                self._pong_response_cb.cancel()
            self._pong_response_cb = call_later(
                self._pong_not_received,
                self._pong_heartbeat,
                self._loop,
                timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold
                if self._conn is not None
                else 5,
            )

    def _pong_not_received(self) -> None:
        if not self._closed:
            self._closed = True
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._exception = asyncio.TimeoutError()
            self._response.close()

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def close_code(self) -> Optional[int]:
        return self._close_code

    @property
    def protocol(self) -> Optional[str]:
        return self._protocol

    @property
    def compress(self) -> int:
        return self._compress

    @property
    def client_notakeover(self) -> bool:
        return self._client_notakeover

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """extra info from connection transport"""
        conn = self._response.connection
        if conn is None:
            return default
        transport = conn.transport
        if transport is None:
            return default
        return transport.get_extra_info(name, default)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    async def ping(self, message: bytes = b"") -> None:
        await self._writer.ping(message)

    async def pong(self, message: bytes = b"") -> None:
        await self._writer.pong(message)

    async def send_str(self, data: str, compress: Optional[int] = None) -> None:
        if not isinstance(data, str):
            raise TypeError("data argument must be str (%r)" % type(data))
        await self._writer.send(data, binary=False, compress=compress)

    async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data argument must be byte-ish (%r)" % type(data))
        await self._writer.send(data, binary=True, compress=compress)

    async def send_json(
        self,
        data: Any,
        compress: Optional[int] = None,
        *,
        dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
    ) -> None:
        await self.send_str(dumps(data), compress=compress)

    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        # we need to break the `receive()` cycle first,
        # `close()` may be called from a different task
        if self._waiting is not None and not self._closing:
            self._closing = True
            self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._waiting

        if not self._closed:
            self._cancel_heartbeat()
            self._closed = True
            try:
                await self._writer.close(code, message)
            except asyncio.CancelledError:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._response.close()
                raise
            except Exception as exc:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                self._response.close()
                return True

            if self._close_code:
                self._response.close()
                return True

            while True:
                try:
                    async with async_timeout.timeout(self._timeout):
                        msg = await self._reader.read()
                except asyncio.CancelledError:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._response.close()
                    raise
                except Exception as exc:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._exception = exc
                    self._response.close()
                    return True

                if msg.type == WSMsgType.CLOSE:
                    self._close_code = msg.data
                    self._response.close()
                    return True
        else:
            return False

    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        while True:
            if self._waiting is not None:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                return WS_CLOSED_MESSAGE
            elif self._closing:
                await self.close()
                return WS_CLOSED_MESSAGE

            try:
                self._waiting = self._loop.create_future()
                try:
                    async with async_timeout.timeout(timeout or self._receive_timeout):
                        msg = await self._reader.read()
                    self._reset_heartbeat()
                finally:
                    waiter = self._waiting
                    self._waiting = None
                    set_result(waiter, True)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except EofStream:
                self._close_code = WSCloseCode.OK
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except ClientError:
                self._closed = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                return WS_CLOSED_MESSAGE
            except WebSocketError as exc:
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                self._exception = exc
                self._closing = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type == WSMsgType.CLOSE:
                self._closing = True
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type == WSMsgType.CLOSING:
                self._closing = True
            elif msg.type == WSMsgType.PING and self._autoping:
                await self.pong(msg.data)
                continue
            elif msg.type == WSMsgType.PONG and self._autoping:
                continue

            return msg

    async def receive_str(self, *, timeout: Optional[float] = None) -> str:
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.TEXT:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
        return cast(str, msg.data)

    async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.BINARY:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
        return cast(bytes, msg.data)

    async def receive_json(
        self,
        *,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        timeout: Optional[float] = None,
    ) -> Any:
        data = await self.receive_str(timeout=timeout)
        return loads(data)

    def __aiter__(self) -> "ClientWebSocketResponse":
        return self

    async def __anext__(self) -> WSMessage:
        msg = await self.receive()
        if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
            raise StopAsyncIteration
        return msg
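The `__aiter__`/`__anext__` pair at the end is what makes the idiomatic consumption loop work, terminating cleanly when a CLOSE/CLOSING/CLOSED message arrives:

import aiohttp

async def echo_client(url: str) -> None:
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect(url, heartbeat=30.0) as ws:
            await ws.send_str("hello")
            async for msg in ws:  # stops on close; see __anext__ above
                if msg.type == aiohttp.WSMsgType.TEXT:
                    print(msg.data)
                elif msg.type == aiohttp.WSMsgType.ERROR:
                    break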
@@ -1,151 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
import zlib
|
|
||||||
from concurrent.futures import Executor
|
|
||||||
from typing import Optional, cast
|
|
||||||
|
|
||||||
try:
|
|
||||||
try:
|
|
||||||
import brotlicffi as brotli
|
|
||||||
except ImportError:
|
|
||||||
import brotli
|
|
||||||
|
|
||||||
HAS_BROTLI = True
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
HAS_BROTLI = False
|
|
||||||
|
|
||||||
MAX_SYNC_CHUNK_SIZE = 1024
|
|
||||||
|
|
||||||
|
|
||||||
def encoding_to_mode(
|
|
||||||
encoding: Optional[str] = None,
|
|
||||||
suppress_deflate_header: bool = False,
|
|
||||||
) -> int:
|
|
||||||
if encoding == "gzip":
|
|
||||||
return 16 + zlib.MAX_WBITS
|
|
||||||
|
|
||||||
return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS
|
|
||||||
|
|
||||||
|
|
||||||
class ZlibBaseHandler:
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
mode: int,
|
|
||||||
executor: Optional[Executor] = None,
|
|
||||||
max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
|
|
||||||
):
|
|
||||||
self._mode = mode
|
|
||||||
self._executor = executor
|
|
||||||
self._max_sync_chunk_size = max_sync_chunk_size
|
|
||||||
|
|
||||||
|
|
||||||
class ZLibCompressor(ZlibBaseHandler):
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
encoding: Optional[str] = None,
|
|
||||||
suppress_deflate_header: bool = False,
|
|
||||||
level: Optional[int] = None,
|
|
||||||
wbits: Optional[int] = None,
|
|
||||||
strategy: int = zlib.Z_DEFAULT_STRATEGY,
|
|
||||||
executor: Optional[Executor] = None,
|
|
||||||
max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
|
|
||||||
):
|
|
||||||
super().__init__(
|
|
||||||
mode=encoding_to_mode(encoding, suppress_deflate_header)
|
|
||||||
if wbits is None
|
|
||||||
else wbits,
|
|
||||||
executor=executor,
|
|
||||||
max_sync_chunk_size=max_sync_chunk_size,
|
|
||||||
)
|
|
||||||
if level is None:
|
|
||||||
self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
|
|
||||||
else:
|
|
||||||
self._compressor = zlib.compressobj(
|
|
||||||
wbits=self._mode, strategy=strategy, level=level
|
|
||||||
)
|
|
||||||
|
|
||||||
def compress_sync(self, data: bytes) -> bytes:
|
|
||||||
return self._compressor.compress(data)
|
|
||||||
|
|
||||||
async def compress(self, data: bytes) -> bytes:
|
|
||||||
if (
|
|
||||||
self._max_sync_chunk_size is not None
|
|
||||||
and len(data) > self._max_sync_chunk_size
|
|
||||||
):
|
|
||||||
return await asyncio.get_event_loop().run_in_executor(
|
|
||||||
self._executor, self.compress_sync, data
|
|
||||||
)
|
|
||||||
return self.compress_sync(data)
|
|
||||||
|
|
||||||
def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
|
|
||||||
return self._compressor.flush(mode)
|
|
||||||
|
|
||||||
|
|
||||||
class ZLibDecompressor(ZlibBaseHandler):
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
encoding: Optional[str] = None,
|
|
||||||
suppress_deflate_header: bool = False,
|
|
||||||
executor: Optional[Executor] = None,
|
|
||||||
max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
|
|
||||||
):
|
|
||||||
super().__init__(
|
|
||||||
mode=encoding_to_mode(encoding, suppress_deflate_header),
|
|
||||||
executor=executor,
|
|
||||||
max_sync_chunk_size=max_sync_chunk_size,
|
|
||||||
)
|
|
||||||
self._decompressor = zlib.decompressobj(wbits=self._mode)
|
|
||||||
|
|
||||||
def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
|
|
||||||
return self._decompressor.decompress(data, max_length)
|
|
||||||
|
|
||||||
async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
|
|
||||||
if (
|
|
||||||
self._max_sync_chunk_size is not None
|
|
||||||
and len(data) > self._max_sync_chunk_size
|
|
||||||
):
|
|
||||||
return await asyncio.get_event_loop().run_in_executor(
|
|
||||||
self._executor, self.decompress_sync, data, max_length
|
|
||||||
)
|
|
||||||
return self.decompress_sync(data, max_length)
|
|
||||||
|
|
||||||
def flush(self, length: int = 0) -> bytes:
|
|
||||||
return (
|
|
||||||
self._decompressor.flush(length)
|
|
||||||
if length > 0
|
|
||||||
else self._decompressor.flush()
|
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def eof(self) -> bool:
|
|
||||||
return self._decompressor.eof
|
|
||||||
|
|
||||||
@property
|
|
||||||
def unconsumed_tail(self) -> bytes:
|
|
||||||
return self._decompressor.unconsumed_tail
|
|
||||||
|
|
||||||
@property
|
|
||||||
def unused_data(self) -> bytes:
|
|
||||||
return self._decompressor.unused_data
|
|
||||||
|
|
||||||
|
|
||||||
class BrotliDecompressor:
    # Supports both 'brotlipy' and 'Brotli' packages
    # since they share an import name. The top branches
    # are for 'brotlipy' and bottom branches for 'Brotli'
    def __init__(self) -> None:
        if not HAS_BROTLI:
            raise RuntimeError(
                "The brotli decompression is not available. "
                "Please install `Brotli` module"
            )
        self._obj = brotli.Decompressor()

    def decompress_sync(self, data: bytes) -> bytes:
        if hasattr(self._obj, "decompress"):
            return cast(bytes, self._obj.decompress(data))
        return cast(bytes, self._obj.process(data))

    def flush(self) -> bytes:
        if hasattr(self._obj, "flush"):
            return cast(bytes, self._obj.flush())
        return b""
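

# --- Illustrative sketch, not part of the original module: the hasattr()
# checks above are duck-typed dispatch between 'brotlipy' and 'Brotli',
# which both install under the import name `brotli`. A streaming decode
# loop over hypothetical chunks:
def _demo_brotli(chunks: "list[bytes]") -> bytes:
    decompressor = BrotliDecompressor()
    out = b"".join(decompressor.decompress_sync(chunk) for chunk in chunks)
    return out + decompressor.flush()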
File diff suppressed because it is too large
@@ -1,418 +0,0 @@
import asyncio
import calendar
import contextlib
import datetime
import os  # noqa
import pathlib
import pickle
import re
import time
from collections import defaultdict
from http.cookies import BaseCookie, Morsel, SimpleCookie
from math import ceil
from typing import (  # noqa
    DefaultDict,
    Dict,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Set,
    Tuple,
    Union,
    cast,
)

from yarl import URL

from .abc import AbstractCookieJar, ClearCookiePredicate
from .helpers import is_ip_address
from .typedefs import LooseCookies, PathLike, StrOrURL

__all__ = ("CookieJar", "DummyCookieJar")


CookieItem = Union[str, "Morsel[str]"]


class CookieJar(AbstractCookieJar):
    """Implements cookie storage adhering to RFC 6265."""

    DATE_TOKENS_RE = re.compile(
        r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
        r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
    )

    DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")

    DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")

    DATE_MONTH_RE = re.compile(
        "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|(aug)|(sep)|(oct)|(nov)|(dec)",
        re.I,
    )

    DATE_YEAR_RE = re.compile(r"(\d{2,4})")

    # calendar.timegm() fails for timestamps after datetime.datetime.max
    # Minus one as a loss of precision occurs when timestamp() is called.
    MAX_TIME = (
        int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1
    )
    try:
        calendar.timegm(time.gmtime(MAX_TIME))
    except OSError:
        # Hit the maximum representable time on Windows
        # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64
        MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1))
    except OverflowError:
        # #4515: datetime.max may not be representable on 32-bit platforms
        MAX_TIME = 2**31 - 1
    # Avoid minuses in the future, 3x faster
    SUB_MAX_TIME = MAX_TIME - 1

    def __init__(
        self,
        *,
        unsafe: bool = False,
        quote_cookie: bool = True,
        treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(loop=loop)
        self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict(
            SimpleCookie
        )
        self._host_only_cookies: Set[Tuple[str, str]] = set()
        self._unsafe = unsafe
        self._quote_cookie = quote_cookie
        if treat_as_secure_origin is None:
            treat_as_secure_origin = []
        elif isinstance(treat_as_secure_origin, URL):
            treat_as_secure_origin = [treat_as_secure_origin.origin()]
        elif isinstance(treat_as_secure_origin, str):
            treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
        else:
            treat_as_secure_origin = [
                URL(url).origin() if isinstance(url, str) else url.origin()
                for url in treat_as_secure_origin
            ]
        self._treat_as_secure_origin = treat_as_secure_origin
        self._next_expiration: float = ceil(time.time())
        self._expirations: Dict[Tuple[str, str, str], float] = {}

    def save(self, file_path: PathLike) -> None:
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="wb") as f:
            pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)

    def load(self, file_path: PathLike) -> None:
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="rb") as f:
            self._cookies = pickle.load(f)

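    # --- Illustrative sketch, not part of the original module: persistence
    # is a plain pickle of the internal SimpleCookie mapping, so save() and
    # load() round-trip the jar between runs. The file name is hypothetical;
    # only load() files you wrote yourself, since pickle executes arbitrary
    # code on untrusted input.
    def _demo_round_trip(self) -> "CookieJar":
        self.save("cookies.pickle")
        restored = CookieJar()
        restored.load("cookies.pickle")
        return restored
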
    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        if predicate is None:
            self._next_expiration = ceil(time.time())
            self._cookies.clear()
            self._host_only_cookies.clear()
            self._expirations.clear()
            return

        to_del = []
        now = time.time()
        for (domain, path), cookie in self._cookies.items():
            for name, morsel in cookie.items():
                key = (domain, path, name)
                if (
                    key in self._expirations and self._expirations[key] <= now
                ) or predicate(morsel):
                    to_del.append(key)

        for domain, path, name in to_del:
            self._host_only_cookies.discard((domain, name))
            key = (domain, path, name)
            if key in self._expirations:
                del self._expirations[(domain, path, name)]
            self._cookies[(domain, path)].pop(name, None)

        self._next_expiration = (
            min(*self._expirations.values(), self.SUB_MAX_TIME) + 1
            if self._expirations
            else self.MAX_TIME
        )

    def clear_domain(self, domain: str) -> None:
        self.clear(lambda x: self._is_domain_match(domain, x["domain"]))

    def __iter__(self) -> "Iterator[Morsel[str]]":
        self._do_expiration()
        for val in self._cookies.values():
            yield from val.values()

    def __len__(self) -> int:
        return sum(1 for i in self)

    def _do_expiration(self) -> None:
        self.clear(lambda x: False)

    def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None:
        self._next_expiration = min(self._next_expiration, when)
        self._expirations[(domain, path, name)] = when

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""
        hostname = response_url.raw_host

        if not self._unsafe and is_ip_address(hostname):
            # Don't accept cookies from IPs
            return

        if isinstance(cookies, Mapping):
            cookies = cookies.items()

        for name, cookie in cookies:
            if not isinstance(cookie, Morsel):
                tmp = SimpleCookie()
                tmp[name] = cookie  # type: ignore[assignment]
                cookie = tmp[name]

            domain = cookie["domain"]

            # ignore domains with trailing dots
            if domain.endswith("."):
                domain = ""
                del cookie["domain"]

            if not domain and hostname is not None:
                # Set the cookie's domain to the response hostname
                # and set its host-only-flag
                self._host_only_cookies.add((hostname, name))
                domain = cookie["domain"] = hostname

            if domain.startswith("."):
                # Remove leading dot
                domain = domain[1:]
                cookie["domain"] = domain

            if hostname and not self._is_domain_match(domain, hostname):
                # Setting cookies for different domains is not allowed
                continue

            path = cookie["path"]
            if not path or not path.startswith("/"):
                # Set the cookie's path to the response path
                path = response_url.path
                if not path.startswith("/"):
                    path = "/"
                else:
                    # Cut everything from the last slash to the end
                    path = "/" + path[1 : path.rfind("/")]
                cookie["path"] = path

            max_age = cookie["max-age"]
            if max_age:
                try:
                    delta_seconds = int(max_age)
                    max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME)
                    self._expire_cookie(max_age_expiration, domain, path, name)
                except ValueError:
                    cookie["max-age"] = ""

            else:
                expires = cookie["expires"]
                if expires:
                    expire_time = self._parse_date(expires)
                    if expire_time:
                        self._expire_cookie(expire_time, domain, path, name)
                    else:
                        cookie["expires"] = ""

            self._cookies[(domain, path)][name] = cookie

        self._do_expiration()

    def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]":
        """Returns this jar's cookies filtered by their attributes."""
        filtered: Union[SimpleCookie, "BaseCookie[str]"] = (
            SimpleCookie() if self._quote_cookie else BaseCookie()
        )
        if not self._cookies:
            # Skip do_expiration() if there are no cookies.
            return filtered
        self._do_expiration()
        if not self._cookies:
            # Skip rest of function if no non-expired cookies.
            return filtered
        request_url = URL(request_url)
        hostname = request_url.raw_host or ""

        is_not_secure = request_url.scheme not in ("https", "wss")
        if is_not_secure and self._treat_as_secure_origin:
            request_origin = URL()
            with contextlib.suppress(ValueError):
                request_origin = request_url.origin()
            is_not_secure = request_origin not in self._treat_as_secure_origin

        # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
        for cookie in sorted(self, key=lambda c: len(c["path"])):
            name = cookie.key
            domain = cookie["domain"]

            # Send shared cookies
            if not domain:
                filtered[name] = cookie.value
                continue

            if not self._unsafe and is_ip_address(hostname):
                continue

            if (domain, name) in self._host_only_cookies:
                if domain != hostname:
                    continue
            elif not self._is_domain_match(domain, hostname):
                continue

            if not self._is_path_match(request_url.path, cookie["path"]):
                continue

            if is_not_secure and cookie["secure"]:
                continue

            # It's critical we use the Morsel so the coded_value
            # (based on cookie version) is preserved
            mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
            mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
            filtered[name] = mrsl_val

        return filtered

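    # --- Illustrative sketch, not part of the original module: the usual
    # round trip through update_cookies()/filter_cookies(). The URLs and
    # cookie value are hypothetical.
    def _demo_flow(self) -> None:
        self.update_cookies(
            {"session": "abc123"}, response_url=URL("http://example.com/app")
        )
        matched = self.filter_cookies(URL("http://example.com/app/page"))
        assert "session" in matched
        assert "session" not in self.filter_cookies(URL("http://other.example/"))
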
    @staticmethod
    def _is_domain_match(domain: str, hostname: str) -> bool:
        """Implements domain matching adhering to RFC 6265."""
        if hostname == domain:
            return True

        if not hostname.endswith(domain):
            return False

        non_matching = hostname[: -len(domain)]

        if not non_matching.endswith("."):
            return False

        return not is_ip_address(hostname)

    @staticmethod
    def _is_path_match(req_path: str, cookie_path: str) -> bool:
        """Implements path matching adhering to RFC 6265."""
        if not req_path.startswith("/"):
            req_path = "/"

        if req_path == cookie_path:
            return True

        if not req_path.startswith(cookie_path):
            return False

        if cookie_path.endswith("/"):
            return True

        non_matching = req_path[len(cookie_path) :]

        return non_matching.startswith("/")

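    # --- Illustrative sketch, not part of the original module: concrete
    # inputs for the two RFC 6265 matchers above.
    @staticmethod
    def _demo_matchers() -> None:
        assert CookieJar._is_domain_match("example.com", "example.com")
        assert CookieJar._is_domain_match("example.com", "www.example.com")
        # no dot boundary, so this is not a match:
        assert not CookieJar._is_domain_match("example.com", "badexample.com")
        assert CookieJar._is_path_match("/app/page", "/app")
        assert CookieJar._is_path_match("/app/page", "/app/")
        # no "/" boundary, so this is not a match:
        assert not CookieJar._is_path_match("/application", "/app")
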
    @classmethod
    def _parse_date(cls, date_str: str) -> Optional[int]:
        """Implements date string parsing adhering to RFC 6265."""
        if not date_str:
            return None

        found_time = False
        found_day = False
        found_month = False
        found_year = False

        hour = minute = second = 0
        day = 0
        month = 0
        year = 0

        for token_match in cls.DATE_TOKENS_RE.finditer(date_str):

            token = token_match.group("token")

            if not found_time:
                time_match = cls.DATE_HMS_TIME_RE.match(token)
                if time_match:
                    found_time = True
                    hour, minute, second = (int(s) for s in time_match.groups())
                    continue

            if not found_day:
                day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
                if day_match:
                    found_day = True
                    day = int(day_match.group())
                    continue

            if not found_month:
                month_match = cls.DATE_MONTH_RE.match(token)
                if month_match:
                    found_month = True
                    assert month_match.lastindex is not None
                    month = month_match.lastindex
                    continue

            if not found_year:
                year_match = cls.DATE_YEAR_RE.match(token)
                if year_match:
                    found_year = True
                    year = int(year_match.group())

        if 70 <= year <= 99:
            year += 1900
        elif 0 <= year <= 69:
            year += 2000

        if False in (found_day, found_month, found_year, found_time):
            return None

        if not 1 <= day <= 31:
            return None

        if year < 1601 or hour > 23 or minute > 59 or second > 59:
            return None

        return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1))


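# --- Illustrative sketch, not part of the original module: the tokenizing
# parser above accepts the common RFC 822/850 cookie date layouts; both
# hypothetical examples below denote the same instant.
def _demo_parse_date() -> None:
    assert CookieJar._parse_date("Tue, 15 Nov 1994 08:12:31 GMT") == 784887151
    assert CookieJar._parse_date("15-Nov-94 08:12:31") == 784887151
    assert CookieJar._parse_date("not a date") is None

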
class DummyCookieJar(AbstractCookieJar):
    """Implements a dummy cookie storage.

    It can be used with the ClientSession when no cookie processing is needed.

    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        while False:
            yield None

    def __len__(self) -> int:
        return 0

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        pass

    def clear_domain(self, domain: str) -> None:
        pass

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        pass

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        return SimpleCookie()
@@ -1,172 +0,0 @@
import io
from typing import Any, Iterable, List, Optional
from urllib.parse import urlencode

from multidict import MultiDict, MultiDictProxy

from . import hdrs, multipart, payload
from .helpers import guess_filename
from .payload import Payload

__all__ = ("FormData",)


class FormData:
    """Helper class for form body generation.

    Supports multipart/form-data and application/x-www-form-urlencoded.
    """

    def __init__(
        self,
        fields: Iterable[Any] = (),
        quote_fields: bool = True,
        charset: Optional[str] = None,
    ) -> None:
        self._writer = multipart.MultipartWriter("form-data")
        self._fields: List[Any] = []
        self._is_multipart = False
        self._is_processed = False
        self._quote_fields = quote_fields
        self._charset = charset

        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self) -> bool:
        return self._is_multipart

    def add_field(
        self,
        name: str,
        value: Any,
        *,
        content_type: Optional[str] = None,
        filename: Optional[str] = None,
        content_transfer_encoding: Optional[str] = None,
    ) -> None:

        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            if filename is None and content_transfer_encoding is None:
                filename = name

        type_options: MultiDict[str] = MultiDict({"name": name})
        if filename is not None and not isinstance(filename, str):
            raise TypeError(
                "filename must be an instance of str. Got: %s" % filename
            )
        if filename is None and isinstance(value, io.IOBase):
            filename = guess_filename(value, name)
        if filename is not None:
            type_options["filename"] = filename
            self._is_multipart = True

        headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError(
                    "content_type must be an instance of str. Got: %s" % content_type
                )
            headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError(
                    "content_transfer_encoding must be an instance"
                    " of str. Got: %s" % content_transfer_encoding
                )
            headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
            self._is_multipart = True

        self._fields.append((type_options, headers, value))

    def add_fields(self, *fields: Any) -> None:
        to_add = list(fields)

        while to_add:
            rec = to_add.pop(0)

            if isinstance(rec, io.IOBase):
                k = guess_filename(rec, "unknown")
                self.add_field(k, rec)  # type: ignore[arg-type]

            elif isinstance(rec, (MultiDictProxy, MultiDict)):
                to_add.extend(rec.items())

            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                k, fp = rec
                self.add_field(k, fp)  # type: ignore[arg-type]

            else:
                raise TypeError(
                    "Only io.IOBase, multidict and (name, file) "
                    "pairs allowed, use .add_field() for passing "
                    "more complex parameters, got {!r}".format(rec)
                )

    def _gen_form_urlencoded(self) -> payload.BytesPayload:
        # form data (x-www-form-urlencoded)
        data = []
        for type_options, _, value in self._fields:
            data.append((type_options["name"], value))

        charset = self._charset if self._charset is not None else "utf-8"

        if charset == "utf-8":
            content_type = "application/x-www-form-urlencoded"
        else:
            content_type = "application/x-www-form-urlencoded; charset=%s" % charset

        return payload.BytesPayload(
            urlencode(data, doseq=True, encoding=charset).encode(),
            content_type=content_type,
        )

    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format"""
        if self._is_processed:
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't like body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        self._is_processed = True
        return self._writer

    def __call__(self) -> Payload:
        if self._is_multipart:
            return self._gen_form_data()
        else:
            return self._gen_form_urlencoded()
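

# --- Illustrative sketch, not part of the original module: adding a plain
# string field keeps the urlencoded code path, while adding a file-like
# value flips the form to multipart. The field names are hypothetical.
def _demo_form_data() -> Payload:
    form = FormData()
    form.add_field("user", "alice")
    assert not form.is_multipart
    form.add_field("avatar", io.BytesIO(b"\x89PNG"), filename="avatar.png")
    assert form.is_multipart
    return form()  # a multipart.MultipartWriter payload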
@@ -1,108 +0,0 @@
"""HTTP Headers constants."""

# After changing the file content call ./tools/gen.py
# to regenerate the headers parser
from typing import Final, Set

from multidict import istr

METH_ANY: Final[str] = "*"
METH_CONNECT: Final[str] = "CONNECT"
METH_HEAD: Final[str] = "HEAD"
METH_GET: Final[str] = "GET"
METH_DELETE: Final[str] = "DELETE"
METH_OPTIONS: Final[str] = "OPTIONS"
METH_PATCH: Final[str] = "PATCH"
METH_POST: Final[str] = "POST"
METH_PUT: Final[str] = "PUT"
METH_TRACE: Final[str] = "TRACE"

METH_ALL: Final[Set[str]] = {
    METH_CONNECT,
    METH_HEAD,
    METH_GET,
    METH_DELETE,
    METH_OPTIONS,
    METH_PATCH,
    METH_POST,
    METH_PUT,
    METH_TRACE,
}

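# --- Illustrative note, not part of the original module: the constants
# below are multidict.istr instances rather than plain str, which lets
# aiohttp's case-insensitive header mappings (CIMultiDict) reuse the
# pre-normalized key instead of re-normalizing it on every access, e.g. a
# hypothetical `response.headers[CONTENT_TYPE]` lookup.
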
ACCEPT: Final[istr] = istr("Accept")
ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
AGE: Final[istr] = istr("Age")
ALLOW: Final[istr] = istr("Allow")
AUTHORIZATION: Final[istr] = istr("Authorization")
CACHE_CONTROL: Final[istr] = istr("Cache-Control")
CONNECTION: Final[istr] = istr("Connection")
CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
CONTENT_LENGTH: Final[istr] = istr("Content-Length")
CONTENT_LOCATION: Final[istr] = istr("Content-Location")
CONTENT_MD5: Final[istr] = istr("Content-MD5")
CONTENT_RANGE: Final[istr] = istr("Content-Range")
CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
CONTENT_TYPE: Final[istr] = istr("Content-Type")
COOKIE: Final[istr] = istr("Cookie")
DATE: Final[istr] = istr("Date")
DESTINATION: Final[istr] = istr("Destination")
DIGEST: Final[istr] = istr("Digest")
ETAG: Final[istr] = istr("Etag")
EXPECT: Final[istr] = istr("Expect")
EXPIRES: Final[istr] = istr("Expires")
FORWARDED: Final[istr] = istr("Forwarded")
FROM: Final[istr] = istr("From")
HOST: Final[istr] = istr("Host")
IF_MATCH: Final[istr] = istr("If-Match")
IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
IF_RANGE: Final[istr] = istr("If-Range")
IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
LAST_MODIFIED: Final[istr] = istr("Last-Modified")
LINK: Final[istr] = istr("Link")
LOCATION: Final[istr] = istr("Location")
MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
ORIGIN: Final[istr] = istr("Origin")
PRAGMA: Final[istr] = istr("Pragma")
PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
RANGE: Final[istr] = istr("Range")
REFERER: Final[istr] = istr("Referer")
RETRY_AFTER: Final[istr] = istr("Retry-After")
SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
SERVER: Final[istr] = istr("Server")
SET_COOKIE: Final[istr] = istr("Set-Cookie")
TE: Final[istr] = istr("TE")
TRAILER: Final[istr] = istr("Trailer")
TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
UPGRADE: Final[istr] = istr("Upgrade")
URI: Final[istr] = istr("URI")
USER_AGENT: Final[istr] = istr("User-Agent")
VARY: Final[istr] = istr("Vary")
VIA: Final[istr] = istr("Via")
WANT_DIGEST: Final[istr] = istr("Want-Digest")
WARNING: Final[istr] = istr("Warning")
WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
@@ -1,999 +0,0 @@
"""Various helper functions"""

import asyncio
import base64
import binascii
import contextlib
import datetime
import enum
import functools
import inspect
import netrc
import os
import platform
import re
import sys
import time
import warnings
import weakref
from collections import namedtuple
from contextlib import suppress
from email.parser import HeaderParser
from email.utils import parsedate
from math import ceil
from pathlib import Path
from types import TracebackType
from typing import (
    Any,
    Callable,
    ContextManager,
    Dict,
    Generator,
    Generic,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Pattern,
    Protocol,
    Tuple,
    Type,
    TypeVar,
    Union,
    get_args,
    overload,
)
from urllib.parse import quote
from urllib.request import getproxies, proxy_bypass

import attr
from multidict import MultiDict, MultiDictProxy, MultiMapping
from yarl import URL

from . import hdrs
from .log import client_logger, internal_logger

if sys.version_info >= (3, 11):
    import asyncio as async_timeout
else:
    import async_timeout

__all__ = ("BasicAuth", "ChainMapProxy", "ETag")

IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"

PY_310 = sys.version_info >= (3, 10)
PY_311 = sys.version_info >= (3, 11)


_T = TypeVar("_T")
_S = TypeVar("_S")

_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
sentinel = _SENTINEL.sentinel

NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))

DEBUG = sys.flags.dev_mode or (
    not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
)


CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
    chr(127),
}
SEPARATORS = {
    "(",
    ")",
    "<",
    ">",
    "@",
    ",",
    ";",
    ":",
    "\\",
    '"',
    "/",
    "[",
    "]",
    "?",
    "=",
    "{",
    "}",
    " ",
    chr(9),
}
TOKEN = CHAR ^ CTL ^ SEPARATORS


class noop:
    def __await__(self) -> Generator[None, None, None]:
        yield


class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """HTTP basic authentication helper."""

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        if login is None:
            raise ValueError("None is not allowed as login value")

        if password is None:
            raise ValueError("None is not allowed as password value")

        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')

        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header."""
        try:
            auth_type, encoded_credentials = auth_header.split(" ", 1)
        except ValueError:
            raise ValueError("Could not parse authorization header.")

        if auth_type.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % auth_type)

        try:
            decoded = base64.b64decode(
                encoded_credentials.encode("ascii"), validate=True
            ).decode(encoding)
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")

        try:
            # RFC 2617 HTTP Authentication
            # https://www.ietf.org/rfc/rfc2617.txt
            # the colon must be present, but the username and password may be
            # otherwise blank.
            username, password = decoded.split(":", 1)
        except ValueError:
            raise ValueError("Invalid credentials.")

        return cls(username, password, encoding=encoding)

    @classmethod
    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url."""
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        if url.user is None:
            return None
        return cls(url.user, url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials."""
        creds = (f"{self.login}:{self.password}").encode(self.encoding)
        return "Basic %s" % base64.b64encode(creds).decode(self.encoding)


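# --- Illustrative sketch, not part of the original module: encode() and
# decode() are inverses, so credentials survive the trip through an
# Authorization header value. The credentials are hypothetical.
def _demo_basic_auth() -> None:
    auth = BasicAuth("alice", "secret")
    assert auth.encode() == "Basic YWxpY2U6c2VjcmV0"
    assert BasicAuth.decode(auth.encode()) == auth

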
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    auth = BasicAuth.from_url(url)
    if auth is None:
        return url, None
    else:
        return url.with_user(None), auth


def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    Attempt to load it from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None

        netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        netrc_exists = False
        with contextlib.suppress(OSError):
            netrc_exists = netrc_path.is_file()
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_exists:
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    return None


@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    proxy: URL
    proxy_auth: Optional[BasicAuth]


def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth:
    """
    Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.

    :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
        entry is found for the ``host``.
    """
    if netrc_obj is None:
        raise LookupError("No .netrc file found")
    auth_from_netrc = netrc_obj.authenticators(host)

    if auth_from_netrc is None:
        raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")
    login, account, password = auth_from_netrc

    # TODO(PY311): username = login or account
    # Up to python 3.10, account could be None if not specified,
    # and login will be empty string if not specified. From 3.11,
    # login and account will be empty string if not specified.
    username = login if (login or account is None) else account

    # TODO(PY311): Remove this, as password will be empty string
    # if not specified
    if password is None:
        password = ""

    return BasicAuth(username, password)


def proxies_from_env() -> Dict[str, ProxyInfo]:
    proxy_urls = {
        k: URL(v)
        for k, v in getproxies().items()
        if k in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme in ("https", "wss"):
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        if netrc_obj and auth is None:
            if proxy.host is not None:
                try:
                    auth = basicauth_from_netrc(netrc_obj, proxy.host)
                except LookupError:
                    auth = None
        ret[proto] = ProxyInfo(proxy, auth)
    return ret


def current_task(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
    return asyncio.current_task(loop=loop)


def get_running_loop(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
    if loop is None:
        loop = asyncio.get_event_loop()
    if not loop.is_running():
        warnings.warn(
            "The object should be created within an async function",
            DeprecationWarning,
            stacklevel=3,
        )
        if loop.get_debug():
            internal_logger.warning(
                "The object should be created within an async function", stack_info=True
            )
    return loop


def isasyncgenfunction(obj: Any) -> bool:
    func = getattr(inspect, "isasyncgenfunction", None)
    if func is not None:
        return func(obj)  # type: ignore[no-any-return]
    else:
        return False


def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Get a permitted proxy for the given URL from the env."""
    if url.host is not None and proxy_bypass(url.host):
        raise LookupError(f"Proxying is disallowed for `{url.host!r}`")

    proxies_in_env = proxies_from_env()
    try:
        proxy_info = proxies_in_env[url.scheme]
    except KeyError:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    else:
        return proxy_info.proxy, proxy_info.proxy_auth


@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    type: str
    subtype: str
    suffix: str
    parameters: "MultiDictProxy[str]"


@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    parts = mimetype.split(";")
    params: MultiDict[str] = MultiDict()
    for item in parts[1:]:
        if not item:
            continue
        key, _, value = item.partition("=")
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = parts[0].strip().lower()
    if fulltype == "*":
        fulltype = "*/*"

    mtype, _, stype = fulltype.partition("/")
    stype, _, suffix = stype.partition("+")

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )


def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    name = getattr(obj, "name", None)
    if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
        return Path(name).name
    return default


not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}


def quoted_string(content: str) -> str:
    """Return 7-bit content as quoted-string.

    Format content into a quoted-string as defined in RFC5322 for
    Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be US-ASCII or
    a ValueError is raised.
    """
    if not (QCONTENT > set(content)):
        raise ValueError(f"bad content for quoted-string {content!r}")
    return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)


def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7579 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set quote_fields to False if the recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.
    """
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError("bad content disposition type {!r}".format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            if not key or not (TOKEN > set(key)):
                raise ValueError(
                    "bad content disposition parameter {!r}={!r}".format(key, val)
                )
            if quote_fields:
                if key.lower() == "filename":
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        qval = quoted_string(val)
                    except ValueError:
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value


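# --- Illustrative sketch, not part of the original module: a form-data
# disposition with ASCII parameters uses plain quoting; non-ASCII values
# would fall back to the RFC 2231 charset''value form via the except
# branch above.
def _demo_content_disposition() -> None:
    header = content_disposition_header("form-data", name="file", filename="a.txt")
    assert header == 'form-data; name="file"; filename="a.txt"'

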
class _TSelf(Protocol, Generic[_T]):
    _cache: Dict[str, _T]


class reify(Generic[_T]):
    """Use as a class method decorator.

    It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.
    """

    def __init__(self, wrapped: Callable[..., _T]) -> None:
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__
        self.name = wrapped.__name__

    def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
        try:
            try:
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            if inst is None:
                return self
            raise

    def __set__(self, inst: _TSelf[_T], value: _T) -> None:
        raise AttributeError("reified property is read-only")


reify_py = reify

try:
    from ._helpers import reify as reify_c

    if not NO_EXTENSIONS:
        reify = reify_c  # type: ignore[misc,assignment]
except ImportError:
    pass

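# --- Illustrative sketch, not part of the original module: reify caches the
# first computed value in the instance's `_cache` dict, so the wrapped method
# runs once per instance. The class is hypothetical and uses the pure-Python
# `reify_py` so the caching contract shown is exactly the one defined above.
class _DemoReified:
    def __init__(self) -> None:
        self._cache: Dict[str, int] = {}
        self.calls = 0

    @reify_py
    def answer(self) -> int:
        self.calls += 1
        return 42


def _demo_reify() -> None:
    obj = _DemoReified()
    assert obj.answer == 42 and obj.answer == 42 and obj.calls == 1

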
_ipv4_pattern = (
    r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
_ipv6_pattern = (
    r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
    r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
    r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
    r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
    r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
    r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
    r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)


def _is_ip_address(
    regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
    if host is None:
        return False
    if isinstance(host, str):
        return bool(regex.match(host))
    elif isinstance(host, (bytes, bytearray, memoryview)):
        return bool(regexb.match(host))
    else:
        raise TypeError(f"{host} [{type(host)}] is not a str or bytes")


is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)


def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
    return is_ipv4_address(host) or is_ipv6_address(host)


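# --- Illustrative sketch, not part of the original module: the str/bytes
# dispatch above means text and raw host values are both accepted.
def _demo_is_ip_address() -> None:
    assert is_ip_address("127.0.0.1")
    assert is_ip_address(b"::1")
    assert not is_ip_address("example.com")
    assert not is_ip_address(None)

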
_cached_current_datetime: Optional[int] = None
_cached_formatted_datetime = ""


def rfc822_formatted_time() -> str:
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now != _cached_current_datetime:
        # Weekday and month names for HTTP date/time formatting;
        # always English!
        # Tuples are constants stored in codeobject!
        _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
        _monthname = (
            "",  # Dummy so we can use 1-based month numbers
            "Jan",
            "Feb",
            "Mar",
            "Apr",
            "May",
            "Jun",
            "Jul",
            "Aug",
            "Sep",
            "Oct",
            "Nov",
            "Dec",
        )

        year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
        _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
            _weekdayname[wd],
            day,
            _monthname[month],
            year,
            hh,
            mm,
            ss,
        )
        _cached_current_datetime = now
    return _cached_formatted_datetime


def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
    ref, name = info
    ob = ref()
    if ob is not None:
        with suppress(Exception):
            getattr(ob, name)()


def weakref_handle(
    ob: object,
    name: str,
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    if timeout is not None and timeout > 0:
        when = loop.time() + timeout
        if timeout >= timeout_ceil_threshold:
            when = ceil(when)

        return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
    return None


def call_later(
    cb: Callable[[], Any],
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    if timeout is not None and timeout > 0:
        when = loop.time() + timeout
        if timeout > timeout_ceil_threshold:
            when = ceil(when)
        return loop.call_at(when, cb)
    return None


class TimeoutHandle:
    """Timeout handle"""

    def __init__(
        self,
        loop: asyncio.AbstractEventLoop,
        timeout: Optional[float],
        ceil_threshold: float = 5,
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._ceil_threshold = ceil_threshold
        self._callbacks: List[
            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
        ] = []

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.Handle]:
        timeout = self._timeout
        if timeout is not None and timeout > 0:
            when = self._loop.time() + timeout
            if timeout >= self._ceil_threshold:
                when = ceil(when)
            return self._loop.call_at(when, self.__call__)
        else:
            return None

    def timer(self) -> "BaseTimerContext":
        if self._timeout is not None and self._timeout > 0:
            timer = TimerContext(self._loop)
            self.register(timer.timeout)
            return timer
        else:
            return TimerNoop()

    def __call__(self) -> None:
        for cb, args, kwargs in self._callbacks:
            with suppress(Exception):
                cb(*args, **kwargs)

        self._callbacks.clear()


class BaseTimerContext(ContextManager["BaseTimerContext"]):
    def assert_timeout(self) -> None:
        """Raise TimeoutError if timeout has been exceeded."""


class TimerNoop(BaseTimerContext):
    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        return


class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._tasks: List[asyncio.Task[Any]] = []
        self._cancelled = False

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timer has already been cancelled."""
        if self._cancelled:
            raise asyncio.TimeoutError from None

    def __enter__(self) -> BaseTimerContext:
        task = current_task(loop=self._loop)

        if task is None:
            raise RuntimeError(
                "Timeout context manager should be used inside a task"
            )

        if self._cancelled:
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        if self._tasks:
            self._tasks.pop()

        if exc_type is asyncio.CancelledError and self._cancelled:
            raise asyncio.TimeoutError from None
        return None

    def timeout(self) -> None:
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

            self._cancelled = True


def ceil_timeout(
    delay: Optional[float], ceil_threshold: float = 5
) -> async_timeout.Timeout:
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    loop = get_running_loop()
    now = loop.time()
    when = now + delay
    if delay > ceil_threshold:
        when = ceil(when)
    return async_timeout.timeout_at(when)


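# --- Illustrative sketch, not part of the original module: ceil_timeout()
# is used like async_timeout.timeout(), except that deadlines longer than
# ceil_threshold seconds are rounded up to a whole loop-clock second so
# nearby timers can share wakeups.
async def _demo_ceil_timeout() -> None:
    async with ceil_timeout(10):  # above the 5 s threshold, so ceil()ed
        await asyncio.sleep(0.1)

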
class HeadersMixin:
|
|
||||||
ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])
|
|
||||||
|
|
||||||
_headers: MultiMapping[str]
|
|
||||||
|
|
||||||
_content_type: Optional[str] = None
|
|
||||||
_content_dict: Optional[Dict[str, str]] = None
|
|
||||||
_stored_content_type: Union[str, None, _SENTINEL] = sentinel
|
|
||||||
|
|
||||||
def _parse_content_type(self, raw: Optional[str]) -> None:
|
|
||||||
self._stored_content_type = raw
|
|
||||||
if raw is None:
|
|
||||||
# default value according to RFC 2616
|
|
||||||
self._content_type = "application/octet-stream"
|
|
||||||
self._content_dict = {}
|
|
||||||
else:
|
|
||||||
msg = HeaderParser().parsestr("Content-Type: " + raw)
|
|
||||||
self._content_type = msg.get_content_type()
|
|
||||||
params = msg.get_params(())
|
|
||||||
self._content_dict = dict(params[1:]) # First element is content type again
|
|
||||||
|
|
||||||
@property
|
|
||||||
def content_type(self) -> str:
|
|
||||||
"""The value of content part for Content-Type HTTP header."""
|
|
||||||
raw = self._headers.get(hdrs.CONTENT_TYPE)
|
|
||||||
if self._stored_content_type != raw:
|
|
||||||
self._parse_content_type(raw)
|
|
||||||
return self._content_type # type: ignore[return-value]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def charset(self) -> Optional[str]:
|
|
||||||
"""The value of charset part for Content-Type HTTP header."""
|
|
||||||
raw = self._headers.get(hdrs.CONTENT_TYPE)
|
|
||||||
if self._stored_content_type != raw:
|
|
||||||
self._parse_content_type(raw)
|
|
||||||
return self._content_dict.get("charset") # type: ignore[union-attr]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def content_length(self) -> Optional[int]:
|
|
||||||
"""The value of Content-Length HTTP header."""
|
|
||||||
content_length = self._headers.get(hdrs.CONTENT_LENGTH)
|
|
||||||
|
|
||||||
if content_length is not None:
|
|
||||||
return int(content_length)
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||


def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
    if not fut.done():
        fut.set_result(result)


def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
    if not fut.done():
        fut.set_exception(exc)


@functools.total_ordering
class AppKey(Generic[_T]):
    """Keys for static typing support in Application."""

    __slots__ = ("_name", "_t", "__orig_class__")

    # This may be set by Python when instantiating with a generic type. We need to
    # support this, in order to support types that are not concrete classes,
    # like Iterable, which can't be passed as the second parameter to __init__.
    __orig_class__: Type[object]

    def __init__(self, name: str, t: Optional[Type[_T]] = None):
        # Prefix with module name to help deduplicate key names.
        frame = inspect.currentframe()
        while frame:
            if frame.f_code.co_name == "<module>":
                module: str = frame.f_globals["__name__"]
                break
            frame = frame.f_back

        self._name = module + "." + name
        self._t = t

    def __lt__(self, other: object) -> bool:
        if isinstance(other, AppKey):
            return self._name < other._name
        return True  # Order AppKey above other types.

    def __repr__(self) -> str:
        t = self._t
        if t is None:
            with suppress(AttributeError):
                # Set to type arg.
                t = get_args(self.__orig_class__)[0]

        if t is None:
            t_repr = "<<Unknown>>"
        elif isinstance(t, type):
            if t.__module__ == "builtins":
                t_repr = t.__qualname__
            else:
                t_repr = f"{t.__module__}.{t.__qualname__}"
        else:
            t_repr = repr(t)
        return f"<AppKey({self._name}, type={t_repr})>"
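
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. An AppKey ties a name to a value type, so Application storage
# lookups become statically typed:
_demo_key = AppKey("demo_counter", int)
# app[_demo_key] = 0   # a type checker now knows the stored value is an int
# repr() includes the defining module name, something like
# "<AppKey(__main__.demo_counter, type=int)>" (exact module name varies).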


class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        raise TypeError(
            "Inheritance class {} from ChainMapProxy is forbidden".format(cls.__name__)
        )

    @overload  # type: ignore[override]
    def __getitem__(self, key: AppKey[_T]) -> _T:
        ...

    @overload
    def __getitem__(self, key: str) -> Any:
        ...

    def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
        for mapping in self._maps:
            try:
                return mapping[key]
            except KeyError:
                pass
        raise KeyError(key)

    @overload  # type: ignore[override]
    def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]:
        ...

    @overload
    def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]:
        ...

    @overload
    def get(self, key: str, default: Any = ...) -> Any:
        ...

    def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
        try:
            return self[key]
        except KeyError:
            return default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        return len(set().union(*self._maps))

    def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
        d: Dict[Union[str, AppKey[Any]], Any] = {}
        for mapping in reversed(self._maps):
            # reuses stored hash values if possible
            d.update(mapping)
        return iter(d)

    def __contains__(self, key: object) -> bool:
        return any(key in m for m in self._maps)

    def __bool__(self) -> bool:
        return any(self._maps)

    def __repr__(self) -> str:
        content = ", ".join(map(repr, self._maps))
        return f"ChainMapProxy({content})"
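
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. ChainMapProxy searches several mappings in order, read-only,
# first hit wins:
_proxy = ChainMapProxy([{"a": 1}, {"a": 0, "b": 2}])
assert _proxy["a"] == 1 and _proxy.get("b") == 2 and len(_proxy) == 2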


# https://tools.ietf.org/html/rfc7232#section-2.3
_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")

ETAG_ANY = "*"


@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    value: str
    is_weak: bool = False


def validate_etag_value(value: str) -> None:
    if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value):
        raise ValueError(
            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
        )
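
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. QUOTED_ETAG_RE splits an entity-tag into its weakness marker
# and opaque value, per RFC 7232 section 2.3:
_m = QUOTED_ETAG_RE.fullmatch('W/"67ab43"')
assert _m is not None and _m.group(1) == "W/" and _m.group(2) == "67ab43"
# validate_etag_value('"quoted"') raises ValueError: '"' is not in _ETAGC.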


def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Process a date string, return a datetime object"""
    if date_str is not None:
        timetuple = parsedate(date_str)
        if timetuple is not None:
            with suppress(ValueError):
                return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
    return None
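
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. RFC 1123 dates parse to an aware UTC datetime:
_dt = parse_http_date("Wed, 21 Oct 2015 07:28:00 GMT")
assert _dt is not None and _dt.year == 2015 and _dt.tzinfo is datetime.timezone.utc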


def must_be_empty_body(method: str, code: int) -> bool:
    """Check if a request must return an empty body."""
    return (
        status_code_must_be_empty_body(code)
        or method_must_be_empty_body(method)
        or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
    )


def method_must_be_empty_body(method: str) -> bool:
    """Check if a method must return an empty body."""
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
    return method.upper() == hdrs.METH_HEAD


def status_code_must_be_empty_body(code: int) -> bool:
    """Check if a status code must return an empty body."""
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    return code in {204, 304} or 100 <= code < 200


def should_remove_content_length(method: str, code: int) -> bool:
    """Check if a Content-Length header should be removed.

    This should always be a subset of must_be_empty_body.
    """
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
    return (
        code in {204, 304}
        or 100 <= code < 200
        or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
    )
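
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. HEAD responses and 1xx/204/304 statuses never carry a body;
# a successful CONNECT additionally drops any Content-Length header:
assert must_be_empty_body("HEAD", 200)
assert must_be_empty_body("GET", 304)
assert should_remove_content_length("CONNECT", 200)
assert not should_remove_content_length("GET", 200)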
@@ -1,72 +0,0 @@
import sys
from http import HTTPStatus
from typing import Mapping, Tuple

from . import __version__
from .http_exceptions import HttpProcessingError as HttpProcessingError
from .http_parser import (
    HeadersParser as HeadersParser,
    HttpParser as HttpParser,
    HttpRequestParser as HttpRequestParser,
    HttpResponseParser as HttpResponseParser,
    RawRequestMessage as RawRequestMessage,
    RawResponseMessage as RawResponseMessage,
)
from .http_websocket import (
    WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
    WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
    WS_KEY as WS_KEY,
    WebSocketError as WebSocketError,
    WebSocketReader as WebSocketReader,
    WebSocketWriter as WebSocketWriter,
    WSCloseCode as WSCloseCode,
    WSMessage as WSMessage,
    WSMsgType as WSMsgType,
    ws_ext_gen as ws_ext_gen,
    ws_ext_parse as ws_ext_parse,
)
from .http_writer import (
    HttpVersion as HttpVersion,
    HttpVersion10 as HttpVersion10,
    HttpVersion11 as HttpVersion11,
    StreamWriter as StreamWriter,
)

__all__ = (
    "HttpProcessingError",
    "RESPONSES",
    "SERVER_SOFTWARE",
    # .http_writer
    "StreamWriter",
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    # .http_parser
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
    # .http_websocket
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "ws_ext_gen",
    "ws_ext_parse",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
)


SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
    sys.version_info, __version__
)

RESPONSES: Mapping[int, Tuple[str, str]] = {
    v: (v.phrase, v.description) for v in HTTPStatus.__members__.values()
}
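
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. RESPONSES maps every stdlib HTTPStatus code to its
# (phrase, description) pair:
assert RESPONSES[404][0] == "Not Found"
# SERVER_SOFTWARE looks like "Python/3.11 aiohttp/3.9.0" (versions vary).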
@@ -1,106 +0,0 @@
"""Low-level http related exceptions."""


from textwrap import indent
from typing import Optional, Union

from .typedefs import _CIMultiDict

__all__ = ("HttpProcessingError",)


class HttpProcessingError(Exception):
    """HTTP error.

    Shortcut for raising HTTP errors with custom code, message and headers.

    code: HTTP Error code.
    message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs
    """

    code = 0
    message = ""
    headers = None

    def __init__(
        self,
        *,
        code: Optional[int] = None,
        message: str = "",
        headers: Optional[_CIMultiDict] = None,
    ) -> None:
        if code is not None:
            self.code = code
        self.headers = headers
        self.message = message

    def __str__(self) -> str:
        msg = indent(self.message, " ")
        return f"{self.code}, message:\n{msg}"

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
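
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. The class is a shortcut for carrying code/message/headers:
_err = HttpProcessingError(code=413, message="Payload too large")
assert _err.code == 413 and str(_err).startswith("413, message:")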


class BadHttpMessage(HttpProcessingError):

    code = 400
    message = "Bad Request"

    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
        super().__init__(message=message, headers=headers)
        self.args = (message,)


class HttpBadRequest(BadHttpMessage):

    code = 400
    message = "Bad Request"


class PayloadEncodingError(BadHttpMessage):
    """Base class for payload errors"""


class ContentEncodingError(PayloadEncodingError):
    """Content encoding error."""


class TransferEncodingError(PayloadEncodingError):
    """Transfer encoding error."""


class ContentLengthError(PayloadEncodingError):
    """Not enough data to satisfy the Content-Length header."""


class LineTooLong(BadHttpMessage):
    def __init__(
        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
    ) -> None:
        super().__init__(
            f"Got more than {limit} bytes ({actual_size}) when reading {line}."
        )
        self.args = (line, limit, actual_size)


class InvalidHeader(BadHttpMessage):
    def __init__(self, hdr: Union[bytes, str]) -> None:
        hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr
        super().__init__(f"Invalid HTTP header: {hdr!r}")
        self.hdr = hdr_s
        self.args = (hdr,)


class BadStatusLine(BadHttpMessage):
    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
        if not isinstance(line, str):
            line = repr(line)
        super().__init__(error or f"Bad status line {line!r}")
        self.args = (line,)
        self.line = line


class InvalidURLError(BadHttpMessage):
    pass
File diff suppressed because it is too large
@@ -1,734 +0,0 @@
"""WebSocket protocol versions 13 and 8."""

import asyncio
import functools
import json
import random
import re
import sys
import zlib
from enum import IntEnum
from struct import Struct
from typing import (
    Any,
    Callable,
    Final,
    List,
    NamedTuple,
    Optional,
    Pattern,
    Set,
    Tuple,
    Union,
    cast,
)

from .base_protocol import BaseProtocol
from .compression_utils import ZLibCompressor, ZLibDecompressor
from .helpers import NO_EXTENSIONS
from .streams import DataQueue

__all__ = (
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
)


class WSCloseCode(IntEnum):
    OK = 1000
    GOING_AWAY = 1001
    PROTOCOL_ERROR = 1002
    UNSUPPORTED_DATA = 1003
    ABNORMAL_CLOSURE = 1006
    INVALID_TEXT = 1007
    POLICY_VIOLATION = 1008
    MESSAGE_TOO_BIG = 1009
    MANDATORY_EXTENSION = 1010
    INTERNAL_ERROR = 1011
    SERVICE_RESTART = 1012
    TRY_AGAIN_LATER = 1013
    BAD_GATEWAY = 1014


ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}

# For websockets, keeping latency low is extremely important as implementations
# generally expect to be able to send and receive messages quickly. We use a
# larger chunk size than the default to reduce the number of executor calls
# since the executor is a significant source of latency and overhead when
# the chunks are small. A size of 5KiB was chosen because it is also the
# same value python-zlib-ng chose to use as the threshold to release the GIL.

WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024


class WSMsgType(IntEnum):
    # websocket spec types
    CONTINUATION = 0x0
    TEXT = 0x1
    BINARY = 0x2
    PING = 0x9
    PONG = 0xA
    CLOSE = 0x8

    # aiohttp specific types
    CLOSING = 0x100
    CLOSED = 0x101
    ERROR = 0x102

    text = TEXT
    binary = BINARY
    ping = PING
    pong = PONG
    close = CLOSE
    closing = CLOSING
    closed = CLOSED
    error = ERROR


WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"


UNPACK_LEN2 = Struct("!H").unpack_from
UNPACK_LEN3 = Struct("!Q").unpack_from
UNPACK_CLOSE_CODE = Struct("!H").unpack
PACK_LEN1 = Struct("!BB").pack
PACK_LEN2 = Struct("!BBH").pack
PACK_LEN3 = Struct("!BBQ").pack
PACK_CLOSE_CODE = Struct("!H").pack
MSG_SIZE: Final[int] = 2**14
DEFAULT_LIMIT: Final[int] = 2**16


class WSMessage(NamedTuple):
    type: WSMsgType
    # To type correctly, this would need some kind of tagged union for each type.
    data: Any
    extra: Optional[str]

    def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
        """Return parsed JSON data.

        .. versionadded:: 0.22
        """
        return loads(self.data)


WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
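
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. WSMessage is a plain NamedTuple; json() parses its data with
# any loads() callable (json.loads by default):
_msg = WSMessage(WSMsgType.TEXT, '{"op": "ping"}', None)
assert _msg.json() == {"op": "ping"}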


class WebSocketError(Exception):
    """WebSocket protocol parser error."""

    def __init__(self, code: int, message: str) -> None:
        self.code = code
        super().__init__(code, message)

    def __str__(self) -> str:
        return cast(str, self.args[1])


class WSHandshakeError(Exception):
    """WebSocket protocol handshake error."""


native_byteorder: Final[str] = sys.byteorder


# Used by _websocket_mask_python
@functools.lru_cache
def _xor_table() -> List[bytes]:
    return [bytes(a ^ b for a in range(256)) for b in range(256)]


def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
    """Websocket masking function.

    `mask` is a `bytes` object of length 4; `data` is a `bytearray`
    object of any length. The contents of `data` are masked with `mask`,
    as specified in section 5.3 of RFC 6455.

    Note that this function mutates the `data` argument.

    This pure-python implementation may be replaced by an optimized
    version when available.

    """
    assert isinstance(data, bytearray), data
    assert len(mask) == 4, mask

    if data:
        _XOR_TABLE = _xor_table()
        a, b, c, d = (_XOR_TABLE[n] for n in mask)
        data[::4] = data[::4].translate(a)
        data[1::4] = data[1::4].translate(b)
        data[2::4] = data[2::4].translate(c)
        data[3::4] = data[3::4].translate(d)
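
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. XOR masking is an involution: applying the same 4-byte mask
# twice restores the payload (RFC 6455 section 5.3), which is what the
# table-driven translate() calls above implement one byte-offset at a time:
_demo_buf = bytearray(b"hello websocket")
_websocket_mask_python(b"\x01\x02\x03\x04", _demo_buf)
_websocket_mask_python(b"\x01\x02\x03\x04", _demo_buf)
assert _demo_buf == b"hello websocket"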

if NO_EXTENSIONS:  # pragma: no cover
    _websocket_mask = _websocket_mask_python
else:
    try:
        from ._websocket import _websocket_mask_cython  # type: ignore[import-not-found]

        _websocket_mask = _websocket_mask_cython
    except ImportError:  # pragma: no cover
        _websocket_mask = _websocket_mask_python

_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])


_WS_EXT_RE: Final[Pattern[str]] = re.compile(
    r"^(?:;\s*(?:"
    r"(server_no_context_takeover)|"
    r"(client_no_context_takeover)|"
    r"(server_max_window_bits(?:=(\d+))?)|"
    r"(client_max_window_bits(?:=(\d+))?)))*$"
)

_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")


def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
    if not extstr:
        return 0, False

    compress = 0
    notakeover = False
    for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
        defext = ext.group(1)
        # Return compress = 15 when a bare `permessage-deflate` is received.
        if not defext:
            compress = 15
            break
        match = _WS_EXT_RE.match(defext)
        if match:
            compress = 15
            if isserver:
                # The server never fails to detect the compress handshake.
                # The server does not need to send max window bits to the client.
                if match.group(4):
                    compress = int(match.group(4))
                    # Group 3 must match if group 4 matches.
                    # zlib does not support wbits=8, so if the window size
                    # is unsupported, CONTINUE to the next extension.
                    if compress > 15 or compress < 9:
                        compress = 0
                        continue
                if match.group(1):
                    notakeover = True
                # Ignore regex groups 5 & 6 for client_max_window_bits.
                break
            else:
                if match.group(6):
                    compress = int(match.group(6))
                    # Group 5 must match if group 6 matches.
                    # zlib does not support wbits=8, so if the window size
                    # is unsupported, FAIL the parse progress.
                    if compress > 15 or compress < 9:
                        raise WSHandshakeError("Invalid window size")
                if match.group(2):
                    notakeover = True
                # Ignore regex groups 5 & 6 for client_max_window_bits.
                break
        # Fail if on the client side and the extension does not match.
        elif not isserver:
            raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))

    return compress, notakeover
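
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. A bare offer enables a 15-bit window; an offer with
# parameters is decoded into (max window bits, no-context-takeover) on the
# server side:
assert ws_ext_parse("permessage-deflate") == (15, False)
assert ws_ext_parse(
    "permessage-deflate; server_max_window_bits=10; server_no_context_takeover",
    isserver=True,
) == (10, True)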

def ws_ext_gen(
    compress: int = 15, isserver: bool = False, server_notakeover: bool = False
) -> str:
    # client_notakeover=False is not used for the server.
    # zlib does not support wbits=8.
    if compress < 9 or compress > 15:
        raise ValueError(
            "Compress wbits must be between 9 and 15; zlib does not support wbits=8"
        )
    enabledext = ["permessage-deflate"]
    if not isserver:
        enabledext.append("client_max_window_bits")

    if compress < 15:
        enabledext.append("server_max_window_bits=" + str(compress))
    if server_notakeover:
        enabledext.append("server_no_context_takeover")
    # if client_notakeover:
    #     enabledext.append('client_no_context_takeover')
    return "; ".join(enabledext)
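
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. The generated header mirrors what ws_ext_parse() accepts:
assert ws_ext_gen(compress=10, isserver=False) == (
    "permessage-deflate; client_max_window_bits; server_max_window_bits=10"
)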

class WSParserState(IntEnum):
    READ_HEADER = 1
    READ_PAYLOAD_LENGTH = 2
    READ_PAYLOAD_MASK = 3
    READ_PAYLOAD = 4


class WebSocketReader:
    def __init__(
        self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
    ) -> None:
        self.queue = queue
        self._max_msg_size = max_msg_size

        self._exc: Optional[BaseException] = None
        self._partial = bytearray()
        self._state = WSParserState.READ_HEADER

        self._opcode: Optional[int] = None
        self._frame_fin = False
        self._frame_opcode: Optional[int] = None
        self._frame_payload = bytearray()

        self._tail = b""
        self._has_mask = False
        self._frame_mask: Optional[bytes] = None
        self._payload_length = 0
        self._payload_length_flag = 0
        self._compressed: Optional[bool] = None
        self._decompressobj: Optional[ZLibDecompressor] = None
        self._compress = compress

    def feed_eof(self) -> None:
        self.queue.feed_eof()

    def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        if self._exc:
            return True, data

        try:
            return self._feed_data(data)
        except Exception as exc:
            self._exc = exc
            self.queue.set_exception(exc)
            return True, b""

    def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        for fin, opcode, payload, compressed in self.parse_frame(data):
            if compressed and not self._decompressobj:
                self._decompressobj = ZLibDecompressor(suppress_deflate_header=True)
            if opcode == WSMsgType.CLOSE:
                if len(payload) >= 2:
                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            f"Invalid close code: {close_code}",
                        )
                    try:
                        close_message = payload[2:].decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc
                    msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
                elif payload:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        f"Invalid close frame: {fin} {opcode} {payload!r}",
                    )
                else:
                    msg = WSMessage(WSMsgType.CLOSE, 0, "")

                self.queue.feed_data(msg, 0)

            elif opcode == WSMsgType.PING:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PING, payload, ""), len(payload)
                )

            elif opcode == WSMsgType.PONG:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PONG, payload, ""), len(payload)
                )

            elif (
                opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
                and self._opcode is None
            ):
                raise WebSocketError(
                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
                )
            else:
                # load text/binary
                if not fin:
                    # got a partial frame payload
                    if opcode != WSMsgType.CONTINUATION:
                        self._opcode = opcode
                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )
                else:
                    # the previous frame was not finished,
                    # so we should get a continuation opcode
                    if self._partial:
                        if opcode != WSMsgType.CONTINUATION:
                            raise WebSocketError(
                                WSCloseCode.PROTOCOL_ERROR,
                                "The opcode in non-fin frame is expected "
                                "to be zero, got {!r}".format(opcode),
                            )

                    if opcode == WSMsgType.CONTINUATION:
                        assert self._opcode is not None
                        opcode = self._opcode
                        self._opcode = None

                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )

                    # Decompression must be done after all frames
                    # are received.
                    if compressed:
                        assert self._decompressobj is not None
                        self._partial.extend(_WS_DEFLATE_TRAILING)
                        payload_merged = self._decompressobj.decompress_sync(
                            self._partial, self._max_msg_size
                        )
                        if self._decompressobj.unconsumed_tail:
                            left = len(self._decompressobj.unconsumed_tail)
                            raise WebSocketError(
                                WSCloseCode.MESSAGE_TOO_BIG,
                                "Decompressed message size {} exceeds limit {}".format(
                                    self._max_msg_size + left, self._max_msg_size
                                ),
                            )
                    else:
                        payload_merged = bytes(self._partial)

                    self._partial.clear()

                    if opcode == WSMsgType.TEXT:
                        try:
                            text = payload_merged.decode("utf-8")
                            self.queue.feed_data(
                                WSMessage(WSMsgType.TEXT, text, ""), len(text)
                            )
                        except UnicodeDecodeError as exc:
                            raise WebSocketError(
                                WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                            ) from exc
                    else:
                        self.queue.feed_data(
                            WSMessage(WSMsgType.BINARY, payload_merged, ""),
                            len(payload_merged),
                        )

        return False, b""

    def parse_frame(
        self, buf: bytes
    ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
        """Return the next frame from the socket."""
        frames = []
        if self._tail:
            buf, self._tail = self._tail + buf, b""

        start_pos = 0
        buf_length = len(buf)

        while True:
            # read header
            if self._state == WSParserState.READ_HEADER:
                if buf_length - start_pos >= 2:
                    data = buf[start_pos : start_pos + 2]
                    start_pos += 2
                    first_byte, second_byte = data

                    fin = (first_byte >> 7) & 1
                    rsv1 = (first_byte >> 6) & 1
                    rsv2 = (first_byte >> 5) & 1
                    rsv3 = (first_byte >> 4) & 1
                    opcode = first_byte & 0xF

                    # frame-fin = %x0 ; more frames of this message follow
                    #           / %x1 ; final frame of this message
                    # frame-rsv1 = %x0 ;
                    #     1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv2 = %x0 ;
                    #     1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv3 = %x0 ;
                    #     1 bit, MUST be 0 unless negotiated otherwise
                    #
                    # Remove rsv1 from this test for deflate development
                    if rsv2 or rsv3 or (rsv1 and not self._compress):
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    if opcode > 0x7 and fin == 0:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received fragmented control frame",
                        )

                    has_mask = (second_byte >> 7) & 1
                    length = second_byte & 0x7F

                    # Control frames MUST have a payload
                    # length of 125 bytes or less
                    if opcode > 0x7 and length > 125:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Control frame payload cannot be larger than 125 bytes",
                        )

                    # Set the compress status if the last frame was FIN,
                    # or if this is the first fragment; raise an error if a
                    # later fragment arrives with rsv1 = 0x1.
                    if self._frame_fin or self._compressed is None:
                        self._compressed = True if rsv1 else False
                    elif rsv1:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    self._frame_fin = bool(fin)
                    self._frame_opcode = opcode
                    self._has_mask = bool(has_mask)
                    self._payload_length_flag = length
                    self._state = WSParserState.READ_PAYLOAD_LENGTH
                else:
                    break

            # read payload length
            if self._state == WSParserState.READ_PAYLOAD_LENGTH:
                length = self._payload_length_flag
                if length == 126:
                    if buf_length - start_pos >= 2:
                        data = buf[start_pos : start_pos + 2]
                        start_pos += 2
                        length = UNPACK_LEN2(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                elif length > 126:
                    if buf_length - start_pos >= 8:
                        data = buf[start_pos : start_pos + 8]
                        start_pos += 8
                        length = UNPACK_LEN3(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                else:
                    self._payload_length = length
                    self._state = (
                        WSParserState.READ_PAYLOAD_MASK
                        if self._has_mask
                        else WSParserState.READ_PAYLOAD
                    )

            # read payload mask
            if self._state == WSParserState.READ_PAYLOAD_MASK:
                if buf_length - start_pos >= 4:
                    self._frame_mask = buf[start_pos : start_pos + 4]
                    start_pos += 4
                    self._state = WSParserState.READ_PAYLOAD
                else:
                    break

            if self._state == WSParserState.READ_PAYLOAD:
                length = self._payload_length
                payload = self._frame_payload

                chunk_len = buf_length - start_pos
                if length >= chunk_len:
                    self._payload_length = length - chunk_len
                    payload.extend(buf[start_pos:])
                    start_pos = buf_length
                else:
                    self._payload_length = 0
                    payload.extend(buf[start_pos : start_pos + length])
                    start_pos = start_pos + length

                if self._payload_length == 0:
                    if self._has_mask:
                        assert self._frame_mask is not None
                        _websocket_mask(self._frame_mask, payload)

                    frames.append(
                        (self._frame_fin, self._frame_opcode, payload, self._compressed)
                    )

                    self._frame_payload = bytearray()
                    self._state = WSParserState.READ_HEADER
                else:
                    break

        self._tail = buf[start_pos:]

        return frames


class WebSocketWriter:
    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        random: Any = random.Random(),
        compress: int = 0,
        notakeover: bool = False,
    ) -> None:
        self.protocol = protocol
        self.transport = transport
        self.use_mask = use_mask
        self.randrange = random.randrange
        self.compress = compress
        self.notakeover = notakeover
        self._closing = False
        self._limit = limit
        self._output_size = 0
        self._compressobj: Any = None  # actually compressobj

    async def _send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ConnectionResetError("Cannot write to closing transport")

        rsv = 0

        # Only compress larger packets (disabled).
        # Do small packets need to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            if compress:
                # Do not set self._compress if compression is only for this frame
                compressobj = ZLibCompressor(
                    level=zlib.Z_BEST_SPEED,
                    wbits=-compress,
                    max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
                )
            else:  # self.compress
                if not self._compressobj:
                    self._compressobj = ZLibCompressor(
                        level=zlib.Z_BEST_SPEED,
                        wbits=-self.compress,
                        max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
                    )
                compressobj = self._compressobj

            message = await compressobj.compress(message)
            message += compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
            )
            if message.endswith(_WS_DEFLATE_TRAILING):
                message = message[:-4]
            rsv = rsv | 0x40

        msg_length = len(message)

        use_mask = self.use_mask
        if use_mask:
            mask_bit = 0x80
        else:
            mask_bit = 0

        if msg_length < 126:
            header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
        elif msg_length < (1 << 16):
            header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
        else:
            header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
        if use_mask:
            mask = self.randrange(0, 0xFFFFFFFF)
            mask = mask.to_bytes(4, "big")
            message = bytearray(message)
            _websocket_mask(mask, message)
            self._write(header + mask + message)
            self._output_size += len(header) + len(mask) + len(message)
        else:
            if len(message) > MSG_SIZE:
                self._write(header)
                self._write(message)
            else:
                self._write(header + message)

            self._output_size += len(header) + len(message)

        if self._output_size > self._limit:
            self._output_size = 0
            await self.protocol._drain_helper()

    def _write(self, data: bytes) -> None:
        if self.transport is None or self.transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self.transport.write(data)

    async def pong(self, message: Union[bytes, str] = b"") -> None:
        """Send pong message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PONG)

    async def ping(self, message: Union[bytes, str] = b"") -> None:
        """Send ping message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PING)

    async def send(
        self,
        message: Union[str, bytes],
        binary: bool = False,
        compress: Optional[int] = None,
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        if binary:
            await self._send_frame(message, WSMsgType.BINARY, compress)
        else:
            await self._send_frame(message, WSMsgType.TEXT, compress)

    async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self._send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            self._closing = True
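
# --- Illustrative sketch; not part of the original file removed in this
# commit. The 2-byte header of a short masked TEXT frame packs FIN|opcode,
# then mask-bit|payload-length (lengths 126/127 switch to PACK_LEN2/PACK_LEN3):
assert PACK_LEN1(0x80 | WSMsgType.TEXT, 5 | 0x80) == b"\x81\x85"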
@@ -1,198 +0,0 @@
"""Http related parsers and protocol."""

import asyncio
import zlib
from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union  # noqa

from multidict import CIMultiDict

from .abc import AbstractStreamWriter
from .base_protocol import BaseProtocol
from .compression_utils import ZLibCompressor
from .helpers import NO_EXTENSIONS

__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")


class HttpVersion(NamedTuple):
    major: int
    minor: int


HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)


_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]


class StreamWriter(AbstractStreamWriter):
    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
        on_headers_sent: _T_OnHeadersSent = None,
    ) -> None:
        self._protocol = protocol

        self.loop = loop
        self.length = None
        self.chunked = False
        self.buffer_size = 0
        self.output_size = 0

        self._eof = False
        self._compress: Optional[ZLibCompressor] = None
        self._drain_waiter = None

        self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
        self._on_headers_sent: _T_OnHeadersSent = on_headers_sent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        return self._protocol.transport

    @property
    def protocol(self) -> BaseProtocol:
        return self._protocol

    def enable_chunking(self) -> None:
        self.chunked = True

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        self._compress = ZLibCompressor(encoding=encoding, strategy=strategy)

    def _write(self, chunk: bytes) -> None:
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self.transport
        if not self._protocol.connected or transport is None or transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        transport.write(chunk)

    async def write(
        self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
    ) -> None:
        """Write a chunk of data to the stream.

        write_eof() indicates the end of the stream;
        the writer can't be used after write_eof() has been called.
        write() returns a drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = await self._compress.compress(chunk)
            if not chunk:
                return

        if self.length is not None:
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len_pre + chunk + b"\r\n"

            self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        if self._on_headers_sent is not None:
            await self._on_headers_sent(headers)

        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    async def write_eof(self, chunk: bytes = b"") -> None:
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            if chunk:
                chunk = await self._compress.compress(chunk)

            chunk += self._compress.flush()
            if chunk and self.chunked:
                chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
        else:
            if self.chunked:
                if chunk:
                    chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                    chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
                else:
                    chunk = b"0\r\n\r\n"

        if chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

            await w.write(data)
            await w.drain()
        """
        if self._protocol.transport is not None:
            await self._protocol._drain_helper()


def _safe_header(string: str) -> str:
    if "\r" in string or "\n" in string:
        raise ValueError(
            "Newline or carriage return detected in headers. "
            "Potential header injection attack."
        )
    return string


def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
    headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
    line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
    return line.encode("utf-8")
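
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. Headers are serialized as a flat CRLF-separated block ending
# with an empty line:
assert _py_serialize_headers(
    "HTTP/1.1 200 OK", CIMultiDict({"Content-Length": "0"})
) == b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n"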

_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore[import-not-found]

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    pass
@@ -1,41 +0,0 @@
import asyncio
import collections
from typing import Any, Deque, Optional


class EventResultOrError:
    """Event asyncio lock helper class.

    Wraps the Event asyncio lock allowing either to awake the
    locked Tasks without any error or raising an exception.

    Thanks to @vorpalsmith for the simple design.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._exc: Optional[BaseException] = None
        self._event = asyncio.Event()
        self._waiters: Deque[asyncio.Future[Any]] = collections.deque()

    def set(self, exc: Optional[BaseException] = None) -> None:
        self._exc = exc
        self._event.set()

    async def wait(self) -> Any:
        waiter = self._loop.create_task(self._event.wait())
        self._waiters.append(waiter)
        try:
            val = await waiter
        finally:
            self._waiters.remove(waiter)

        if self._exc is not None:
            raise self._exc

        return val

    def cancel(self) -> None:
        """Cancel all waiters"""
        for waiter in self._waiters:
            waiter.cancel()
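
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. set() wakes every waiter; set(exc) makes each pending wait()
# raise exc instead:
async def _demo_event() -> None:
    loop = asyncio.get_running_loop()
    ev = EventResultOrError(loop)
    loop.call_later(0.01, ev.set)  # wake all waiters without an error
    assert await ev.wait() is True

# asyncio.run(_demo_event())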
@@ -1,8 +0,0 @@
import logging

access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
@@ -1,969 +0,0 @@
import base64
import binascii
import json
import re
import uuid
import warnings
import zlib
from collections import deque
from types import TracebackType
from typing import (
    TYPE_CHECKING,
    Any,
    AsyncIterator,
    Deque,
    Dict,
    Iterator,
    List,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
    cast,
)
from urllib.parse import parse_qsl, unquote, urlencode

from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping

from .compression_utils import ZLibCompressor, ZLibDecompressor
from .hdrs import (
    CONTENT_DISPOSITION,
    CONTENT_ENCODING,
    CONTENT_LENGTH,
    CONTENT_TRANSFER_ENCODING,
    CONTENT_TYPE,
)
from .helpers import CHAR, TOKEN, parse_mimetype, reify
from .http import HeadersParser
from .payload import (
    JsonPayload,
    LookupError,
    Order,
    Payload,
    StringPayload,
    get_payload,
    payload_type,
)
from .streams import StreamReader

__all__ = (
    "MultipartReader",
    "MultipartWriter",
    "BodyPartReader",
    "BadContentDispositionHeader",
    "BadContentDispositionParam",
    "parse_content_disposition",
    "content_disposition_filename",
)


if TYPE_CHECKING:  # pragma: no cover
    from .client_reqrep import ClientResponse


class BadContentDispositionHeader(RuntimeWarning):
    pass


class BadContentDispositionParam(RuntimeWarning):
    pass


def parse_content_disposition(
    header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
    def is_token(string: str) -> bool:
        return bool(string) and TOKEN >= set(string)

    def is_quoted(string: str) -> bool:
        return string[0] == string[-1] == '"'

    def is_rfc5987(string: str) -> bool:
        return is_token(string) and string.count("'") == 2

    def is_extended_param(string: str) -> bool:
        return string.endswith("*")

    def is_continuous_param(string: str) -> bool:
        pos = string.find("*") + 1
        if not pos:
            return False
        substring = string[pos:-1] if string.endswith("*") else string[pos:]
        return substring.isdigit()

    def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
        return re.sub(f"\\\\([{chars}])", "\\1", text)

    if not header:
        return None, {}

    disptype, *parts = header.split(";")
    if not is_token(disptype):
        warnings.warn(BadContentDispositionHeader(header))
        return None, {}

    params: Dict[str, str] = {}
    while parts:
        item = parts.pop(0)

        if "=" not in item:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        key, value = item.split("=", 1)
        key = key.lower().strip()
        value = value.lstrip()

        if key in params:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        if not is_token(key):
            warnings.warn(BadContentDispositionParam(item))
            continue

        elif is_continuous_param(key):
            if is_quoted(value):
                value = unescape(value[1:-1])
            elif not is_token(value):
                warnings.warn(BadContentDispositionParam(item))
                continue

        elif is_extended_param(key):
            if is_rfc5987(value):
                encoding, _, value = value.split("'", 2)
                encoding = encoding or "utf-8"
            else:
                warnings.warn(BadContentDispositionParam(item))
                continue

            try:
                value = unquote(value, encoding, "strict")
            except UnicodeDecodeError:  # pragma: nocover
                warnings.warn(BadContentDispositionParam(item))
                continue

        else:
            failed = True
            if is_quoted(value):
                failed = False
                value = unescape(value[1:-1].lstrip("\\/"))
            elif is_token(value):
                failed = False
            elif parts:
                # the value may contain a bare ';' (e.g. in a filename);
                # this handles just that one case, a proper fix needs a
                # parser redesign
                _value = f"{value};{parts[0]}"
                if is_quoted(_value):
                    parts.pop(0)
                    value = unescape(_value[1:-1].lstrip("\\/"))
                    failed = False

            if failed:
                warnings.warn(BadContentDispositionHeader(header))
                return None, {}

        params[key] = value

    return disptype.lower(), params
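
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. A quoted filename parameter is unquoted and unescaped:
assert parse_content_disposition('attachment; filename="report.pdf"') == (
    "attachment",
    {"filename": "report.pdf"},
)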


def content_disposition_filename(
    params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
    name_suf = "%s*" % name
    if not params:
        return None
    elif name_suf in params:
        return params[name_suf]
    elif name in params:
        return params[name]
    else:
        parts = []
        fnparams = sorted(
            (key, value) for key, value in params.items() if key.startswith(name_suf)
        )
        for num, (key, value) in enumerate(fnparams):
            _, tail = key.split("*", 1)
            if tail.endswith("*"):
                tail = tail[:-1]
            if tail == str(num):
                parts.append(value)
            else:
                break
        if not parts:
            return None
        value = "".join(parts)
        if "'" in value:
            encoding, _, value = value.split("'", 2)
            encoding = encoding or "utf-8"
            return unquote(value, encoding, "strict")
        return value
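
# --- Illustrative usage sketch; not part of the original file removed in
# this commit. Numbered filename* continuations (RFC 2231) are reassembled
# in order:
assert content_disposition_filename(
    {"filename*0": "long_", "filename*1": "name.txt"}
) == "long_name.txt"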
|
|
||||||
|
|
||||||
class MultipartResponseWrapper:
|
|
||||||
"""Wrapper around the MultipartReader.
|
|
||||||
|
|
||||||
It takes care about
|
|
||||||
underlying connection and close it when it needs in.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
resp: "ClientResponse",
|
|
||||||
stream: "MultipartReader",
|
|
||||||
) -> None:
|
|
||||||
self.resp = resp
|
|
||||||
self.stream = stream
|
|
||||||
|
|
||||||
def __aiter__(self) -> "MultipartResponseWrapper":
|
|
||||||
return self
|
|
||||||
|
|
||||||
async def __anext__(
|
|
||||||
self,
|
|
||||||
) -> Union["MultipartReader", "BodyPartReader"]:
|
|
||||||
part = await self.next()
|
|
||||||
if part is None:
|
|
||||||
raise StopAsyncIteration
|
|
||||||
return part
|
|
||||||
|
|
||||||
def at_eof(self) -> bool:
|
|
||||||
"""Returns True when all response data had been read."""
|
|
||||||
return self.resp.content.at_eof()
|
|
||||||
|
|
||||||
async def next(
|
|
||||||
self,
|
|
||||||
) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
|
|
||||||
"""Emits next multipart reader object."""
|
|
||||||
item = await self.stream.next()
|
|
||||||
if self.stream.at_eof():
|
|
||||||
await self.release()
|
|
||||||
return item
|
|
||||||
|
|
||||||
async def release(self) -> None:
|
|
||||||
"""Release the connection gracefully.
|
|
||||||
|
|
||||||
All remaining content is read to the void.
|
|
||||||
"""
|
|
||||||
await self.resp.release()


class BodyPartReader:
    """Multipart reader for a single body part."""

    chunk_size = 8192

    def __init__(
        self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
    ) -> None:
        self.headers = headers
        self._boundary = boundary
        self._content = content
        self._at_eof = False
        length = self.headers.get(CONTENT_LENGTH, None)
        self._length = int(length) if length is not None else None
        self._read_bytes = 0
        self._unread: Deque[bytes] = deque()
        self._prev_chunk: Optional[bytes] = None
        self._content_eof = 0
        self._cache: Dict[str, Any] = {}

    def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
        return self  # type: ignore[return-value]

    async def __anext__(self) -> bytes:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    async def next(self) -> Optional[bytes]:
        item = await self.read()
        if not item:
            return None
        return item

    async def read(self, *, decode: bool = False) -> bytes:
        """Reads body part data.

        decode: Decodes data following the encoding method from the
                Content-Encoding header. If the header is missing,
                the data remains untouched.
        """
        if self._at_eof:
            return b""
        data = bytearray()
        while not self._at_eof:
            data.extend(await self.read_chunk(self.chunk_size))
        if decode:
            return self.decode(data)
        return data

    async def read_chunk(self, size: int = chunk_size) -> bytes:
        """Reads a chunk of body part content of the specified size.

        size: chunk size
        """
        if self._at_eof:
            return b""
        if self._length:
            chunk = await self._read_chunk_from_length(size)
        else:
            chunk = await self._read_chunk_from_stream(size)

        self._read_bytes += len(chunk)
        if self._read_bytes == self._length:
            self._at_eof = True
        if self._at_eof:
            clrf = await self._content.readline()
            assert (
                b"\r\n" == clrf
            ), "reader did not read all the data or it is malformed"
        return chunk

    async def _read_chunk_from_length(self, size: int) -> bytes:
        # Reads a chunk of body part content of the specified size.
        # The body part must have a Content-Length header with a proper value.
        assert self._length is not None, "Content-Length required for chunked read"
        chunk_size = min(size, self._length - self._read_bytes)
        chunk = await self._content.read(chunk_size)
        return chunk

    async def _read_chunk_from_stream(self, size: int) -> bytes:
        # Reads a content chunk of a body part with unknown length.
        # The Content-Length header for the body part is not necessary.
        assert (
            size >= len(self._boundary) + 2
        ), "Chunk size must be greater than or equal to boundary length + 2"
        first_chunk = self._prev_chunk is None
        if first_chunk:
            self._prev_chunk = await self._content.read(size)

        chunk = await self._content.read(size)
        self._content_eof += int(self._content.at_eof())
        assert self._content_eof < 3, "Reading after EOF"
        assert self._prev_chunk is not None
        window = self._prev_chunk + chunk
        sub = b"\r\n" + self._boundary
        if first_chunk:
            idx = window.find(sub)
        else:
            idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
        if idx >= 0:
            # pushing boundary back to content
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", category=DeprecationWarning)
                self._content.unread_data(window[idx:])
            if size > idx:
                self._prev_chunk = self._prev_chunk[:idx]
            chunk = window[len(self._prev_chunk) : idx]
        if not chunk:
            self._at_eof = True
        result = self._prev_chunk
        self._prev_chunk = chunk
        return result
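
A pure-bytes illustration of the sliding-window search above: the boundary can straddle two reads, so each search window is the previous chunk plus the new one, scanned from just before the seam:

    boundary = b"--xyz"
    sub = b"\r\n" + boundary
    prev_chunk = b"part data \r"     # the delimiter starts at the end of read 1...
    chunk = b"\n--xyz\r\nnext part"  # ...and finishes at the start of read 2
    window = prev_chunk + chunk
    idx = window.find(sub, max(0, len(prev_chunk) - len(sub)))
    assert idx == 10                 # found across the seam: b"part data " is 10 bytes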

    async def readline(self) -> bytes:
        """Reads the body part line by line."""
        if self._at_eof:
            return b""

        if self._unread:
            line = self._unread.popleft()
        else:
            line = await self._content.readline()

        if line.startswith(self._boundary):
            # the very last boundary may not come with \r\n,
            # so set single rules for everyone
            sline = line.rstrip(b"\r\n")
            boundary = self._boundary
            last_boundary = self._boundary + b"--"
            # ensure that we read exactly the boundary, not something similar
            if sline == boundary or sline == last_boundary:
                self._at_eof = True
                self._unread.append(line)
                return b""
        else:
            next_line = await self._content.readline()
            if next_line.startswith(self._boundary):
                line = line[:-2]  # strip CRLF but only once
            self._unread.append(next_line)

        return line

    async def release(self) -> None:
        """Like read(), but reads all the data to the void."""
        if self._at_eof:
            return
        while not self._at_eof:
            await self.read_chunk(self.chunk_size)

    async def text(self, *, encoding: Optional[str] = None) -> str:
        """Like read(), but assumes that the body part contains text data."""
        data = await self.read(decode=True)
        # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm
        # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send
        encoding = encoding or self.get_charset(default="utf-8")
        return data.decode(encoding)

    async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Like read(), but assumes that the body part contains JSON data."""
        data = await self.read(decode=True)
        if not data:
            return None
        encoding = encoding or self.get_charset(default="utf-8")
        return cast(Dict[str, Any], json.loads(data.decode(encoding)))

    async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
        """Like read(), but assumes that the body part contains form-urlencoded data."""
        data = await self.read(decode=True)
        if not data:
            return []
        if encoding is not None:
            real_encoding = encoding
        else:
            real_encoding = self.get_charset(default="utf-8")
        try:
            decoded_data = data.rstrip().decode(real_encoding)
        except UnicodeDecodeError:
            raise ValueError("data cannot be decoded with %s encoding" % real_encoding)

        return parse_qsl(
            decoded_data,
            keep_blank_values=True,
            encoding=real_encoding,
        )

    def at_eof(self) -> bool:
        """Returns True if the boundary was reached, False otherwise."""
        return self._at_eof

    def decode(self, data: bytes) -> bytes:
        """Decodes data.

        Decoding is done according to the specified Content-Encoding
        or Content-Transfer-Encoding headers value.
        """
        if CONTENT_TRANSFER_ENCODING in self.headers:
            data = self._decode_content_transfer(data)
        if CONTENT_ENCODING in self.headers:
            return self._decode_content(data)
        return data

    def _decode_content(self, data: bytes) -> bytes:
        encoding = self.headers.get(CONTENT_ENCODING, "").lower()
        if encoding == "identity":
            return data
        if encoding in {"deflate", "gzip"}:
            return ZLibDecompressor(
                encoding=encoding,
                suppress_deflate_header=True,
            ).decompress_sync(data)

        raise RuntimeError(f"unknown content encoding: {encoding}")

    def _decode_content_transfer(self, data: bytes) -> bytes:
        encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()

        if encoding == "base64":
            return base64.b64decode(data)
        elif encoding == "quoted-printable":
            return binascii.a2b_qp(data)
        elif encoding in ("binary", "8bit", "7bit"):
            return data
        else:
            raise RuntimeError(
                f"unknown content transfer encoding: {encoding}"
            )
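
Both decoders come straight from the standard library; a quick standalone check of what they produce:

    import base64
    import binascii

    assert base64.b64decode(b"aGVsbG8=") == b"hello"
    assert binascii.a2b_qp(b"na=C3=AFve") == "naïve".encode("utf-8")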

    def get_charset(self, default: str) -> str:
        """Returns the charset parameter from the Content-Type header, or default."""
        ctype = self.headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)
        return mimetype.parameters.get("charset", default)

    @reify
    def name(self) -> Optional[str]:
        """Returns the name specified in the Content-Disposition header.

        If the header is missing or malformed, returns None.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "name")

    @reify
    def filename(self) -> Optional[str]:
        """Returns the filename specified in the Content-Disposition header.

        Returns None if the header is missing or malformed.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "filename")


@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
    def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value, *args, **kwargs)

        params: Dict[str, str] = {}
        if value.name is not None:
            params["name"] = value.name
        if value.filename is not None:
            params["filename"] = value.filename

        if params:
            self.set_content_disposition("attachment", True, **params)

    async def write(self, writer: Any) -> None:
        field = self._value
        chunk = await field.read_chunk(size=2**16)
        while chunk:
            await writer.write(field.decode(chunk))
            chunk = await field.read_chunk(size=2**16)


class MultipartReader:
    """Multipart body reader."""

    #: Response wrapper, used when the multipart reader is constructed
    #: from a response.
    response_wrapper_cls = MultipartResponseWrapper
    #: Multipart reader class, used to handle multipart/* body parts.
    #: None points to type(self)
    multipart_reader_cls = None
    #: Body part reader class for non multipart/* content types.
    part_reader_cls = BodyPartReader

    def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
        self.headers = headers
        self._boundary = ("--" + self._get_boundary()).encode()
        self._content = content
        self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None
        self._at_eof = False
        self._at_bof = True
        self._unread: List[bytes] = []

    def __aiter__(
        self,
    ) -> AsyncIterator["BodyPartReader"]:
        return self  # type: ignore[return-value]

    async def __anext__(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    @classmethod
    def from_response(
        cls,
        response: "ClientResponse",
    ) -> MultipartResponseWrapper:
        """Constructs a reader instance from an HTTP response.

        :param response: :class:`~aiohttp.client.ClientResponse` instance
        """
        obj = cls.response_wrapper_cls(
            response, cls(response.headers, response.content)
        )
        return obj

    def at_eof(self) -> bool:
        """Returns True if the final boundary was reached, False otherwise."""
        return self._at_eof

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        """Emits the next multipart body part."""
        # So, if we're at BOF, we need to skip till the boundary.
        if self._at_eof:
            return None
        await self._maybe_release_last_part()
        if self._at_bof:
            await self._read_until_first_boundary()
            self._at_bof = False
        else:
            await self._read_boundary()
        if self._at_eof:  # we just read the last boundary, nothing to do there
            return None
        self._last_part = await self.fetch_next_part()
        return self._last_part

    async def release(self) -> None:
        """Reads all the body parts to the void until the final boundary."""
        while not self._at_eof:
            item = await self.next()
            if item is None:
                break
            await item.release()

    async def fetch_next_part(
        self,
    ) -> Union["MultipartReader", BodyPartReader]:
        """Returns the next body part reader."""
        headers = await self._read_headers()
        return self._get_part_reader(headers)

    def _get_part_reader(
        self,
        headers: "CIMultiDictProxy[str]",
    ) -> Union["MultipartReader", BodyPartReader]:
        """Dispatches the response by the `Content-Type` header.

        Returns a suitable reader instance.

        :param dict headers: Response headers
        """
        ctype = headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)

        if mimetype.type == "multipart":
            if self.multipart_reader_cls is None:
                return type(self)(headers, self._content)
            return self.multipart_reader_cls(headers, self._content)
        else:
            return self.part_reader_cls(self._boundary, headers, self._content)

    def _get_boundary(self) -> str:
        mimetype = parse_mimetype(self.headers[CONTENT_TYPE])

        assert mimetype.type == "multipart", "multipart/* content type expected"

        if "boundary" not in mimetype.parameters:
            raise ValueError(
                "boundary missing in Content-Type: %s" % self.headers[CONTENT_TYPE]
            )

        boundary = mimetype.parameters["boundary"]
        if len(boundary) > 70:
            raise ValueError("boundary %r is too long (70 chars max)" % boundary)

        return boundary

    async def _readline(self) -> bytes:
        if self._unread:
            return self._unread.pop()
        return await self._content.readline()

    async def _read_until_first_boundary(self) -> None:
        while True:
            chunk = await self._readline()
            if chunk == b"":
                raise ValueError(
                    "Could not find starting boundary %r" % (self._boundary)
                )
            chunk = chunk.rstrip()
            if chunk == self._boundary:
                return
            elif chunk == self._boundary + b"--":
                self._at_eof = True
                return

    async def _read_boundary(self) -> None:
        chunk = (await self._readline()).rstrip()
        if chunk == self._boundary:
            pass
        elif chunk == self._boundary + b"--":
            self._at_eof = True
            epilogue = await self._readline()
            next_line = await self._readline()

            # the epilogue is expected and then either the end of input or the
            # parent multipart boundary, if the parent boundary is found then
            # it should be marked as unread and handed to the parent for
            # processing
            if next_line[:2] == b"--":
                self._unread.append(next_line)
            # otherwise the request is likely missing an epilogue and both
            # lines should be passed to the parent for processing
            # (this handles the old behavior gracefully)
            else:
                self._unread.extend([next_line, epilogue])
        else:
            raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")

    async def _read_headers(self) -> "CIMultiDictProxy[str]":
        lines = [b""]
        while True:
            chunk = await self._content.readline()
            chunk = chunk.strip()
            lines.append(chunk)
            if not chunk:
                break
        parser = HeadersParser()
        headers, raw_headers = parser.parse_headers(lines)
        return headers

    async def _maybe_release_last_part(self) -> None:
        """Ensures that the last read body part is read completely."""
        if self._last_part is not None:
            if not self._last_part.at_eof():
                await self._last_part.release()
                self._unread.extend(self._last_part._unread)
            self._last_part = None


_Part = Tuple[Payload, str, str]


class MultipartWriter(Payload):
    """Multipart body writer."""

    def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
        boundary = boundary if boundary is not None else uuid.uuid4().hex
        # The underlying Payload API demands a str (utf-8), not bytes,
        # so we need to ensure we don't lose anything during conversion.
        # As a result, require the boundary to be ASCII only in both
        # situations.

        try:
            self._boundary = boundary.encode("ascii")
        except UnicodeEncodeError:
            raise ValueError("boundary should contain ASCII only chars") from None
        ctype = f"multipart/{subtype}; boundary={self._boundary_value}"

        super().__init__(None, content_type=ctype)

        self._parts: List[_Part] = []

    def __enter__(self) -> "MultipartWriter":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        pass

    def __iter__(self) -> Iterator[_Part]:
        return iter(self._parts)

    def __len__(self) -> int:
        return len(self._parts)

    def __bool__(self) -> bool:
        return True

    _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z")
    _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]")

    @property
    def _boundary_value(self) -> str:
        """Wrap boundary parameter value in quotes, if necessary.

        Reads self.boundary and returns a unicode string.
        """
        # Refer to RFCs 7231, 7230, 5234.
        #
        # parameter      = token "=" ( token / quoted-string )
        # token          = 1*tchar
        # quoted-string  = DQUOTE *( qdtext / quoted-pair ) DQUOTE
        # qdtext         = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
        # obs-text       = %x80-FF
        # quoted-pair    = "\" ( HTAB / SP / VCHAR / obs-text )
        # tchar          = "!" / "#" / "$" / "%" / "&" / "'" / "*"
        #                  / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
        #                  / DIGIT / ALPHA
        #                  ; any VCHAR, except delimiters
        # VCHAR          = %x21-7E
        value = self._boundary
        if re.match(self._valid_tchar_regex, value):
            return value.decode("ascii")  # cannot fail

        if re.search(self._invalid_qdtext_char_regex, value):
            raise ValueError("boundary value contains invalid characters")

        # escape %x5C and %x22
        quoted_value_content = value.replace(b"\\", b"\\\\")
        quoted_value_content = quoted_value_content.replace(b'"', b'\\"')

        return '"' + quoted_value_content.decode("ascii") + '"'

    @property
    def boundary(self) -> str:
        return self._boundary.decode("ascii")

    def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload:
        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Payload):
            obj.headers.update(headers)
            return self.append_payload(obj)
        else:
            try:
                payload = get_payload(obj, headers=headers)
            except LookupError:
                raise TypeError("Cannot create payload from %r" % obj)
            else:
                return self.append_payload(payload)

    def append_payload(self, payload: Payload) -> Payload:
        """Adds a new body part to the multipart writer."""
        # compression
        encoding: Optional[str] = payload.headers.get(
            CONTENT_ENCODING,
            "",
        ).lower()
        if encoding and encoding not in ("deflate", "gzip", "identity"):
            raise RuntimeError(f"unknown content encoding: {encoding}")
        if encoding == "identity":
            encoding = None

        # te encoding
        te_encoding: Optional[str] = payload.headers.get(
            CONTENT_TRANSFER_ENCODING,
            "",
        ).lower()
        if te_encoding not in ("", "base64", "quoted-printable", "binary"):
            raise RuntimeError(
                f"unknown content transfer encoding: {te_encoding}"
            )
        if te_encoding == "binary":
            te_encoding = None

        # size
        size = payload.size
        if size is not None and not (encoding or te_encoding):
            payload.headers[CONTENT_LENGTH] = str(size)

        self._parts.append((payload, encoding, te_encoding))  # type: ignore[arg-type]
        return payload

    def append_json(
        self, obj: Any, headers: Optional[MultiMapping[str]] = None
    ) -> Payload:
        """Helper to append a JSON part."""
        if headers is None:
            headers = CIMultiDict()

        return self.append_payload(JsonPayload(obj, headers=headers))

    def append_form(
        self,
        obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
        headers: Optional[MultiMapping[str]] = None,
    ) -> Payload:
        """Helper to append a form-urlencoded part."""
        assert isinstance(obj, (Sequence, Mapping))

        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Mapping):
            obj = list(obj.items())
        data = urlencode(obj, doseq=True)

        return self.append_payload(
            StringPayload(
                data, headers=headers, content_type="application/x-www-form-urlencoded"
            )
        )
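
A hedged sketch of composing and sending a body with these helpers (the httpbin URL is illustrative only):

    import aiohttp

    async def upload() -> None:
        with aiohttp.MultipartWriter("form-data") as mpwriter:
            mpwriter.append("plain text part")
            mpwriter.append_json({"answer": 42})
            mpwriter.append_form([("key", "value")])
        async with aiohttp.ClientSession() as session:
            async with session.post("http://httpbin.org/post", data=mpwriter) as resp:
                print(resp.status)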

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        total = 0
        for part, encoding, te_encoding in self._parts:
            if encoding or te_encoding or part.size is None:
                return None

            total += int(
                2
                + len(self._boundary)
                + 2
                + part.size  # b'--' + self._boundary + b'\r\n'
                + len(part._binary_headers)
                + 2  # b'\r\n'
            )

        total += 2 + len(self._boundary) + 4  # b'--' + self._boundary + b'--\r\n'
        return total

    async def write(self, writer: Any, close_boundary: bool = True) -> None:
        """Write body."""
        for part, encoding, te_encoding in self._parts:
            await writer.write(b"--" + self._boundary + b"\r\n")
            await writer.write(part._binary_headers)

            if encoding or te_encoding:
                w = MultipartPayloadWriter(writer)
                if encoding:
                    w.enable_compression(encoding)
                if te_encoding:
                    w.enable_encoding(te_encooding) if False else w.enable_encoding(te_encoding)
                await part.write(w)  # type: ignore[arg-type]
                await w.write_eof()
            else:
                await part.write(writer)

            await writer.write(b"\r\n")

        if close_boundary:
            await writer.write(b"--" + self._boundary + b"--\r\n")


class MultipartPayloadWriter:
    def __init__(self, writer: Any) -> None:
        self._writer = writer
        self._encoding: Optional[str] = None
        self._compress: Optional[ZLibCompressor] = None
        self._encoding_buffer: Optional[bytearray] = None

    def enable_encoding(self, encoding: str) -> None:
        if encoding == "base64":
            self._encoding = encoding
            self._encoding_buffer = bytearray()
        elif encoding == "quoted-printable":
            self._encoding = "quoted-printable"

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        self._compress = ZLibCompressor(
            encoding=encoding,
            suppress_deflate_header=True,
            strategy=strategy,
        )

    async def write_eof(self) -> None:
        if self._compress is not None:
            chunk = self._compress.flush()
            if chunk:
                self._compress = None
                await self.write(chunk)

        if self._encoding == "base64":
            if self._encoding_buffer:
                await self._writer.write(base64.b64encode(self._encoding_buffer))

    async def write(self, chunk: bytes) -> None:
        if self._compress is not None:
            if chunk:
                chunk = await self._compress.compress(chunk)
                if not chunk:
                    return

        if self._encoding == "base64":
            buf = self._encoding_buffer
            assert buf is not None
            buf.extend(chunk)

            if buf:
                div, mod = divmod(len(buf), 3)
                enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
                if enc_chunk:
                    b64chunk = base64.b64encode(enc_chunk)
                    await self._writer.write(b64chunk)
        elif self._encoding == "quoted-printable":
            await self._writer.write(binascii.b2a_qp(chunk))
        else:
            await self._writer.write(chunk)
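
Why the buffer is cut at a multiple of three bytes: base64 maps 3-byte groups to 4 output characters, so only whole triples may be encoded mid-stream while the remainder waits for the next write (or for write_eof):

    import base64

    buf = bytearray(b"hello wo")      # 8 bytes buffered so far
    div, mod = divmod(len(buf), 3)    # div=2 complete triples, mod=2 left over
    enc_chunk, buf = buf[: div * 3], buf[div * 3 :]
    assert base64.b64encode(enc_chunk) == b"aGVsbG8g"  # b"hello " encoded cleanly
    assert buf == bytearray(b"wo")    # carried over, no padding emitted yet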
@@ -1,463 +0,0 @@
import asyncio
import enum
import io
import json
import mimetypes
import os
import warnings
from abc import ABC, abstractmethod
from itertools import chain
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    ByteString,
    Dict,
    Final,
    Iterable,
    Optional,
    TextIO,
    Tuple,
    Type,
    Union,
)

from multidict import CIMultiDict

from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import (
    _SENTINEL,
    content_disposition_header,
    guess_filename,
    parse_mimetype,
    sentinel,
)
from .streams import StreamReader
from .typedefs import JSONEncoder, _CIMultiDict

__all__ = (
    "PAYLOAD_REGISTRY",
    "get_payload",
    "payload_type",
    "Payload",
    "BytesPayload",
    "StringPayload",
    "IOBasePayload",
    "BytesIOPayload",
    "BufferedReaderPayload",
    "TextIOPayload",
    "StringIOPayload",
    "JsonPayload",
    "AsyncIterablePayload",
)

TOO_LARGE_BYTES_BODY: Final[int] = 2**20  # 1 MB

if TYPE_CHECKING:  # pragma: no cover
    from typing import List


class LookupError(Exception):
    pass


class Order(str, enum.Enum):
    normal = "normal"
    try_first = "try_first"
    try_last = "try_last"


def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)


def register_payload(
    factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
    PAYLOAD_REGISTRY.register(factory, type, order=order)


class payload_type:
    def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
        self.type = type
        self.order = order

    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
        register_payload(factory, self.type, order=self.order)
        return factory


PayloadType = Type["Payload"]
_PayloadRegistryItem = Tuple[PayloadType, Any]


class PayloadRegistry:
    """Payload registry.

    note: we need zope.interface for more efficient adapter search
    """

    def __init__(self) -> None:
        self._first: List[_PayloadRegistryItem] = []
        self._normal: List[_PayloadRegistryItem] = []
        self._last: List[_PayloadRegistryItem] = []

    def get(
        self,
        data: Any,
        *args: Any,
        _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
        **kwargs: Any,
    ) -> "Payload":
        if isinstance(data, Payload):
            return data
        for factory, type in _CHAIN(self._first, self._normal, self._last):
            if isinstance(data, type):
                return factory(data, *args, **kwargs)

        raise LookupError()

    def register(
        self, factory: PayloadType, type: Any, *, order: Order = Order.normal
    ) -> None:
        if order is Order.try_first:
            self._first.append((factory, type))
        elif order is Order.normal:
            self._normal.append((factory, type))
        elif order is Order.try_last:
            self._last.append((factory, type))
        else:
            raise ValueError(f"Unsupported order {order!r}")


class Payload(ABC):

    _default_content_type: str = "application/octet-stream"
    _size: Optional[int] = None

    def __init__(
        self,
        value: Any,
        headers: Optional[
            Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
        ] = None,
        content_type: Union[str, None, _SENTINEL] = sentinel,
        filename: Optional[str] = None,
        encoding: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        self._encoding = encoding
        self._filename = filename
        self._headers: _CIMultiDict = CIMultiDict()
        self._value = value
        if content_type is not sentinel and content_type is not None:
            self._headers[hdrs.CONTENT_TYPE] = content_type
        elif self._filename is not None:
            content_type = mimetypes.guess_type(self._filename)[0]
            if content_type is None:
                content_type = self._default_content_type
            self._headers[hdrs.CONTENT_TYPE] = content_type
        else:
            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
        self._headers.update(headers or {})

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        return self._size

    @property
    def filename(self) -> Optional[str]:
        """Filename of the payload."""
        return self._filename

    @property
    def headers(self) -> _CIMultiDict:
        """Custom item headers"""
        return self._headers

    @property
    def _binary_headers(self) -> bytes:
        return (
            "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
                "utf-8"
            )
            + b"\r\n"
        )

    @property
    def encoding(self) -> Optional[str]:
        """Payload encoding"""
        return self._encoding

    @property
    def content_type(self) -> str:
        """Content type"""
        return self._headers[hdrs.CONTENT_TYPE]

    def set_content_disposition(
        self,
        disptype: str,
        quote_fields: bool = True,
        _charset: str = "utf-8",
        **params: Any,
    ) -> None:
        """Sets ``Content-Disposition`` header."""
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, _charset=_charset, **params
        )

    @abstractmethod
    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write payload.

        writer is an AbstractStreamWriter instance.
        """


class BytesPayload(Payload):
    def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, (bytes, bytearray, memoryview)):
            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        if isinstance(value, memoryview):
            self._size = value.nbytes
        else:
            self._size = len(value)

        if self._size > TOO_LARGE_BYTES_BODY:
            kwargs = {"source": self}
            warnings.warn(
                "Sending a large body directly with raw bytes might"
                " lock the event loop. You should probably pass an "
                "io.BytesIO object instead",
                ResourceWarning,
                **kwargs,
            )

    async def write(self, writer: AbstractStreamWriter) -> None:
        await writer.write(self._value)


class StringPayload(BytesPayload):
    def __init__(
        self,
        value: str,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:

        if encoding is None:
            if content_type is None:
                real_encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                real_encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
            real_encoding = encoding

        super().__init__(
            value.encode(real_encoding),
            encoding=real_encoding,
            content_type=content_type,
            *args,
            **kwargs,
        )
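
A hedged sketch of extending the registry through the payload_type decorator defined above (Fraction is just an arbitrary example type, not something the module itself registers):

    from fractions import Fraction

    @payload_type(Fraction)
    class FractionPayload(StringPayload):
        def __init__(self, value: Fraction, *args: Any, **kwargs: Any) -> None:
            super().__init__(str(value), *args, **kwargs)

    # get_payload(Fraction(1, 3)) now dispatches to FractionPayload.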


class StringIOPayload(StringPayload):
    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
        super().__init__(value.read(), *args, **kwargs)


class IOBasePayload(Payload):
    _value: IO[Any]

    def __init__(
        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
    ) -> None:
        if "filename" not in kwargs:
            kwargs["filename"] = guess_filename(value)

        super().__init__(value, *args, **kwargs)

        if self._filename is not None and disposition is not None:
            if hdrs.CONTENT_DISPOSITION not in self.headers:
                self.set_content_disposition(disposition, filename=self._filename)

    async def write(self, writer: AbstractStreamWriter) -> None:
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                await writer.write(chunk)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)


class TextIOPayload(IOBasePayload):
    _value: TextIO

    def __init__(
        self,
        value: TextIO,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:

        if encoding is None:
            if content_type is None:
                encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding

        super().__init__(
            value,
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )

    @property
    def size(self) -> Optional[int]:
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            return None

    async def write(self, writer: AbstractStreamWriter) -> None:
        loop = asyncio.get_event_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                data = (
                    chunk.encode(encoding=self._encoding)
                    if self._encoding
                    else chunk.encode()
                )
                await writer.write(data)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)


class BytesIOPayload(IOBasePayload):
    @property
    def size(self) -> int:
        position = self._value.tell()
        end = self._value.seek(0, os.SEEK_END)
        self._value.seek(position)
        return end - position


class BufferedReaderPayload(IOBasePayload):
    @property
    def size(self) -> Optional[int]:
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            # data.fileno() is not supported, e.g.
            # io.BufferedReader(io.BytesIO(b'data'))
            return None


class JsonPayload(BytesPayload):
    def __init__(
        self,
        value: Any,
        encoding: str = "utf-8",
        content_type: str = "application/json",
        dumps: JSONEncoder = json.dumps,
        *args: Any,
        **kwargs: Any,
    ) -> None:

        super().__init__(
            dumps(value).encode(encoding),
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )


if TYPE_CHECKING:  # pragma: no cover
    from typing import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator[bytes]
    _AsyncIterable = AsyncIterable[bytes]
else:
    from collections.abc import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator
    _AsyncIterable = AsyncIterable


class AsyncIterablePayload(Payload):

    _iter: Optional[_AsyncIterator] = None

    def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, AsyncIterable):
            raise TypeError(
                "value argument must support "
                "collections.abc.AsyncIterable interface, "
                "got {!r}".format(type(value))
            )

        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        self._iter = value.__aiter__()

    async def write(self, writer: AbstractStreamWriter) -> None:
        if self._iter:
            try:
                # the `self._iter is not None` check prevents rare cases
                # when the same iterable is used twice
                while True:
                    chunk = await self._iter.__anext__()
                    await writer.write(chunk)
            except StopAsyncIteration:
                self._iter = None
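
A hedged sketch of what this class enables on the client side: any async iterable of bytes can be passed as request data (the URL is illustrative only):

    import aiohttp

    async def gen():
        yield b"chunk one "
        yield b"chunk two"

    async def send() -> None:
        async with aiohttp.ClientSession() as session:
            await session.post("http://httpbin.org/post", data=gen())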


class StreamReaderPayload(AsyncIterablePayload):
    def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value.iter_any(), *args, **kwargs)


PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives more specialized async iterables, such as
# BodyPartReaderPayload, a chance to override the default.
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
@@ -1,75 +0,0 @@
"""
|
|
||||||
Payload implementation for coroutines as data provider.
|
|
||||||
|
|
||||||
As a simple case, you can upload data from file::
|
|
||||||
|
|
||||||
@aiohttp.streamer
|
|
||||||
async def file_sender(writer, file_name=None):
|
|
||||||
with open(file_name, 'rb') as f:
|
|
||||||
chunk = f.read(2**16)
|
|
||||||
while chunk:
|
|
||||||
await writer.write(chunk)
|
|
||||||
|
|
||||||
chunk = f.read(2**16)
|
|
||||||
|
|
||||||
Then you can use `file_sender` like this:
|
|
||||||
|
|
||||||
async with session.post('http://httpbin.org/post',
|
|
||||||
data=file_sender(file_name='huge_file')) as resp:
|
|
||||||
print(await resp.text())
|
|
||||||
|
|
||||||
..note:: Coroutine must accept `writer` as first argument
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import types
|
|
||||||
import warnings
|
|
||||||
from typing import Any, Awaitable, Callable, Dict, Tuple
|
|
||||||
|
|
||||||
from .abc import AbstractStreamWriter
|
|
||||||
from .payload import Payload, payload_type
|
|
||||||
|
|
||||||
__all__ = ("streamer",)
|
|
||||||
|
|
||||||
|
|
||||||
class _stream_wrapper:
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
coro: Callable[..., Awaitable[None]],
|
|
||||||
args: Tuple[Any, ...],
|
|
||||||
kwargs: Dict[str, Any],
|
|
||||||
) -> None:
|
|
||||||
self.coro = types.coroutine(coro)
|
|
||||||
self.args = args
|
|
||||||
self.kwargs = kwargs
|
|
||||||
|
|
||||||
async def __call__(self, writer: AbstractStreamWriter) -> None:
|
|
||||||
await self.coro(writer, *self.args, **self.kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class streamer:
|
|
||||||
def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
|
|
||||||
warnings.warn(
|
|
||||||
"@streamer is deprecated, use async generators instead",
|
|
||||||
DeprecationWarning,
|
|
||||||
stacklevel=2,
|
|
||||||
)
|
|
||||||
self.coro = coro
|
|
||||||
|
|
||||||
def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
|
|
||||||
return _stream_wrapper(self.coro, args, kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
@payload_type(_stream_wrapper)
|
|
||||||
class StreamWrapperPayload(Payload):
|
|
||||||
async def write(self, writer: AbstractStreamWriter) -> None:
|
|
||||||
await self._value(writer)
|
|
||||||
|
|
||||||
|
|
||||||
@payload_type(streamer)
|
|
||||||
class StreamPayload(StreamWrapperPayload):
|
|
||||||
def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
|
|
||||||
super().__init__(value(), *args, **kwargs)
|
|
||||||
|
|
||||||
async def write(self, writer: AbstractStreamWriter) -> None:
|
|
||||||
await self._value(writer)
|
|
||||||
@@ -1 +0,0 @@
Marker
@@ -1,381 +0,0 @@
import asyncio
import contextlib
import warnings
from typing import Any, Awaitable, Callable, Dict, Iterator, Optional, Type, Union

import pytest

from aiohttp.helpers import isasyncgenfunction
from aiohttp.web import Application

from .test_utils import (
    BaseTestServer,
    RawTestServer,
    TestClient,
    TestServer,
    loop_context,
    setup_test_loop,
    teardown_test_loop,
    unused_port as _unused_port,
)

try:
    import uvloop
except ImportError:  # pragma: no cover
    uvloop = None  # type: ignore[assignment]

AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]
AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]]
AiohttpServer = Callable[[Application], Awaitable[TestServer]]


def pytest_addoption(parser):  # type: ignore[no-untyped-def]
    parser.addoption(
        "--aiohttp-fast",
        action="store_true",
        default=False,
        help="run tests faster by disabling extra checks",
    )
    parser.addoption(
        "--aiohttp-loop",
        action="store",
        default="pyloop",
        help="run tests with specific loop: pyloop, uvloop or all",
    )
    parser.addoption(
        "--aiohttp-enable-loop-debug",
        action="store_true",
        default=False,
        help="enable event loop debug mode",
    )


def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
    """Set up pytest fixture.

    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
    """
    func = fixturedef.func

    if isasyncgenfunction(func):
        # async generator fixture
        is_async_gen = True
    elif asyncio.iscoroutinefunction(func):
        # regular async fixture
        is_async_gen = False
    else:
        # not an async fixture, nothing to do
        return

    strip_request = False
    if "request" not in fixturedef.argnames:
        fixturedef.argnames += ("request",)
        strip_request = True

    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
        request = kwargs["request"]
        if strip_request:
            del kwargs["request"]

        # if neither the fixture nor the test use the 'loop' fixture,
        # 'getfixturevalue' will fail because the test is not parameterized
        # (this can be removed someday if 'loop' is no longer parameterized)
        if "loop" not in request.fixturenames:
            raise Exception(
                "Asynchronous fixtures must depend on the 'loop' fixture or "
                "be used in tests depending on it."
            )

        _loop = request.getfixturevalue("loop")

        if is_async_gen:
            # for async generators, we need to advance the generator once,
            # then advance it again in a finalizer
            gen = func(*args, **kwargs)

            def finalizer():  # type: ignore[no-untyped-def]
                try:
                    return _loop.run_until_complete(gen.__anext__())
                except StopAsyncIteration:
                    pass

            request.addfinalizer(finalizer)
            return _loop.run_until_complete(gen.__anext__())
        else:
            return _loop.run_until_complete(func(*args, **kwargs))

    fixturedef.func = wrapper
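
A hedged sketch of the async fixture style this hook enables; note the dependency on the 'loop' fixture, which the wrapper above enforces:

    import pytest
    from aiohttp import web

    @pytest.fixture
    async def cli(loop, aiohttp_client):
        app = web.Application()
        return await aiohttp_client(app)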
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def fast(request): # type: ignore[no-untyped-def]
|
|
||||||
"""--fast config option"""
|
|
||||||
return request.config.getoption("--aiohttp-fast")
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def loop_debug(request): # type: ignore[no-untyped-def]
|
|
||||||
"""--enable-loop-debug config option"""
|
|
||||||
return request.config.getoption("--aiohttp-enable-loop-debug")
|
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def _runtime_warning_context(): # type: ignore[no-untyped-def]
|
|
||||||
"""Context manager which checks for RuntimeWarnings.
|
|
||||||
|
|
||||||
This exists specifically to
|
|
||||||
avoid "coroutine 'X' was never awaited" warnings being missed.
|
|
||||||
|
|
||||||
If RuntimeWarnings occur in the context a RuntimeError is raised.
|
|
||||||
"""
|
|
||||||
with warnings.catch_warnings(record=True) as _warnings:
|
|
||||||
yield
|
|
||||||
rw = [
|
|
||||||
"{w.filename}:{w.lineno}:{w.message}".format(w=w)
|
|
||||||
for w in _warnings
|
|
||||||
if w.category == RuntimeWarning
|
|
||||||
]
|
|
||||||
if rw:
|
|
||||||
raise RuntimeError(
|
|
||||||
"{} Runtime Warning{},\n{}".format(
|
|
||||||
len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
|
|
||||||
"""Passthrough loop context.
|
|
||||||
|
|
||||||
Sets up and tears down a loop unless one is passed in via the loop
|
|
||||||
argument when it's passed straight through.
|
|
||||||
"""
|
|
||||||
if loop:
|
|
||||||
# loop already exists, pass it straight through
|
|
||||||
yield loop
|
|
||||||
else:
|
|
||||||
# this shadows loop_context's standard behavior
|
|
||||||
loop = setup_test_loop()
|
|
||||||
yield loop
|
|
||||||
teardown_test_loop(loop, fast=fast)
|
|
||||||
|
|
||||||
|
|
||||||
def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def]
|
|
||||||
"""Fix pytest collecting for coroutines."""
|
|
||||||
if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
|
|
||||||
return list(collector._genfunctions(name, obj))
|
|
||||||
|
|
||||||
|
|
||||||
def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def]
|
|
||||||
"""Run coroutines in an event loop instead of a normal function call."""
|
|
||||||
fast = pyfuncitem.config.getoption("--aiohttp-fast")
|
|
||||||
if asyncio.iscoroutinefunction(pyfuncitem.function):
|
|
||||||
existing_loop = pyfuncitem.funcargs.get(
|
|
||||||
"proactor_loop"
|
|
||||||
) or pyfuncitem.funcargs.get("loop", None)
|
|
||||||
with _runtime_warning_context():
|
|
||||||
with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
|
|
||||||
testargs = {
|
|
||||||
arg: pyfuncitem.funcargs[arg]
|
|
||||||
for arg in pyfuncitem._fixtureinfo.argnames
|
|
||||||
}
|
|
||||||
_loop.run_until_complete(pyfuncitem.obj(**testargs))
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def]
|
|
||||||
if "loop_factory" not in metafunc.fixturenames:
|
|
||||||
return
|
|
||||||
|
|
||||||
loops = metafunc.config.option.aiohttp_loop
|
|
||||||
avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]]
|
|
||||||
avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}
|
|
||||||
|
|
||||||
if uvloop is not None: # pragma: no cover
|
|
||||||
avail_factories["uvloop"] = uvloop.EventLoopPolicy
|
|
||||||
|
|
||||||
if loops == "all":
|
|
||||||
loops = "pyloop,uvloop?"
|
|
||||||
|
|
||||||
factories = {} # type: ignore[var-annotated]
|
|
||||||
for name in loops.split(","):
|
|
||||||
required = not name.endswith("?")
|
|
||||||
name = name.strip(" ?")
|
|
||||||
        if name not in avail_factories:  # pragma: no cover
            if required:
                raise ValueError(
                    "Unknown loop '%s', available loops: %s"
                    % (name, list(factories.keys()))
                )
            else:
                continue
        factories[name] = avail_factories[name]
    metafunc.parametrize(
        "loop_factory", list(factories.values()), ids=list(factories.keys())
    )


@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
    """Return an instance of the event loop."""
    policy = loop_factory()
    asyncio.set_event_loop_policy(policy)
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop


@pytest.fixture
def proactor_loop():  # type: ignore[no-untyped-def]
    policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
    asyncio.set_event_loop_policy(policy)

    with loop_context(policy.new_event_loop) as _loop:
        asyncio.set_event_loop(_loop)
        yield _loop


@pytest.fixture
def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]:
    warnings.warn(
        "Deprecated, use aiohttp_unused_port fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_unused_port


@pytest.fixture
def aiohttp_unused_port() -> Callable[[], int]:
    """Return a port that is unused on the current host."""
    return _unused_port


@pytest.fixture
def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    servers = []

    async def go(app, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
        server = TestServer(app, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def test_server(aiohttp_server):  # type: ignore[no-untyped-def]  # pragma: no cover
    warnings.warn(
        "Deprecated, use aiohttp_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_server


@pytest.fixture
def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    servers = []

    async def go(handler, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
        server = RawTestServer(handler, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def raw_test_server(  # type: ignore[no-untyped-def]  # pragma: no cover
    aiohttp_raw_server,
):
    warnings.warn(
        "Deprecated, use aiohttp_raw_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_raw_server


@pytest.fixture
def aiohttp_client(
    loop: asyncio.AbstractEventLoop,
) -> Iterator[AiohttpClient]:
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    clients = []

    async def go(
        __param: Union[Application, BaseTestServer],
        *args: Any,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any
    ) -> TestClient:

        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        if isinstance(__param, Application):
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    async def finalize() -> None:
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def test_client(aiohttp_client):  # type: ignore[no-untyped-def]  # pragma: no cover
    warnings.warn(
        "Deprecated, use aiohttp_client fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_client
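
In practice the aiohttp_client fixture above is the main entry point; it accepts an Application, a TestServer, a RawTestServer, or a callable producing one of these. A minimal sketch of a test built on it (the handler and route below are illustrative, not part of this file):

from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.Response(text="Hello, world")


async def test_hello(aiohttp_client) -> None:
    app = web.Application()
    app.router.add_get("/", hello)
    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200
    assert "Hello, world" in await resp.text()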
@@ -1,160 +0,0 @@
import asyncio
import socket
from typing import Any, Dict, List, Optional, Type, Union

from .abc import AbstractResolver
from .helpers import get_running_loop

__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")

try:
    import aiodns

    # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
except ImportError:  # pragma: no cover
    aiodns = None

aiodns_default = False


class ThreadedResolver(AbstractResolver):
    """Threaded resolver.

    Uses an Executor for synchronous getaddrinfo() calls.
    concurrent.futures.ThreadPoolExecutor is used by default.
    """

    def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        self._loop = get_running_loop(loop)

    async def resolve(
        self, hostname: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        infos = await self._loop.getaddrinfo(
            hostname,
            port,
            type=socket.SOCK_STREAM,
            family=family,
            flags=socket.AI_ADDRCONFIG,
        )

        hosts = []
        for family, _, proto, _, address in infos:
            if family == socket.AF_INET6:
                if len(address) < 3:
                    # IPv6 is not supported by the Python build,
                    # or IPv6 is not enabled on the host
                    continue
                if address[3]:
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should
                    # use getnameinfo() unconditionally, but it is skipped here
                    # for performance.
                    host, _port = socket.getnameinfo(
                        address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
                    )
                    port = int(_port)
                else:
                    host, port = address[:2]
            else:  # IPv4
                assert family == socket.AF_INET
                host, port = address  # type: ignore[misc]
            hosts.append(
                {
                    "hostname": hostname,
                    "host": host,
                    "port": port,
                    "family": family,
                    "proto": proto,
                    "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
                }
            )

        return hosts

    async def close(self) -> None:
        pass

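Unlike loop.getaddrinfo(), resolve() returns plain dicts with the keys built above. A small sketch of calling the threaded resolver directly ("example.org" is just a placeholder host):

import asyncio

from aiohttp.resolver import ThreadedResolver


async def main() -> None:
    resolver = ThreadedResolver()
    for entry in await resolver.resolve("example.org", 80):
        # each entry carries hostname, host, port, family, proto and flags
        print(entry["host"], entry["port"])
    await resolver.close()


asyncio.run(main())
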
class AsyncResolver(AbstractResolver):
    """Use the `aiodns` package to make asynchronous DNS lookups"""

    def __init__(
        self,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        *args: Any,
        **kwargs: Any
    ) -> None:
        if aiodns is None:
            raise RuntimeError("Resolver requires aiodns library")

        self._loop = get_running_loop(loop)
        self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)

        if not hasattr(self._resolver, "gethostbyname"):
            # aiodns 1.1 is not available, fallback to DNSResolver.query
            self.resolve = self._resolve_with_query  # type: ignore

    async def resolve(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        try:
            resp = await self._resolver.gethostbyname(host, family)
        except aiodns.error.DNSError as exc:
            # args are (errno, message); guard against a missing message
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(msg) from exc
        hosts = []
        for address in resp.addresses:
            hosts.append(
                {
                    "hostname": host,
                    "host": address,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
                }
            )

        if not hosts:
            raise OSError("DNS lookup failed")

        return hosts

    async def _resolve_with_query(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        if family == socket.AF_INET6:
            qtype = "AAAA"
        else:
            qtype = "A"

        try:
            resp = await self._resolver.query(host, qtype)
        except aiodns.error.DNSError as exc:
            # args are (errno, message); guard against a missing message
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(msg) from exc

        hosts = []
        for rr in resp:
            hosts.append(
                {
                    "hostname": host,
                    "host": rr.host,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST,
                }
            )

        if not hosts:
            raise OSError("DNS lookup failed")

        return hosts

    async def close(self) -> None:
        self._resolver.cancel()


_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
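
DefaultResolver stays ThreadedResolver because aiodns_default is hard-coded to False; opting into AsyncResolver is done explicitly through the connector. A hedged sketch, assuming the aiodns package is installed:

import aiohttp
from aiohttp.resolver import AsyncResolver


async def fetch(url: str) -> str:
    connector = aiohttp.TCPConnector(resolver=AsyncResolver())
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return await resp.text()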
@@ -1,666 +0,0 @@
import asyncio
import collections
import warnings
from typing import (
    Awaitable,
    Callable,
    Deque,
    Final,
    Generic,
    List,
    Optional,
    Tuple,
    TypeVar,
)

from .base_protocol import BaseProtocol
from .helpers import BaseTimerContext, TimerNoop, set_exception, set_result
from .log import internal_logger

__all__ = (
    "EMPTY_PAYLOAD",
    "EofStream",
    "StreamReader",
    "DataQueue",
    "FlowControlDataQueue",
)

_T = TypeVar("_T")


class EofStream(Exception):
    """eof stream indication."""


class AsyncStreamIterator(Generic[_T]):
    def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
        self.read_func = read_func

    def __aiter__(self) -> "AsyncStreamIterator[_T]":
        return self

    async def __anext__(self) -> _T:
        try:
            rv = await self.read_func()
        except EofStream:
            raise StopAsyncIteration
        if rv == b"":
            raise StopAsyncIteration
        return rv


class ChunkTupleAsyncStreamIterator:
    def __init__(self, stream: "StreamReader") -> None:
        self._stream = stream

    def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
        return self

    async def __anext__(self) -> Tuple[bytes, bool]:
        rv = await self._stream.readchunk()
        if rv == (b"", False):
            raise StopAsyncIteration
        return rv


class AsyncStreamReaderMixin:
    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Returns an asynchronous iterator that yields chunks of size n."""
        return AsyncStreamIterator(lambda: self.read(n))  # type: ignore[attr-defined]

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Yield all available data as soon as it is received."""
        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Yield chunks of data as they are received by the server.

        The yielded objects are tuples
        of (bytes, bool) as returned by the StreamReader.readchunk method.
        """
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]

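The mixin above only wires the iteration protocols to readline(), read(), readany() and readchunk(), all provided by StreamReader below. A minimal sketch of the reader in isolation, with a mock standing in for the real transport protocol:

import asyncio
from unittest import mock

from aiohttp.streams import StreamReader


async def demo() -> None:
    protocol = mock.Mock(_reading_paused=False)
    reader = StreamReader(protocol, limit=2**16, loop=asyncio.get_running_loop())
    reader.feed_data(b"hello\nworld")
    reader.feed_eof()
    assert await reader.readline() == b"hello\n"
    assert await reader.read() == b"world"


asyncio.run(demo())
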
class StreamReader(AsyncStreamReaderMixin):
    """An enhancement of asyncio.StreamReader.

    Supports asynchronous iteration by line, chunk or as available::

        async for line in reader:
            ...
        async for chunk in reader.iter_chunked(1024):
            ...
        async for slice in reader.iter_any():
            ...

    """

    total_bytes = 0

    def __init__(
        self,
        protocol: BaseProtocol,
        limit: int,
        *,
        timer: Optional[BaseTimerContext] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        self._protocol = protocol
        self._low_water = limit
        self._high_water = limit * 2
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._size = 0
        self._cursor = 0
        self._http_chunk_splits: Optional[List[int]] = None
        self._buffer: Deque[bytes] = collections.deque()
        self._buffer_offset = 0
        self._eof = False
        self._waiter: Optional[asyncio.Future[None]] = None
        self._eof_waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._timer = TimerNoop() if timer is None else timer
        self._eof_callbacks: List[Callable[[], None]] = []

    def __repr__(self) -> str:
        info = [self.__class__.__name__]
        if self._size:
            info.append("%d bytes" % self._size)
        if self._eof:
            info.append("eof")
        if self._low_water != 2**16:  # default limit
            info.append("low=%d high=%d" % (self._low_water, self._high_water))
        if self._waiter:
            info.append("w=%r" % self._waiter)
        if self._exception:
            info.append("e=%r" % self._exception)
        return "<%s>" % " ".join(info)

    def get_read_buffer_limits(self) -> Tuple[int, int]:
        return (self._low_water, self._high_water)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(self, exc: BaseException) -> None:
        self._exception = exc
        self._eof_callbacks.clear()

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_exception(waiter, exc)

    def on_eof(self, callback: Callable[[], None]) -> None:
        if self._eof:
            try:
                callback()
            except Exception:
                internal_logger.exception("Exception in eof callback")
        else:
            self._eof_callbacks.append(callback)

    def feed_eof(self) -> None:
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_result(waiter, None)

        for cb in self._eof_callbacks:
            try:
                cb()
            except Exception:
                internal_logger.exception("Exception in eof callback")

        self._eof_callbacks.clear()

    def is_eof(self) -> bool:
        """Return True if 'feed_eof' was called."""
        return self._eof

    def at_eof(self) -> bool:
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    async def wait_eof(self) -> None:
        if self._eof:
            return

        assert self._eof_waiter is None
        self._eof_waiter = self._loop.create_future()
        try:
            await self._eof_waiter
        finally:
            self._eof_waiter = None

    def unread_data(self, data: bytes) -> None:
        """rollback reading some data from stream, inserting it to buffer head."""
        warnings.warn(
            "unread_data() is deprecated "
            "and will be removed in future releases (#3260)",
            DeprecationWarning,
            stacklevel=2,
        )
        if not data:
            return

        if self._buffer_offset:
            self._buffer[0] = self._buffer[0][self._buffer_offset :]
            self._buffer_offset = 0
        self._size += len(data)
        self._cursor -= len(data)
        self._buffer.appendleft(data)
        self._eof_counter = 0

    # TODO: size is ignored, remove the param later
    def feed_data(self, data: bytes, size: int = 0) -> None:
        assert not self._eof, "feed_data after feed_eof"

        if not data:
            return

        self._size += len(data)
        self._buffer.append(data)
        self.total_bytes += len(data)

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        if self._size > self._high_water and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    def begin_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            if self.total_bytes:
                raise RuntimeError(
                    "Called begin_http_chunk_receiving when "
                    "some data was already fed"
                )
            self._http_chunk_splits = []

    def end_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            raise RuntimeError(
                "Called end_http_chunk_receiving without calling "
                "begin_http_chunk_receiving first"
            )

        # self._http_chunk_splits contains logical byte offsets from start of
        # the body transfer. Each offset is the offset of the end of a chunk.
        # "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, current position
        # is definitely zero.
        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0

        if self.total_bytes == pos:
            # We should not add empty chunks here. So we check for that.
            # Note, when chunked + gzip is used, we can receive a chunk
            # of compressed data, but that data may not be enough for gzip FSM
            # to yield any uncompressed data. That's why current position may
            # not change after receiving a chunk.
            return

        self._http_chunk_splits.append(self.total_bytes)

        # wake up readchunk when end of http chunk received
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def _wait(self, func_name: str) -> None:
        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have unexpected behaviour: it would not be possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                "%s() called while another coroutine is "
                "already waiting for incoming data" % func_name
            )

        waiter = self._waiter = self._loop.create_future()
        try:
            with self._timer:
                await waiter
        finally:
            self._waiter = None

    async def readline(self) -> bytes:
        return await self.readuntil()

    async def readuntil(self, separator: bytes = b"\n") -> bytes:
        seplen = len(separator)
        if seplen == 0:
            raise ValueError("Separator should be at least one-byte string")

        if self._exception is not None:
            raise self._exception

        chunk = b""
        chunk_size = 0
        not_enough = True

        while not_enough:
            while self._buffer and not_enough:
                offset = self._buffer_offset
                ichar = self._buffer[0].find(separator, offset) + 1
                # Read from current offset to found separator or to the end.
                data = self._read_nowait_chunk(
                    ichar - offset + seplen - 1 if ichar else -1
                )
                chunk += data
                chunk_size += len(data)
                if ichar:
                    not_enough = False

                if chunk_size > self._high_water:
                    raise ValueError("Chunk too big")

            if self._eof:
                break

            if not_enough:
                await self._wait("readuntil")

        return chunk

    async def read(self, n: int = -1) -> bytes:
        if self._exception is not None:
            raise self._exception

        # A migration problem: with DataQueue one has to catch the EofStream
        # exception, so a common pattern is to run payload.read() inside an
        # infinite loop, which can become a real infinite loop with
        # StreamReader. Let's keep this check for one major release.
        if __debug__:
            if self._eof and not self._buffer:
                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
                if self._eof_counter > 5:
                    internal_logger.warning(
                        "Multiple access to StreamReader in eof state, "
                        "might be infinite loop.",
                        stack_info=True,
                    )

        if not n:
            return b""

        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes. So just call self.readany() until EOF.
            blocks = []
            while True:
                block = await self.readany()
                if not block:
                    break
                blocks.append(block)
            return b"".join(blocks)

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("read")

        return self._read_nowait(n)

    async def readany(self) -> bytes:
        if self._exception is not None:
            raise self._exception

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("readany")

        return self._read_nowait(-1)

    async def readchunk(self) -> Tuple[bytes, bool]:
        """Returns a tuple of (data, end_of_http_chunk).

        When chunked transfer encoding is used, end_of_http_chunk is a
        boolean indicating if the end of the data corresponds to the end
        of an HTTP chunk, otherwise it is always False.
        """
        while True:
            if self._exception is not None:
                raise self._exception

            while self._http_chunk_splits:
                pos = self._http_chunk_splits.pop(0)
                if pos == self._cursor:
                    return (b"", True)
                if pos > self._cursor:
                    return (self._read_nowait(pos - self._cursor), True)
                internal_logger.warning(
                    "Skipping HTTP chunk end due to data "
                    "consumption beyond chunk boundary"
                )

            if self._buffer:
                return (self._read_nowait_chunk(-1), False)
                # return (self._read_nowait(-1), False)

            if self._eof:
                # Special case for signifying EOF.
                # (b'', True) is not a final return value actually.
                return (b"", False)

            await self._wait("readchunk")

    async def readexactly(self, n: int) -> bytes:
        if self._exception is not None:
            raise self._exception

        blocks: List[bytes] = []
        while n > 0:
            block = await self.read(n)
            if not block:
                partial = b"".join(blocks)
                raise asyncio.IncompleteReadError(partial, len(partial) + n)
            blocks.append(block)
            n -= len(block)

        return b"".join(blocks)

    def read_nowait(self, n: int = -1) -> bytes:
        # default was changed to be consistent with .read(-1)
        #
        # I believe most users don't know about the method and
        # are not affected.
        if self._exception is not None:
            raise self._exception

        if self._waiter and not self._waiter.done():
            raise RuntimeError(
                "Called while some coroutine is waiting for incoming data."
            )

        return self._read_nowait(n)

    def _read_nowait_chunk(self, n: int) -> bytes:
        first_buffer = self._buffer[0]
        offset = self._buffer_offset
        if n != -1 and len(first_buffer) - offset > n:
            data = first_buffer[offset : offset + n]
            self._buffer_offset += n

        elif offset:
            self._buffer.popleft()
            data = first_buffer[offset:]
            self._buffer_offset = 0

        else:
            data = self._buffer.popleft()

        self._size -= len(data)
        self._cursor += len(data)

        chunk_splits = self._http_chunk_splits
        # Prevent memory leak: drop useless chunk splits
        while chunk_splits and chunk_splits[0] < self._cursor:
            chunk_splits.pop(0)

        if self._size < self._low_water and self._protocol._reading_paused:
            self._protocol.resume_reading()
        return data

    def _read_nowait(self, n: int) -> bytes:
        """Read not more than n bytes, or whole buffer if n == -1"""
        self._timer.assert_timeout()

        chunks = []
        while self._buffer:
            chunk = self._read_nowait_chunk(n)
            chunks.append(chunk)
            if n != -1:
                n -= len(chunk)
                if n == 0:
                    break

        return b"".join(chunks) if chunks else b""


class EmptyStreamReader(StreamReader):  # lgtm [py/missing-call-to-init]
    def __init__(self) -> None:
        self._read_eof_chunk = False

    def __repr__(self) -> str:
        return "<%s>" % self.__class__.__name__

    def exception(self) -> Optional[BaseException]:
        return None

    def set_exception(self, exc: BaseException) -> None:
        pass

    def on_eof(self, callback: Callable[[], None]) -> None:
        try:
            callback()
        except Exception:
            internal_logger.exception("Exception in eof callback")

    def feed_eof(self) -> None:
        pass

    def is_eof(self) -> bool:
        return True

    def at_eof(self) -> bool:
        return True

    async def wait_eof(self) -> None:
        return

    def feed_data(self, data: bytes, n: int = 0) -> None:
        pass

    async def readline(self) -> bytes:
        return b""

    async def read(self, n: int = -1) -> bytes:
        return b""

    # TODO add async def readuntil

    async def readany(self) -> bytes:
        return b""

    async def readchunk(self) -> Tuple[bytes, bool]:
        if not self._read_eof_chunk:
            self._read_eof_chunk = True
            return (b"", False)

        return (b"", True)

    async def readexactly(self, n: int) -> bytes:
        raise asyncio.IncompleteReadError(b"", n)

    def read_nowait(self, n: int = -1) -> bytes:
        return b""


EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()


class DataQueue(Generic[_T]):
    """DataQueue is a general-purpose blocking queue with one reader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._eof = False
        self._waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._size = 0
        self._buffer: Deque[Tuple[_T, int]] = collections.deque()

    def __len__(self) -> int:
        return len(self._buffer)

    def is_eof(self) -> bool:
        return self._eof

    def at_eof(self) -> bool:
        return self._eof and not self._buffer

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(self, exc: BaseException) -> None:
        self._eof = True
        self._exception = exc

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

    def feed_data(self, data: _T, size: int = 0) -> None:
        self._size += size
        self._buffer.append((data, size))

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    def feed_eof(self) -> None:
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def read(self) -> _T:
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._waiter = None
                raise

        if self._buffer:
            data, size = self._buffer.popleft()
            self._size -= size
            return data
        else:
            if self._exception is not None:
                raise self._exception
            else:
                raise EofStream

    def __aiter__(self) -> AsyncStreamIterator[_T]:
        return AsyncStreamIterator(self.read)


class FlowControlDataQueue(DataQueue[_T]):
    """FlowControlDataQueue resumes and pauses an underlying stream.

    It is a destination for parsed data.
    """

    def __init__(
        self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__(loop=loop)

        self._protocol = protocol
        self._limit = limit * 2

    def feed_data(self, data: _T, size: int = 0) -> None:
        super().feed_data(data, size)

        if self._size > self._limit and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    async def read(self) -> _T:
        try:
            return await super().read()
        finally:
            if self._size < self._limit and self._protocol._reading_paused:
                self._protocol.resume_reading()
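
DataQueue is the EofStream-raising counterpart of StreamReader; this is exactly the migration difference mentioned in the read() comment above. A short sketch of its contract:

import asyncio

from aiohttp.streams import DataQueue, EofStream


async def demo() -> None:
    queue: DataQueue[bytes] = DataQueue(asyncio.get_running_loop())
    queue.feed_data(b"frame-1", size=7)
    queue.feed_eof()
    assert await queue.read() == b"frame-1"
    try:
        await queue.read()  # empty and at EOF
    except EofStream:
        pass  # expected: DataQueue signals EOF by raising, not by b""


asyncio.run(demo())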
@@ -1,37 +0,0 @@
"""Helper methods to tune a TCP connection"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import socket
|
|
||||||
from contextlib import suppress
|
|
||||||
from typing import Optional # noqa
|
|
||||||
|
|
||||||
__all__ = ("tcp_keepalive", "tcp_nodelay")
|
|
||||||
|
|
||||||
|
|
||||||
if hasattr(socket, "SO_KEEPALIVE"):
|
|
||||||
|
|
||||||
def tcp_keepalive(transport: asyncio.Transport) -> None:
|
|
||||||
sock = transport.get_extra_info("socket")
|
|
||||||
if sock is not None:
|
|
||||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
|
|
||||||
|
|
||||||
else:
|
|
||||||
|
|
||||||
def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
|
|
||||||
sock = transport.get_extra_info("socket")
|
|
||||||
|
|
||||||
if sock is None:
|
|
||||||
return
|
|
||||||
|
|
||||||
if sock.family not in (socket.AF_INET, socket.AF_INET6):
|
|
||||||
return
|
|
||||||
|
|
||||||
value = bool(value)
|
|
||||||
|
|
||||||
# socket may be closed already, on windows OSError get raised
|
|
||||||
with suppress(OSError):
|
|
||||||
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
|
|
||||||
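
Both helpers are defensive: they quietly do nothing when the transport has no socket, the family is not IP, or the socket is already closed. A sketch of a typical call site, inside a protocol's connection_made():

import asyncio

from aiohttp.tcp_helpers import tcp_keepalive, tcp_nodelay


class LowLatencyProtocol(asyncio.Protocol):
    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        assert isinstance(transport, asyncio.Transport)
        tcp_keepalive(transport)      # enable SO_KEEPALIVE where supported
        tcp_nodelay(transport, True)  # disable Nagle's algorithm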
@@ -1,675 +0,0 @@
"""Utilities shared by tests."""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import contextlib
|
|
||||||
import gc
|
|
||||||
import inspect
|
|
||||||
import ipaddress
|
|
||||||
import os
|
|
||||||
import socket
|
|
||||||
import sys
|
|
||||||
import warnings
|
|
||||||
from abc import ABC, abstractmethod
|
|
||||||
from types import TracebackType
|
|
||||||
from typing import (
|
|
||||||
TYPE_CHECKING,
|
|
||||||
Any,
|
|
||||||
Callable,
|
|
||||||
Iterator,
|
|
||||||
List,
|
|
||||||
Optional,
|
|
||||||
Type,
|
|
||||||
Union,
|
|
||||||
cast,
|
|
||||||
)
|
|
||||||
from unittest import IsolatedAsyncioTestCase, mock
|
|
||||||
|
|
||||||
from aiosignal import Signal
|
|
||||||
from multidict import CIMultiDict, CIMultiDictProxy
|
|
||||||
from yarl import URL
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
from aiohttp.client import _RequestContextManager, _WSRequestContextManager
|
|
||||||
|
|
||||||
from . import ClientSession, hdrs
|
|
||||||
from .abc import AbstractCookieJar
|
|
||||||
from .client_reqrep import ClientResponse
|
|
||||||
from .client_ws import ClientWebSocketResponse
|
|
||||||
from .helpers import sentinel
|
|
||||||
from .http import HttpVersion, RawRequestMessage
|
|
||||||
from .typedefs import StrOrURL
|
|
||||||
from .web import (
|
|
||||||
Application,
|
|
||||||
AppRunner,
|
|
||||||
BaseRunner,
|
|
||||||
Request,
|
|
||||||
Server,
|
|
||||||
ServerRunner,
|
|
||||||
SockSite,
|
|
||||||
UrlMappingMatchInfo,
|
|
||||||
)
|
|
||||||
from .web_protocol import _RequestHandler
|
|
||||||
|
|
||||||
if TYPE_CHECKING: # pragma: no cover
|
|
||||||
from ssl import SSLContext
|
|
||||||
else:
|
|
||||||
SSLContext = None
|
|
||||||
|
|
||||||
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
|
|
||||||
|
|
||||||
|
|
||||||
def get_unused_port_socket(
|
|
||||||
host: str, family: socket.AddressFamily = socket.AF_INET
|
|
||||||
) -> socket.socket:
|
|
||||||
return get_port_socket(host, 0, family)
|
|
||||||
|
|
||||||
|
|
||||||
def get_port_socket(
|
|
||||||
host: str, port: int, family: socket.AddressFamily
|
|
||||||
) -> socket.socket:
|
|
||||||
s = socket.socket(family, socket.SOCK_STREAM)
|
|
||||||
if REUSE_ADDRESS:
|
|
||||||
# Windows has different semantics for SO_REUSEADDR,
|
|
||||||
# so don't set it. Ref:
|
|
||||||
# https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
|
|
||||||
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
|
||||||
s.bind((host, port))
|
|
||||||
return s
|
|
||||||
|
|
||||||
|
|
||||||
def unused_port() -> int:
|
|
||||||
"""Return a port that is unused on the current host."""
|
|
||||||
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
|
|
||||||
s.bind(("127.0.0.1", 0))
|
|
||||||
return cast(int, s.getsockname()[1])
|
|
||||||
|
|
||||||
|
|
||||||
class BaseTestServer(ABC):
    __test__ = False

    def __init__(
        self,
        *,
        scheme: Union[str, object] = sentinel,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        skip_url_asserts: bool = False,
        socket_factory: Callable[
            [str, int, socket.AddressFamily], socket.socket
        ] = get_port_socket,
        **kwargs: Any,
    ) -> None:
        self._loop = loop
        self.runner: Optional[BaseRunner] = None
        self._root: Optional[URL] = None
        self.host = host
        self.port = port
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts
        self.socket_factory = socket_factory

    async def start_server(
        self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
    ) -> None:
        if self.runner:
            return
        self._loop = loop
        self._ssl = kwargs.pop("ssl", None)
        self.runner = await self._make_runner(handler_cancellation=True, **kwargs)
        await self.runner.setup()
        if not self.port:
            self.port = 0
        try:
            version = ipaddress.ip_address(self.host).version
        except ValueError:
            version = 4
        family = socket.AF_INET6 if version == 6 else socket.AF_INET
        _sock = self.socket_factory(self.host, self.port, family)
        self.host, self.port = _sock.getsockname()[:2]
        site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
        await site.start()
        server = site._server
        assert server is not None
        sockets = server.sockets  # type: ignore[attr-defined]
        assert sockets is not None
        self.port = sockets[0].getsockname()[1]
        if self.scheme is sentinel:
            if self._ssl:
                scheme = "https"
            else:
                scheme = "http"
            self.scheme = scheme
        self._root = URL(f"{self.scheme}://{self.host}:{self.port}")

    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        pass

    def make_url(self, path: StrOrURL) -> URL:
        assert self._root is not None
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.is_absolute()
            return self._root.join(url)
        else:
            return URL(str(self._root) + str(path))

    @property
    def started(self) -> bool:
        return self.runner is not None

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def handler(self) -> Server:
        # for backward compatibility
        # web.Server instance
        runner = self.runner
        assert runner is not None
        assert runner.server is not None
        return runner.server

    async def close(self) -> None:
        """Close all fixtures created by the test server.

        After that point, the server is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            assert self.runner is not None
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self) -> None:
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in a pair with __enter__ but is never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> "BaseTestServer":
        await self.start_server(loop=self._loop)
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        await self.close()


class TestServer(BaseTestServer):
    def __init__(
        self,
        app: Application,
        *,
        scheme: Union[str, object] = sentinel,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ):
        self.app = app
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        return AppRunner(self.app, **kwargs)


class RawTestServer(BaseTestServer):
    def __init__(
        self,
        handler: _RequestHandler,
        *,
        scheme: Union[str, object] = sentinel,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ) -> None:
        self._handler = handler
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
        srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
        return ServerRunner(srv, debug=debug, **kwargs)

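TestServer can also be used on its own, without TestClient, when a real aiohttp ClientSession is under test. A minimal sketch (handler and route are illustrative):

import aiohttp
from aiohttp import web
from aiohttp.test_utils import TestServer


async def demo() -> None:
    async def hello(request: web.Request) -> web.Response:
        return web.Response(text="ok")

    app = web.Application()
    app.router.add_get("/", hello)
    async with TestServer(app) as server:
        async with aiohttp.ClientSession() as session:
            async with session.get(server.make_url("/")) as resp:
                assert await resp.text() == "ok"
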
class TestClient:
    """
    A test client implementation.

    For writing functional tests for aiohttp based servers.

    """

    __test__ = False

    def __init__(
        self,
        server: BaseTestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any,
    ) -> None:
        if not isinstance(server, BaseTestServer):
            raise TypeError(
                "server must be a TestServer instance, found type: %r" % type(server)
            )
        self._server = server
        self._loop = loop
        if cookie_jar is None:
            cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
        self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
        self._closed = False
        self._responses: List[ClientResponse] = []
        self._websockets: List[ClientWebSocketResponse] = []

    async def start_server(self) -> None:
        await self._server.start_server(loop=self._loop)

    @property
    def host(self) -> str:
        return self._server.host

    @property
    def port(self) -> Optional[int]:
        return self._server.port

    @property
    def server(self) -> BaseTestServer:
        return self._server

    @property
    def app(self) -> Optional[Application]:
        return cast(Optional[Application], getattr(self._server, "app", None))

    @property
    def session(self) -> ClientSession:
        """An internal aiohttp.ClientSession.

        Unlike the methods on the TestClient, client session requests
        do not automatically include the host in the url queried, and
        will require an absolute path to the resource.

        """
        return self._session

    def make_url(self, path: StrOrURL) -> URL:
        return self._server.make_url(path)

    async def _request(
        self, method: str, path: StrOrURL, **kwargs: Any
    ) -> ClientResponse:
        resp = await self._session.request(method, self.make_url(path), **kwargs)
        # save it to close later
        self._responses.append(resp)
        return resp

    def request(
        self, method: str, path: StrOrURL, **kwargs: Any
    ) -> _RequestContextManager:
        """Routes a request to the tested http server.

        The interface is identical to aiohttp.ClientSession.request,
        except the loop kwarg is overridden by the instance used by the
        test server.

        """
        return _RequestContextManager(self._request(method, path, **kwargs))

    def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP GET request."""
        return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))

    def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP POST request."""
        return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))

    def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP OPTIONS request."""
        return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs))

    def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP HEAD request."""
        return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))

    def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP PUT request."""
        return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))

    def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP PATCH request."""
        return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs))

    def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP DELETE request."""
        return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs))

    def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager:
        """Initiate websocket connection.

        The api corresponds to aiohttp.ClientSession.ws_connect.

        """
        return _WSRequestContextManager(self._ws_connect(path, **kwargs))

    async def _ws_connect(
        self, path: StrOrURL, **kwargs: Any
    ) -> ClientWebSocketResponse:
        ws = await self._session.ws_connect(self.make_url(path), **kwargs)
        self._websockets.append(ws)
        return ws

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run on exit when used as a(n) (asynchronous)
        context manager.

        """
        if not self._closed:
            for resp in self._responses:
                resp.close()
            for ws in self._websockets:
                await ws.close()
            await self._session.close()
            await self._server.close()
            self._closed = True

    def __enter__(self) -> None:
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in a pair with __enter__ but is never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> "TestClient":
        await self.start_server()
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        await self.close()


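Outside of pytest, the client can be driven directly as an async context manager; closing it closes its responses, websockets, session and server, in that order, as shown above. A small sketch:

from aiohttp import web
from aiohttp.test_utils import TestClient, TestServer


async def demo() -> None:
    app = web.Application()
    async with TestClient(TestServer(app)) as client:
        resp = await client.get("/missing")
        assert resp.status == 404
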
class AioHTTPTestCase(IsolatedAsyncioTestCase):
    """A base class to allow for unittest web applications using aiohttp.

    Provides the following:

    * self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
    * self.loop (asyncio.BaseEventLoop): the event loop in which the
        application and server are running.
    * self.app (aiohttp.web.Application): the application returned by
        self.get_application()

    Note that the TestClient's methods are asynchronous: you have to
    execute functions on the test client using asynchronous methods.
    """

    async def get_application(self) -> Application:
        """Get application.

        This method should be overridden
        to return the aiohttp.web.Application
        object to test.
        """
        return self.get_app()

    def get_app(self) -> Application:
        """Obsolete method used to construct a web application.

        Use the .get_application() coroutine instead.
        """
        raise RuntimeError("Did you forget to define get_application()?")

    async def asyncSetUp(self) -> None:
        self.loop = asyncio.get_running_loop()
        return await self.setUpAsync()

    async def setUpAsync(self) -> None:
        self.app = await self.get_application()
        self.server = await self.get_server(self.app)
        self.client = await self.get_client(self.server)

        await self.client.start_server()

    async def asyncTearDown(self) -> None:
        return await self.tearDownAsync()

    async def tearDownAsync(self) -> None:
        await self.client.close()

    async def get_server(self, app: Application) -> TestServer:
        """Return a TestServer instance."""
        return TestServer(app, loop=self.loop)

    async def get_client(self, server: TestServer) -> TestClient:
        """Return a TestClient instance."""
        return TestClient(server, loop=self.loop)


def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
    """
    A decorator dedicated to use with asynchronous AioHTTPTestCase test methods.

    In 3.8+, this does nothing.
    """
    warnings.warn(
        "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
        DeprecationWarning,
        stacklevel=2,
    )
    return func


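A minimal subclass, following the pattern the docstring describes (handler and route are illustrative):

from aiohttp import web
from aiohttp.test_utils import AioHTTPTestCase


class HelloTestCase(AioHTTPTestCase):
    async def get_application(self) -> web.Application:
        async def hello(request: web.Request) -> web.Response:
            return web.Response(text="hello")

        app = web.Application()
        app.router.add_get("/", hello)
        return app

    async def test_hello(self) -> None:
        async with self.client.get("/") as resp:
            self.assertEqual(resp.status, 200)
            self.assertEqual(await resp.text(), "hello")
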
_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]


@contextlib.contextmanager
def loop_context(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
    """A contextmanager that creates an event_loop, for test purposes.

    Handles the creation and cleanup of a test loop.
    """
    loop = setup_test_loop(loop_factory)
    yield loop
    teardown_test_loop(loop, fast=fast)


def setup_test_loop(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
    """Create and return an asyncio.BaseEventLoop instance.

    The caller should also call teardown_test_loop,
    once they are done with the loop.
    """
    loop = loop_factory()
    asyncio.set_event_loop(loop)
    return loop


def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
    """Teardown and cleanup an event_loop created by setup_test_loop."""
    closed = loop.is_closed()
    if not closed:
        loop.call_soon(loop.stop)
        loop.run_forever()
        loop.close()

    if not fast:
        gc.collect()

    asyncio.set_event_loop(None)


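These three helpers underpin the loop fixture above; loop_context can also be used standalone from synchronous test code:

import asyncio

from aiohttp.test_utils import loop_context

with loop_context() as loop:
    loop.run_until_complete(asyncio.sleep(0))
# the loop is stopped, closed and detached on exit
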
def _create_app_mock() -> mock.MagicMock:
    def get_dict(app: Any, key: str) -> Any:
        return app.__app_dict[key]

    def set_dict(app: Any, key: str, value: Any) -> None:
        app.__app_dict[key] = value

    app = mock.MagicMock(spec=Application)
    app.__app_dict = {}
    app.__getitem__ = get_dict
    app.__setitem__ = set_dict

    app._debug = False
    app.on_response_prepare = Signal(app)
    app.on_response_prepare.freeze()
    return app


def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
    transport = mock.Mock()

    def get_extra_info(key: str) -> Optional[SSLContext]:
        if key == "sslcontext":
            return sslcontext
        else:
            return None

    transport.get_extra_info.side_effect = get_extra_info
    return transport


def make_mocked_request(
    method: str,
    path: str,
    headers: Any = None,
    *,
    match_info: Any = sentinel,
    version: HttpVersion = HttpVersion(1, 1),
    closing: bool = False,
    app: Any = None,
    writer: Any = sentinel,
    protocol: Any = sentinel,
    transport: Any = sentinel,
    payload: Any = sentinel,
    sslcontext: Optional[SSLContext] = None,
    client_max_size: int = 1024**2,
    loop: Any = ...,
) -> Request:
    """Creates a mocked web.Request for testing purposes.

    Useful in unit tests, when spinning up a full web server is overkill
    or specific conditions and errors are hard to trigger.
    """
    task = mock.Mock()
    if loop is ...:
        loop = mock.Mock()
        loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        closing = True

    if headers:
        headers = CIMultiDictProxy(CIMultiDict(headers))
        raw_hdrs = tuple(
            (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
        )
    else:
        headers = CIMultiDictProxy(CIMultiDict())
        raw_hdrs = ()

    chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()

    message = RawRequestMessage(
        method,
        path,
        version,
        headers,
        raw_hdrs,
        closing,
        None,
        False,
        chunked,
        URL(path),
    )
    if app is None:
        app = _create_app_mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if protocol is sentinel:
        protocol = mock.Mock()
        protocol.transport = transport

    if writer is sentinel:
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    protocol.transport = transport
    protocol.writer = writer

    if payload is sentinel:
        payload = mock.Mock()

    req = Request(
        message, payload, protocol, writer, task, loop, client_max_size=client_max_size
    )

    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock()
    )
    match_info.add_app(app)
    req._match_info = match_info

    return req


def make_mocked_coro(
    return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
    """Creates a coroutine mock."""

    async def mock_coro(*args: Any, **kwargs: Any) -> Any:
        if raise_exception is not sentinel:
            raise raise_exception
        if not inspect.isawaitable(return_value):
            return return_value
        await return_value

    return mock.Mock(wraps=mock_coro)
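
make_mocked_request lets a handler be unit-tested without any server or transport. A sketch mirroring the docstring (the "token" header is an arbitrary example):

from aiohttp import web
from aiohttp.test_utils import make_mocked_request


async def handler(request: web.Request) -> web.Response:
    assert request.headers.get("token") == "x"
    return web.Response(text="ok")


async def test_handler() -> None:
    req = make_mocked_request("GET", "/", headers={"token": "x"})
    resp = await handler(req)
    assert resp.text == "ok"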
@@ -1,471 +0,0 @@
from types import SimpleNamespace
from typing import TYPE_CHECKING, Awaitable, Optional, Protocol, Type, TypeVar

import attr
from aiosignal import Signal
from multidict import CIMultiDict
from yarl import URL

from .client_reqrep import ClientResponse

if TYPE_CHECKING:  # pragma: no cover
    from .client import ClientSession

    _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)

    class _SignalCallback(Protocol[_ParamT_contra]):
        def __call__(
            self,
            __client_session: ClientSession,
            __trace_config_ctx: SimpleNamespace,
            __params: _ParamT_contra,
        ) -> Awaitable[None]:
            ...


__all__ = (
    "TraceConfig",
    "TraceRequestStartParams",
    "TraceRequestEndParams",
    "TraceRequestExceptionParams",
    "TraceConnectionQueuedStartParams",
    "TraceConnectionQueuedEndParams",
    "TraceConnectionCreateStartParams",
    "TraceConnectionCreateEndParams",
    "TraceConnectionReuseconnParams",
    "TraceDnsResolveHostStartParams",
    "TraceDnsResolveHostEndParams",
    "TraceDnsCacheHitParams",
    "TraceDnsCacheMissParams",
    "TraceRequestRedirectParams",
    "TraceRequestChunkSentParams",
    "TraceResponseChunkReceivedParams",
    "TraceRequestHeadersSentParams",
)

class TraceConfig:
|
|
||||||
"""First-class used to trace requests launched via ClientSession objects."""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
|
|
||||||
) -> None:
|
|
||||||
self._on_request_start: Signal[
|
|
||||||
_SignalCallback[TraceRequestStartParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_request_chunk_sent: Signal[
|
|
||||||
_SignalCallback[TraceRequestChunkSentParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_response_chunk_received: Signal[
|
|
||||||
_SignalCallback[TraceResponseChunkReceivedParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal(
|
|
||||||
self
|
|
||||||
)
|
|
||||||
self._on_request_exception: Signal[
|
|
||||||
_SignalCallback[TraceRequestExceptionParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_request_redirect: Signal[
|
|
||||||
_SignalCallback[TraceRequestRedirectParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_connection_queued_start: Signal[
|
|
||||||
_SignalCallback[TraceConnectionQueuedStartParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_connection_queued_end: Signal[
|
|
||||||
_SignalCallback[TraceConnectionQueuedEndParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_connection_create_start: Signal[
|
|
||||||
_SignalCallback[TraceConnectionCreateStartParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_connection_create_end: Signal[
|
|
||||||
_SignalCallback[TraceConnectionCreateEndParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_connection_reuseconn: Signal[
|
|
||||||
_SignalCallback[TraceConnectionReuseconnParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_dns_resolvehost_start: Signal[
|
|
||||||
_SignalCallback[TraceDnsResolveHostStartParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_dns_resolvehost_end: Signal[
|
|
||||||
_SignalCallback[TraceDnsResolveHostEndParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_dns_cache_hit: Signal[
|
|
||||||
_SignalCallback[TraceDnsCacheHitParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_dns_cache_miss: Signal[
|
|
||||||
_SignalCallback[TraceDnsCacheMissParams]
|
|
||||||
] = Signal(self)
|
|
||||||
self._on_request_headers_sent: Signal[
|
|
||||||
_SignalCallback[TraceRequestHeadersSentParams]
|
|
||||||
] = Signal(self)
|
|
||||||
|
|
||||||
self._trace_config_ctx_factory = trace_config_ctx_factory
|
|
||||||
|
|
||||||
def trace_config_ctx(
|
|
||||||
self, trace_request_ctx: Optional[SimpleNamespace] = None
|
|
||||||
) -> SimpleNamespace:
|
|
||||||
"""Return a new trace_config_ctx instance"""
|
|
||||||
return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)
|
|
||||||
|
|
||||||
def freeze(self) -> None:
|
|
||||||
self._on_request_start.freeze()
|
|
||||||
self._on_request_chunk_sent.freeze()
|
|
||||||
self._on_response_chunk_received.freeze()
|
|
||||||
self._on_request_end.freeze()
|
|
||||||
self._on_request_exception.freeze()
|
|
||||||
self._on_request_redirect.freeze()
|
|
||||||
self._on_connection_queued_start.freeze()
|
|
||||||
self._on_connection_queued_end.freeze()
|
|
||||||
self._on_connection_create_start.freeze()
|
|
||||||
self._on_connection_create_end.freeze()
|
|
||||||
self._on_connection_reuseconn.freeze()
|
|
||||||
self._on_dns_resolvehost_start.freeze()
|
|
||||||
self._on_dns_resolvehost_end.freeze()
|
|
||||||
self._on_dns_cache_hit.freeze()
|
|
||||||
self._on_dns_cache_miss.freeze()
|
|
||||||
self._on_request_headers_sent.freeze()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
|
|
||||||
return self._on_request_start
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_request_chunk_sent(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
|
|
||||||
return self._on_request_chunk_sent
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_response_chunk_received(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
|
|
||||||
return self._on_response_chunk_received
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
|
|
||||||
return self._on_request_end
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_request_exception(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
|
|
||||||
return self._on_request_exception
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_request_redirect(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
|
|
||||||
return self._on_request_redirect
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_connection_queued_start(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
|
|
||||||
return self._on_connection_queued_start
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_connection_queued_end(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
|
|
||||||
return self._on_connection_queued_end
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_connection_create_start(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
|
|
||||||
return self._on_connection_create_start
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_connection_create_end(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
|
|
||||||
return self._on_connection_create_end
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_connection_reuseconn(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
|
|
||||||
return self._on_connection_reuseconn
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_dns_resolvehost_start(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
|
|
||||||
return self._on_dns_resolvehost_start
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_dns_resolvehost_end(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
|
|
||||||
return self._on_dns_resolvehost_end
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
|
|
||||||
return self._on_dns_cache_hit
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
|
|
||||||
return self._on_dns_cache_miss
|
|
||||||
|
|
||||||
@property
|
|
||||||
def on_request_headers_sent(
|
|
||||||
self,
|
|
||||||
) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
|
|
||||||
return self._on_request_headers_sent
|
|
||||||
|
|
||||||
|
|
||||||
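

# A minimal usage sketch (hypothetical callback names): callbacks appended to
# the signals above must be coroutines accepting (session, trace_config_ctx,
# params), matching the _SignalCallback protocol.
async def _example_trace_config_usage() -> None:
    import aiohttp

    async def on_request_start(session, trace_config_ctx, params) -> None:
        print(f"Starting {params.method} request to {params.url}")

    trace_config = aiohttp.TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    async with aiohttp.ClientSession(trace_configs=[trace_config]) as session:
        async with session.get("https://example.com") as resp:
            await resp.read()
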
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestStartParams:
    """Parameters sent by the `on_request_start` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestChunkSentParams:
    """Parameters sent by the `on_request_chunk_sent` signal"""

    method: str
    url: URL
    chunk: bytes


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceResponseChunkReceivedParams:
    """Parameters sent by the `on_response_chunk_received` signal"""

    method: str
    url: URL
    chunk: bytes


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestEndParams:
    """Parameters sent by the `on_request_end` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"
    response: ClientResponse


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestExceptionParams:
    """Parameters sent by the `on_request_exception` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"
    exception: BaseException


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestRedirectParams:
    """Parameters sent by the `on_request_redirect` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"
    response: ClientResponse


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedStartParams:
    """Parameters sent by the `on_connection_queued_start` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedEndParams:
    """Parameters sent by the `on_connection_queued_end` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateStartParams:
    """Parameters sent by the `on_connection_create_start` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateEndParams:
    """Parameters sent by the `on_connection_create_end` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionReuseconnParams:
    """Parameters sent by the `on_connection_reuseconn` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostStartParams:
    """Parameters sent by the `on_dns_resolvehost_start` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostEndParams:
    """Parameters sent by the `on_dns_resolvehost_end` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheHitParams:
    """Parameters sent by the `on_dns_cache_hit` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheMissParams:
    """Parameters sent by the `on_dns_cache_miss` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestHeadersSentParams:
    """Parameters sent by the `on_request_headers_sent` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"


class Trace:
    """Internal dependency holder class.

    Used to keep together the main dependencies used
    at the moment of sending a signal.
    """

    def __init__(
        self,
        session: "ClientSession",
        trace_config: TraceConfig,
        trace_config_ctx: SimpleNamespace,
    ) -> None:
        self._trace_config = trace_config
        self._trace_config_ctx = trace_config_ctx
        self._session = session

    async def send_request_start(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        return await self._trace_config.on_request_start.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestStartParams(method, url, headers),
        )

    async def send_request_chunk_sent(
        self, method: str, url: URL, chunk: bytes
    ) -> None:
        return await self._trace_config.on_request_chunk_sent.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestChunkSentParams(method, url, chunk),
        )

    async def send_response_chunk_received(
        self, method: str, url: URL, chunk: bytes
    ) -> None:
        return await self._trace_config.on_response_chunk_received.send(
            self._session,
            self._trace_config_ctx,
            TraceResponseChunkReceivedParams(method, url, chunk),
        )

    async def send_request_end(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        response: ClientResponse,
    ) -> None:
        return await self._trace_config.on_request_end.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestEndParams(method, url, headers, response),
        )

    async def send_request_exception(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        exception: BaseException,
    ) -> None:
        return await self._trace_config.on_request_exception.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestExceptionParams(method, url, headers, exception),
        )

    async def send_request_redirect(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        response: ClientResponse,
    ) -> None:
        return await self._trace_config._on_request_redirect.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestRedirectParams(method, url, headers, response),
        )

    async def send_connection_queued_start(self) -> None:
        return await self._trace_config.on_connection_queued_start.send(
            self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams()
        )

    async def send_connection_queued_end(self) -> None:
        return await self._trace_config.on_connection_queued_end.send(
            self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams()
        )

    async def send_connection_create_start(self) -> None:
        return await self._trace_config.on_connection_create_start.send(
            self._session, self._trace_config_ctx, TraceConnectionCreateStartParams()
        )

    async def send_connection_create_end(self) -> None:
        return await self._trace_config.on_connection_create_end.send(
            self._session, self._trace_config_ctx, TraceConnectionCreateEndParams()
        )

    async def send_connection_reuseconn(self) -> None:
        return await self._trace_config.on_connection_reuseconn.send(
            self._session, self._trace_config_ctx, TraceConnectionReuseconnParams()
        )

    async def send_dns_resolvehost_start(self, host: str) -> None:
        return await self._trace_config.on_dns_resolvehost_start.send(
            self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host)
        )

    async def send_dns_resolvehost_end(self, host: str) -> None:
        return await self._trace_config.on_dns_resolvehost_end.send(
            self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host)
        )

    async def send_dns_cache_hit(self, host: str) -> None:
        return await self._trace_config.on_dns_cache_hit.send(
            self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host)
        )

    async def send_dns_cache_miss(self, host: str) -> None:
        return await self._trace_config.on_dns_cache_miss.send(
            self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
        )

    async def send_request_headers(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        return await self._trace_config._on_request_headers_sent.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestHeadersSentParams(method, url, headers),
        )
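
# Sketch of the per-request context plumbing (values illustrative): the session
# calls trace_config_ctx(trace_request_ctx=...) once per request, so callers
# can pass data through to every callback:
#
#     async with session.get(url, trace_request_ctx={"tag": "warmup"}) as resp:
#         ...
#
# Inside a callback, trace_config_ctx.trace_request_ctx == {"tag": "warmup"}.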
@@ -1,54 +0,0 @@
import json
import os
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Iterable,
    Mapping,
    Tuple,
    Union,
)

from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
from yarl import URL

DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads

if TYPE_CHECKING:  # pragma: no cover
    _CIMultiDict = CIMultiDict[str]
    _CIMultiDictProxy = CIMultiDictProxy[str]
    _MultiDict = MultiDict[str]
    _MultiDictProxy = MultiDictProxy[str]
    from http.cookies import BaseCookie, Morsel

    from .web import Request, StreamResponse
else:
    _CIMultiDict = CIMultiDict
    _CIMultiDictProxy = CIMultiDictProxy
    _MultiDict = MultiDict
    _MultiDictProxy = MultiDictProxy

Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy]
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]

LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
    Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
    LooseCookiesMappings,
    LooseCookiesIterables,
    "BaseCookie[str]",
]

Handler = Callable[["Request"], Awaitable["StreamResponse"]]
Middleware = Callable[["Request", Handler], Awaitable["StreamResponse"]]

PathLike = Union[str, "os.PathLike[str]"]
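
# A minimal sketch (hypothetical names) of callables matching the Handler and
# Middleware aliases above; the quoted forward references resolve to
# web.Request / web.StreamResponse:
#
#     from aiohttp import web
#
#     async def hello(request: web.Request) -> web.StreamResponse:
#         return web.Response(text="hello")  # satisfies Handler
#
#     @web.middleware
#     async def logger_mw(request, handler) -> web.StreamResponse:
#         print(request.path)
#         return await handler(request)  # satisfies Middleware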
@@ -1,616 +0,0 @@
import asyncio
import logging
import os
import socket
import sys
import warnings
from argparse import ArgumentParser
from collections.abc import Iterable
from contextlib import suppress
from functools import partial
from importlib import import_module
from typing import (
    Any,
    Awaitable,
    Callable,
    Iterable as TypingIterable,
    List,
    Optional,
    Set,
    Type,
    Union,
    cast,
)
from weakref import WeakSet

from .abc import AbstractAccessLogger
from .helpers import AppKey as AppKey
from .log import access_logger
from .typedefs import PathLike
from .web_app import Application as Application, CleanupError as CleanupError
from .web_exceptions import (
    HTTPAccepted as HTTPAccepted,
    HTTPBadGateway as HTTPBadGateway,
    HTTPBadRequest as HTTPBadRequest,
    HTTPClientError as HTTPClientError,
    HTTPConflict as HTTPConflict,
    HTTPCreated as HTTPCreated,
    HTTPError as HTTPError,
    HTTPException as HTTPException,
    HTTPExpectationFailed as HTTPExpectationFailed,
    HTTPFailedDependency as HTTPFailedDependency,
    HTTPForbidden as HTTPForbidden,
    HTTPFound as HTTPFound,
    HTTPGatewayTimeout as HTTPGatewayTimeout,
    HTTPGone as HTTPGone,
    HTTPInsufficientStorage as HTTPInsufficientStorage,
    HTTPInternalServerError as HTTPInternalServerError,
    HTTPLengthRequired as HTTPLengthRequired,
    HTTPMethodNotAllowed as HTTPMethodNotAllowed,
    HTTPMisdirectedRequest as HTTPMisdirectedRequest,
    HTTPMove as HTTPMove,
    HTTPMovedPermanently as HTTPMovedPermanently,
    HTTPMultipleChoices as HTTPMultipleChoices,
    HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
    HTTPNoContent as HTTPNoContent,
    HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
    HTTPNotAcceptable as HTTPNotAcceptable,
    HTTPNotExtended as HTTPNotExtended,
    HTTPNotFound as HTTPNotFound,
    HTTPNotImplemented as HTTPNotImplemented,
    HTTPNotModified as HTTPNotModified,
    HTTPOk as HTTPOk,
    HTTPPartialContent as HTTPPartialContent,
    HTTPPaymentRequired as HTTPPaymentRequired,
    HTTPPermanentRedirect as HTTPPermanentRedirect,
    HTTPPreconditionFailed as HTTPPreconditionFailed,
    HTTPPreconditionRequired as HTTPPreconditionRequired,
    HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
    HTTPRedirection as HTTPRedirection,
    HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
    HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
    HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
    HTTPRequestTimeout as HTTPRequestTimeout,
    HTTPRequestURITooLong as HTTPRequestURITooLong,
    HTTPResetContent as HTTPResetContent,
    HTTPSeeOther as HTTPSeeOther,
    HTTPServerError as HTTPServerError,
    HTTPServiceUnavailable as HTTPServiceUnavailable,
    HTTPSuccessful as HTTPSuccessful,
    HTTPTemporaryRedirect as HTTPTemporaryRedirect,
    HTTPTooManyRequests as HTTPTooManyRequests,
    HTTPUnauthorized as HTTPUnauthorized,
    HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
    HTTPUnprocessableEntity as HTTPUnprocessableEntity,
    HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
    HTTPUpgradeRequired as HTTPUpgradeRequired,
    HTTPUseProxy as HTTPUseProxy,
    HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
    HTTPVersionNotSupported as HTTPVersionNotSupported,
    NotAppKeyWarning as NotAppKeyWarning,
)
from .web_fileresponse import FileResponse as FileResponse
from .web_log import AccessLogger
from .web_middlewares import (
    middleware as middleware,
    normalize_path_middleware as normalize_path_middleware,
)
from .web_protocol import (
    PayloadAccessError as PayloadAccessError,
    RequestHandler as RequestHandler,
    RequestPayloadError as RequestPayloadError,
)
from .web_request import (
    BaseRequest as BaseRequest,
    FileField as FileField,
    Request as Request,
)
from .web_response import (
    ContentCoding as ContentCoding,
    Response as Response,
    StreamResponse as StreamResponse,
    json_response as json_response,
)
from .web_routedef import (
    AbstractRouteDef as AbstractRouteDef,
    RouteDef as RouteDef,
    RouteTableDef as RouteTableDef,
    StaticDef as StaticDef,
    delete as delete,
    get as get,
    head as head,
    options as options,
    patch as patch,
    post as post,
    put as put,
    route as route,
    static as static,
    view as view,
)
from .web_runner import (
    AppRunner as AppRunner,
    BaseRunner as BaseRunner,
    BaseSite as BaseSite,
    GracefulExit as GracefulExit,
    NamedPipeSite as NamedPipeSite,
    ServerRunner as ServerRunner,
    SockSite as SockSite,
    TCPSite as TCPSite,
    UnixSite as UnixSite,
)
from .web_server import Server as Server
from .web_urldispatcher import (
    AbstractResource as AbstractResource,
    AbstractRoute as AbstractRoute,
    DynamicResource as DynamicResource,
    PlainResource as PlainResource,
    PrefixedSubAppResource as PrefixedSubAppResource,
    Resource as Resource,
    ResourceRoute as ResourceRoute,
    StaticResource as StaticResource,
    UrlDispatcher as UrlDispatcher,
    UrlMappingMatchInfo as UrlMappingMatchInfo,
    View as View,
)
from .web_ws import (
    WebSocketReady as WebSocketReady,
    WebSocketResponse as WebSocketResponse,
    WSMsgType as WSMsgType,
)

__all__ = (
    # web_app
    "AppKey",
    "Application",
    "CleanupError",
    # web_exceptions
    "NotAppKeyWarning",
    "HTTPAccepted",
    "HTTPBadGateway",
    "HTTPBadRequest",
    "HTTPClientError",
    "HTTPConflict",
    "HTTPCreated",
    "HTTPError",
    "HTTPException",
    "HTTPExpectationFailed",
    "HTTPFailedDependency",
    "HTTPForbidden",
    "HTTPFound",
    "HTTPGatewayTimeout",
    "HTTPGone",
    "HTTPInsufficientStorage",
    "HTTPInternalServerError",
    "HTTPLengthRequired",
    "HTTPMethodNotAllowed",
    "HTTPMisdirectedRequest",
    "HTTPMove",
    "HTTPMovedPermanently",
    "HTTPMultipleChoices",
    "HTTPNetworkAuthenticationRequired",
    "HTTPNoContent",
    "HTTPNonAuthoritativeInformation",
    "HTTPNotAcceptable",
    "HTTPNotExtended",
    "HTTPNotFound",
    "HTTPNotImplemented",
    "HTTPNotModified",
    "HTTPOk",
    "HTTPPartialContent",
    "HTTPPaymentRequired",
    "HTTPPermanentRedirect",
    "HTTPPreconditionFailed",
    "HTTPPreconditionRequired",
    "HTTPProxyAuthenticationRequired",
    "HTTPRedirection",
    "HTTPRequestEntityTooLarge",
    "HTTPRequestHeaderFieldsTooLarge",
    "HTTPRequestRangeNotSatisfiable",
    "HTTPRequestTimeout",
    "HTTPRequestURITooLong",
    "HTTPResetContent",
    "HTTPSeeOther",
    "HTTPServerError",
    "HTTPServiceUnavailable",
    "HTTPSuccessful",
    "HTTPTemporaryRedirect",
    "HTTPTooManyRequests",
    "HTTPUnauthorized",
    "HTTPUnavailableForLegalReasons",
    "HTTPUnprocessableEntity",
    "HTTPUnsupportedMediaType",
    "HTTPUpgradeRequired",
    "HTTPUseProxy",
    "HTTPVariantAlsoNegotiates",
    "HTTPVersionNotSupported",
    # web_fileresponse
    "FileResponse",
    # web_middlewares
    "middleware",
    "normalize_path_middleware",
    # web_protocol
    "PayloadAccessError",
    "RequestHandler",
    "RequestPayloadError",
    # web_request
    "BaseRequest",
    "FileField",
    "Request",
    # web_response
    "ContentCoding",
    "Response",
    "StreamResponse",
    "json_response",
    # web_routedef
    "AbstractRouteDef",
    "RouteDef",
    "RouteTableDef",
    "StaticDef",
    "delete",
    "get",
    "head",
    "options",
    "patch",
    "post",
    "put",
    "route",
    "static",
    "view",
    # web_runner
    "AppRunner",
    "BaseRunner",
    "BaseSite",
    "GracefulExit",
    "ServerRunner",
    "SockSite",
    "TCPSite",
    "UnixSite",
    "NamedPipeSite",
    # web_server
    "Server",
    # web_urldispatcher
    "AbstractResource",
    "AbstractRoute",
    "DynamicResource",
    "PlainResource",
    "PrefixedSubAppResource",
    "Resource",
    "ResourceRoute",
    "StaticResource",
    "UrlDispatcher",
    "UrlMappingMatchInfo",
    "View",
    # web_ws
    "WebSocketReady",
    "WebSocketResponse",
    "WSMsgType",
    # web
    "run_app",
)


try:
    from ssl import SSLContext
except ImportError:  # pragma: no cover
    SSLContext = Any  # type: ignore[misc,assignment]

# Only display warning when using -Wdefault, -We, -X dev or similar.
warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True)

HostSequence = TypingIterable[str]


async def _run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Union[PathLike, TypingIterable[PathLike], None] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Optional[Callable[..., None]] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    handler_cancellation: bool = False,
) -> None:
    async def wait(
        starting_tasks: "WeakSet[asyncio.Task[object]]", shutdown_timeout: float
    ) -> None:
        # Wait for pending tasks for a given time limit.
        t = asyncio.current_task()
        assert t is not None
        starting_tasks.add(t)
        with suppress(asyncio.TimeoutError):
            await asyncio.wait_for(_wait(starting_tasks), timeout=shutdown_timeout)

    async def _wait(exclude: "WeakSet[asyncio.Task[object]]") -> None:
        t = asyncio.current_task()
        assert t is not None
        exclude.add(t)
        while tasks := asyncio.all_tasks().difference(exclude):
            await asyncio.wait(tasks)

    # An internal function that does all the dirty work of running an application.
    if asyncio.iscoroutine(app):
        app = await app

    app = cast(Application, app)

    runner = AppRunner(
        app,
        handle_signals=handle_signals,
        access_log_class=access_log_class,
        access_log_format=access_log_format,
        access_log=access_log,
        keepalive_timeout=keepalive_timeout,
        shutdown_timeout=shutdown_timeout,
        handler_cancellation=handler_cancellation,
    )

    await runner.setup()
    # On shutdown we want to avoid waiting on tasks which run forever.
    # It's very likely that all tasks which run forever will have been created by
    # the time we have completed the application startup (in runner.setup()),
    # so we just record all running tasks here and exclude them later.
    starting_tasks: "WeakSet[asyncio.Task[object]]" = WeakSet(asyncio.all_tasks())
    runner.shutdown_callback = partial(wait, starting_tasks, shutdown_timeout)

    sites: List[BaseSite] = []

    try:
        if host is not None:
            if isinstance(host, (str, bytes, bytearray, memoryview)):
                sites.append(
                    TCPSite(
                        runner,
                        host,
                        port,
                        ssl_context=ssl_context,
                        backlog=backlog,
                        reuse_address=reuse_address,
                        reuse_port=reuse_port,
                    )
                )
            else:
                for h in host:
                    sites.append(
                        TCPSite(
                            runner,
                            h,
                            port,
                            ssl_context=ssl_context,
                            backlog=backlog,
                            reuse_address=reuse_address,
                            reuse_port=reuse_port,
                        )
                    )
        elif path is None and sock is None or port is not None:
            sites.append(
                TCPSite(
                    runner,
                    port=port,
                    ssl_context=ssl_context,
                    backlog=backlog,
                    reuse_address=reuse_address,
                    reuse_port=reuse_port,
                )
            )

        if path is not None:
            if isinstance(path, (str, os.PathLike)):
                sites.append(
                    UnixSite(
                        runner,
                        path,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for p in path:
                    sites.append(
                        UnixSite(
                            runner,
                            p,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )

        if sock is not None:
            if not isinstance(sock, Iterable):
                sites.append(
                    SockSite(
                        runner,
                        sock,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for s in sock:
                    sites.append(
                        SockSite(
                            runner,
                            s,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )
        for site in sites:
            await site.start()

        if print:  # pragma: no branch
            names = sorted(str(s.name) for s in runner.sites)
            print(
                "======== Running on {} ========\n"
                "(Press CTRL+C to quit)".format(", ".join(names))
            )

        # sleep forever in 1 hour intervals
        while True:
            await asyncio.sleep(3600)
    finally:
        await runner.cleanup()


def _cancel_tasks(
    to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
) -> None:
    if not to_cancel:
        return

    for task in to_cancel:
        task.cancel()

    loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))

    for task in to_cancel:
        if task.cancelled():
            continue
        if task.exception() is not None:
            loop.call_exception_handler(
                {
                    "message": "unhandled exception during asyncio.run() shutdown",
                    "exception": task.exception(),
                    "task": task,
                }
            )


def run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Union[PathLike, TypingIterable[PathLike], None] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Optional[Callable[..., None]] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    handler_cancellation: bool = False,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
    """Run an app locally"""
    if loop is None:
        loop = asyncio.new_event_loop()

    # Configure if and only if in debugging mode and using the default logger
    if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
        if access_log.level == logging.NOTSET:
            access_log.setLevel(logging.DEBUG)
        if not access_log.hasHandlers():
            access_log.addHandler(logging.StreamHandler())

    main_task = loop.create_task(
        _run_app(
            app,
            host=host,
            port=port,
            path=path,
            sock=sock,
            shutdown_timeout=shutdown_timeout,
            keepalive_timeout=keepalive_timeout,
            ssl_context=ssl_context,
            print=print,
            backlog=backlog,
            access_log_class=access_log_class,
            access_log_format=access_log_format,
            access_log=access_log,
            handle_signals=handle_signals,
            reuse_address=reuse_address,
            reuse_port=reuse_port,
            handler_cancellation=handler_cancellation,
        )
    )

    try:
        asyncio.set_event_loop(loop)
        loop.run_until_complete(main_task)
    except (GracefulExit, KeyboardInterrupt):  # pragma: no cover
        pass
    finally:
        _cancel_tasks({main_task}, loop)
        _cancel_tasks(asyncio.all_tasks(loop), loop)
        loop.run_until_complete(loop.shutdown_asyncgens())
        loop.close()
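

# A minimal end-to-end sketch of run_app (handler and route names are
# illustrative, not part of this module):
#
#     from aiohttp import web
#
#     async def hello(request: web.Request) -> web.Response:
#         return web.Response(text="Hello, world")
#
#     app = web.Application()
#     app.add_routes([web.get("/", hello)])
#     web.run_app(app, host="127.0.0.1", port=8080)
#
# run_app blocks until CTRL+C (GracefulExit), then cancels outstanding tasks
# and closes the loop, as implemented above.

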
def main(argv: List[str]) -> None:
    arg_parser = ArgumentParser(
        description="aiohttp.web Application server", prog="aiohttp.web"
    )
    arg_parser.add_argument(
        "entry_func",
        help=(
            "Callable returning the `aiohttp.web.Application` instance to "
            "run. Should be specified in the 'module:function' syntax."
        ),
        metavar="entry-func",
    )
    arg_parser.add_argument(
        "-H",
        "--hostname",
        help="TCP/IP hostname to serve on (default: %(default)r)",
        default="localhost",
    )
    arg_parser.add_argument(
        "-P",
        "--port",
        help="TCP/IP port to serve on (default: %(default)r)",
        type=int,
        default="8080",
    )
    arg_parser.add_argument(
        "-U",
        "--path",
        help="Unix file system path to serve on. Specifying a path will cause "
        "hostname and port arguments to be ignored.",
    )
    args, extra_argv = arg_parser.parse_known_args(argv)

    # Import logic
    mod_str, _, func_str = args.entry_func.partition(":")
    if not func_str or not mod_str:
        arg_parser.error("'entry-func' not in 'module:function' syntax")
    if mod_str.startswith("."):
        arg_parser.error("relative module names not supported")
    try:
        module = import_module(mod_str)
    except ImportError as ex:
        arg_parser.error(f"unable to import {mod_str}: {ex}")
    try:
        func = getattr(module, func_str)
    except AttributeError:
        arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")

    # Compatibility logic
    if args.path is not None and not hasattr(socket, "AF_UNIX"):
        arg_parser.error(
            "file system paths not supported by your operating environment"
        )

    logging.basicConfig(level=logging.DEBUG)

    app = func(extra_argv)
    run_app(app, host=args.hostname, port=args.port, path=args.path)
    arg_parser.exit(message="Stopped\n")


if __name__ == "__main__":  # pragma: no branch
    main(sys.argv[1:])  # pragma: no cover
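
# Example invocation of this CLI entry point (module and function names are
# illustrative):
#
#     python -m aiohttp.web -H 0.0.0.0 -P 8080 mypackage.main:init_func
#
# where init_func(extra_argv) must return a web.Application instance.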
@@ -1,596 +0,0 @@
import asyncio
import logging
import warnings
from functools import partial, update_wrapper
from typing import (
    TYPE_CHECKING,
    Any,
    AsyncIterator,
    Awaitable,
    Callable,
    Dict,
    Iterable,
    Iterator,
    List,
    Mapping,
    MutableMapping,
    Optional,
    Sequence,
    Tuple,
    Type,
    TypeVar,
    Union,
    cast,
    overload,
)

from aiosignal import Signal
from frozenlist import FrozenList

from . import hdrs
from .abc import (
    AbstractAccessLogger,
    AbstractMatchInfo,
    AbstractRouter,
    AbstractStreamWriter,
)
from .helpers import DEBUG, AppKey
from .http_parser import RawRequestMessage
from .log import web_logger
from .streams import StreamReader
from .typedefs import Middleware
from .web_exceptions import NotAppKeyWarning
from .web_log import AccessLogger
from .web_middlewares import _fix_request_current_app
from .web_protocol import RequestHandler
from .web_request import Request
from .web_response import StreamResponse
from .web_routedef import AbstractRouteDef
from .web_server import Server
from .web_urldispatcher import (
    AbstractResource,
    AbstractRoute,
    Domain,
    MaskDomain,
    MatchedSubAppResource,
    PrefixedSubAppResource,
    UrlDispatcher,
)

__all__ = ("Application", "CleanupError")


if TYPE_CHECKING:  # pragma: no cover
    _AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
    _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
    _Middlewares = FrozenList[Middleware]
    _MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]]
    _Subapps = List["Application"]
else:
    # No type checker mode, skip types
    _AppSignal = Signal
    _RespPrepareSignal = Signal
    _Middlewares = FrozenList
    _MiddlewaresHandlers = Optional[Sequence]
    _Subapps = List

_T = TypeVar("_T")
_U = TypeVar("_U")


class Application(MutableMapping[Union[str, AppKey[Any]], Any]):
    ATTRS = frozenset(
        [
            "logger",
            "_debug",
            "_router",
            "_loop",
            "_handler_args",
            "_middlewares",
            "_middlewares_handlers",
            "_run_middlewares",
            "_state",
            "_frozen",
            "_pre_frozen",
            "_subapps",
            "_on_response_prepare",
            "_on_startup",
            "_on_shutdown",
            "_on_cleanup",
            "_client_max_size",
            "_cleanup_ctx",
        ]
    )

    def __init__(
        self,
        *,
        logger: logging.Logger = web_logger,
        router: Optional[UrlDispatcher] = None,
        middlewares: Iterable[Middleware] = (),
        handler_args: Optional[Mapping[str, Any]] = None,
        client_max_size: int = 1024**2,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        debug: Any = ...,  # mypy doesn't support ellipsis
    ) -> None:
        if router is None:
            router = UrlDispatcher()
        else:
            warnings.warn(
                "router argument is deprecated", DeprecationWarning, stacklevel=2
            )
            assert isinstance(router, AbstractRouter), router

        if loop is not None:
            warnings.warn(
                "loop argument is deprecated", DeprecationWarning, stacklevel=2
            )

        if debug is not ...:
            warnings.warn(
                "debug argument is deprecated", DeprecationWarning, stacklevel=2
            )
        self._debug = debug
        self._router: UrlDispatcher = router
        self._loop = loop
        self._handler_args = handler_args
        self.logger = logger

        self._middlewares: _Middlewares = FrozenList(middlewares)

        # initialized on freezing
        self._middlewares_handlers: _MiddlewaresHandlers = None
        # initialized on freezing
        self._run_middlewares: Optional[bool] = None

        self._state: Dict[Union[AppKey[Any], str], object] = {}
        self._frozen = False
        self._pre_frozen = False
        self._subapps: _Subapps = []

        self._on_response_prepare: _RespPrepareSignal = Signal(self)
        self._on_startup: _AppSignal = Signal(self)
        self._on_shutdown: _AppSignal = Signal(self)
        self._on_cleanup: _AppSignal = Signal(self)
        self._cleanup_ctx = CleanupContext()
        self._on_startup.append(self._cleanup_ctx._on_startup)
        self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
        self._client_max_size = client_max_size

    def __init_subclass__(cls: Type["Application"]) -> None:
        warnings.warn(
            "Inheritance class {} from web.Application "
            "is discouraged".format(cls.__name__),
            DeprecationWarning,
            stacklevel=3,
        )

    if DEBUG:  # pragma: no cover

        def __setattr__(self, name: str, val: Any) -> None:
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom web.Application.{} attribute "
                    "is discouraged".format(name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            super().__setattr__(name, val)

    # MutableMapping API

    def __eq__(self, other: object) -> bool:
        return self is other

    @overload  # type: ignore[override]
    def __getitem__(self, key: AppKey[_T]) -> _T:
        ...

    @overload
    def __getitem__(self, key: str) -> Any:
        ...

    def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
        return self._state[key]

    def _check_frozen(self) -> None:
        if self._frozen:
            warnings.warn(
                "Changing state of started or joined application is deprecated",
                DeprecationWarning,
                stacklevel=3,
            )

    @overload  # type: ignore[override]
    def __setitem__(self, key: AppKey[_T], value: _T) -> None:
        ...

    @overload
    def __setitem__(self, key: str, value: Any) -> None:
        ...

    def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None:
        self._check_frozen()
        if not isinstance(key, AppKey):
            warnings.warn(
                "It is recommended to use web.AppKey instances for keys.\n"
                + "https://docs.aiohttp.org/en/stable/web_advanced.html"
                + "#application-s-config",
                category=NotAppKeyWarning,
                stacklevel=2,
            )
        self._state[key] = value

    def __delitem__(self, key: Union[str, AppKey[_T]]) -> None:
        self._check_frozen()
        del self._state[key]

    def __len__(self) -> int:
        return len(self._state)

    def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
        return iter(self._state)

    @overload  # type: ignore[override]
    def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]:
        ...

    @overload
    def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]:
        ...

    @overload
    def get(self, key: str, default: Any = ...) -> Any:
        ...

    def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
        return self._state.get(key, default)
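
    # A minimal sketch of the typed-key mapping API above (Database is a
    # hypothetical user class):
    #
    #     db_key = web.AppKey("db_key", Database)
    #     app = web.Application()
    #     app[db_key] = Database()        # AppKey: no NotAppKeyWarning raised
    #     app["legacy"] = "value"         # str key: warns, but still works
    #     db = app[db_key]                # type checkers infer Database here
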
    ########
    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        # Technically the loop can be None
        # but we mask it by explicit type cast
        # to provide more convenient type annotation
        warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2)
        return cast(asyncio.AbstractEventLoop, self._loop)

    def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
        if loop is None:
            loop = asyncio.get_event_loop()
        if self._loop is not None and self._loop is not loop:
            raise RuntimeError(
                "web.Application instance initialized with different loop"
            )

        self._loop = loop

        # set loop debug
        if self._debug is ...:
            self._debug = loop.get_debug()

        # set loop to sub applications
        for subapp in self._subapps:
            subapp._set_loop(loop)

    @property
    def pre_frozen(self) -> bool:
        return self._pre_frozen

    def pre_freeze(self) -> None:
        if self._pre_frozen:
            return

        self._pre_frozen = True
        self._middlewares.freeze()
        self._router.freeze()
        self._on_response_prepare.freeze()
        self._cleanup_ctx.freeze()
        self._on_startup.freeze()
        self._on_shutdown.freeze()
        self._on_cleanup.freeze()
        self._middlewares_handlers = tuple(self._prepare_middleware())

        # If neither the current app nor any subapp has middlewares, skip the
        # code path this implies: a per-app middleware that sets up the
        # current_app attribute. With no middlewares configured, the handler
        # receives the proper current_app without any of that machinery.
        self._run_middlewares = True if self.middlewares else False

        for subapp in self._subapps:
            subapp.pre_freeze()
            self._run_middlewares = self._run_middlewares or subapp._run_middlewares

    @property
    def frozen(self) -> bool:
        return self._frozen

    def freeze(self) -> None:
        if self._frozen:
            return

        self.pre_freeze()
        self._frozen = True
        for subapp in self._subapps:
            subapp.freeze()

    @property
    def debug(self) -> bool:
        warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2)
        return self._debug  # type: ignore[no-any-return]

    def _reg_subapp_signals(self, subapp: "Application") -> None:
        def reg_handler(signame: str) -> None:
            subsig = getattr(subapp, signame)

            async def handler(app: "Application") -> None:
                await subsig.send(subapp)

            appsig = getattr(self, signame)
            appsig.append(handler)

        reg_handler("on_startup")
        reg_handler("on_shutdown")
        reg_handler("on_cleanup")

    def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource:
        if not isinstance(prefix, str):
            raise TypeError("Prefix must be str")
        prefix = prefix.rstrip("/")
        if not prefix:
            raise ValueError("Prefix cannot be empty")
        factory = partial(PrefixedSubAppResource, prefix, subapp)
        return self._add_subapp(factory, subapp)

    def _add_subapp(
        self, resource_factory: Callable[[], AbstractResource], subapp: "Application"
    ) -> AbstractResource:
        if self.frozen:
            raise RuntimeError("Cannot add sub application to frozen application")
        if subapp.frozen:
            raise RuntimeError("Cannot add frozen application")
        resource = resource_factory()
        self.router.register_resource(resource)
        self._reg_subapp_signals(subapp)
        self._subapps.append(subapp)
        subapp.pre_freeze()
        if self._loop is not None:
            subapp._set_loop(self._loop)
        return resource

    def add_domain(self, domain: str, subapp: "Application") -> AbstractResource:
        if not isinstance(domain, str):
            raise TypeError("Domain must be str")
        elif "*" in domain:
            rule: Domain = MaskDomain(domain)
        else:
            rule = Domain(domain)
        factory = partial(MatchedSubAppResource, rule, subapp)
        return self._add_subapp(factory, subapp)

    def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
        return self.router.add_routes(routes)
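
    # A minimal sketch of mounting sub-applications (names illustrative):
    #
    #     admin = web.Application()
    #     admin.add_routes([web.get("/status", status_handler)])
    #     app.add_subapp("/admin/", admin)             # dispatch by URL prefix
    #     app.add_domain("api.example.com", api_app)   # dispatch by Host header
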
    @property
    def on_response_prepare(self) -> _RespPrepareSignal:
        return self._on_response_prepare

    @property
    def on_startup(self) -> _AppSignal:
        return self._on_startup

    @property
    def on_shutdown(self) -> _AppSignal:
        return self._on_shutdown

    @property
    def on_cleanup(self) -> _AppSignal:
        return self._on_cleanup

    @property
    def cleanup_ctx(self) -> "CleanupContext":
        return self._cleanup_ctx

    @property
    def router(self) -> UrlDispatcher:
        return self._router

    @property
    def middlewares(self) -> _Middlewares:
        return self._middlewares

    def _make_handler(
        self,
        *,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        access_log_class: Type[AbstractAccessLogger] = AccessLogger,
        **kwargs: Any,
    ) -> Server:

        if not issubclass(access_log_class, AbstractAccessLogger):
            raise TypeError(
                "access_log_class must be subclass of "
                "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class)
            )

        self._set_loop(loop)
        self.freeze()

        kwargs["debug"] = self._debug
        kwargs["access_log_class"] = access_log_class
        if self._handler_args:
            for k, v in self._handler_args.items():
                kwargs[k] = v

        return Server(
            self._handle,  # type: ignore[arg-type]
            request_factory=self._make_request,
            loop=self._loop,
            **kwargs,
        )

    def make_handler(
        self,
        *,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        access_log_class: Type[AbstractAccessLogger] = AccessLogger,
        **kwargs: Any,
    ) -> Server:

        warnings.warn(
            "Application.make_handler(...) is deprecated, use AppRunner API instead",
            DeprecationWarning,
            stacklevel=2,
        )

        return self._make_handler(
            loop=loop, access_log_class=access_log_class, **kwargs
        )

    async def startup(self) -> None:
        """Causes the on_startup signal.

        Should be called in the event loop along with the request handler.
        """
        await self.on_startup.send(self)

    async def shutdown(self) -> None:
        """Causes the on_shutdown signal.

        Should be called before cleanup().
        """
        await self.on_shutdown.send(self)

    async def cleanup(self) -> None:
        """Causes the on_cleanup signal.

        Should be called after shutdown().
        """
        if self.on_cleanup.frozen:
            await self.on_cleanup.send(self)
        else:
            # If an exception occurs in startup, ensure cleanup contexts are completed.
            await self._cleanup_ctx._on_cleanup(self)
def _make_request(
|
|
||||||
self,
|
|
||||||
message: RawRequestMessage,
|
|
||||||
payload: StreamReader,
|
|
||||||
protocol: RequestHandler,
|
|
||||||
writer: AbstractStreamWriter,
|
|
||||||
task: "asyncio.Task[None]",
|
|
||||||
_cls: Type[Request] = Request,
|
|
||||||
) -> Request:
|
|
||||||
return _cls(
|
|
||||||
message,
|
|
||||||
payload,
|
|
||||||
protocol,
|
|
||||||
writer,
|
|
||||||
task,
|
|
||||||
self._loop,
|
|
||||||
client_max_size=self._client_max_size,
|
|
||||||
)
|
|
||||||
|
|
||||||
def _prepare_middleware(self) -> Iterator[Tuple[Middleware, bool]]:
|
|
||||||
for m in reversed(self._middlewares):
|
|
||||||
if getattr(m, "__middleware_version__", None) == 1:
|
|
||||||
yield m, True
|
|
||||||
else:
|
|
||||||
warnings.warn(
|
|
||||||
'old-style middleware "{!r}" deprecated, ' "see #2252".format(m),
|
|
||||||
DeprecationWarning,
|
|
||||||
stacklevel=2,
|
|
||||||
)
|
|
||||||
yield m, False
|
|
||||||
|
|
||||||
yield _fix_request_current_app(self), True
|
|
||||||
|
|
||||||
async def _handle(self, request: Request) -> StreamResponse:
|
|
||||||
loop = asyncio.get_event_loop()
|
|
||||||
debug = loop.get_debug()
|
|
||||||
match_info = await self._router.resolve(request)
|
|
||||||
if debug: # pragma: no cover
|
|
||||||
if not isinstance(match_info, AbstractMatchInfo):
|
|
||||||
raise TypeError(
|
|
||||||
"match_info should be AbstractMatchInfo "
|
|
||||||
"instance, not {!r}".format(match_info)
|
|
||||||
)
|
|
||||||
match_info.add_app(self)
|
|
||||||
|
|
||||||
match_info.freeze()
|
|
||||||
|
|
||||||
resp = None
|
|
||||||
request._match_info = match_info
|
|
||||||
expect = request.headers.get(hdrs.EXPECT)
|
|
||||||
if expect:
|
|
||||||
resp = await match_info.expect_handler(request)
|
|
||||||
await request.writer.drain()
|
|
||||||
|
|
||||||
if resp is None:
|
|
||||||
handler = match_info.handler
|
|
||||||
|
|
||||||
if self._run_middlewares:
|
|
||||||
for app in match_info.apps[::-1]:
|
|
||||||
for m, new_style in app._middlewares_handlers: # type: ignore[union-attr]
|
|
||||||
if new_style:
|
|
||||||
handler = update_wrapper(
|
|
||||||
partial(m, handler=handler), handler
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
handler = await m(app, handler) # type: ignore[arg-type,assignment]
|
|
||||||
|
|
||||||
resp = await handler(request)
|
|
||||||
|
|
||||||
return resp
|
|
||||||
|
|
||||||
def __call__(self) -> "Application":
|
|
||||||
"""gunicorn compatibility"""
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<Application 0x{id(self):x}>"
|
|
||||||
|
|
||||||
def __bool__(self) -> bool:
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
class CleanupError(RuntimeError):
|
|
||||||
@property
|
|
||||||
def exceptions(self) -> List[BaseException]:
|
|
||||||
return cast(List[BaseException], self.args[1])
|
|
||||||
|
|
||||||
|
|
||||||
if TYPE_CHECKING: # pragma: no cover
|
|
||||||
_CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
|
|
||||||
else:
|
|
||||||
_CleanupContextBase = FrozenList
|
|
||||||
|
|
||||||
|
|
||||||
class CleanupContext(_CleanupContextBase):
|
|
||||||
def __init__(self) -> None:
|
|
||||||
super().__init__()
|
|
||||||
self._exits: List[AsyncIterator[None]] = []
|
|
||||||
|
|
||||||
async def _on_startup(self, app: Application) -> None:
|
|
||||||
for cb in self:
|
|
||||||
it = cb(app).__aiter__()
|
|
||||||
await it.__anext__()
|
|
||||||
self._exits.append(it)
|
|
||||||
|
|
||||||
async def _on_cleanup(self, app: Application) -> None:
|
|
||||||
errors = []
|
|
||||||
for it in reversed(self._exits):
|
|
||||||
try:
|
|
||||||
await it.__anext__()
|
|
||||||
except StopAsyncIteration:
|
|
||||||
pass
|
|
||||||
except Exception as exc:
|
|
||||||
errors.append(exc)
|
|
||||||
else:
|
|
||||||
errors.append(RuntimeError(f"{it!r} has more than one 'yield'"))
|
|
||||||
if errors:
|
|
||||||
if len(errors) == 1:
|
|
||||||
raise errors[0]
|
|
||||||
else:
|
|
||||||
raise CleanupError("Multiple errors on cleanup stage", errors)
|
|
||||||
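
For context, CleanupContext implements the cleanup_ctx protocol: each registered callback is an async generator whose code before its single yield runs at startup (_on_startup) and whose code after the yield runs at cleanup (_on_cleanup), in reverse registration order. A minimal usage sketch against aiohttp's public API follows; the "db" resource is a hypothetical stand-in, not part of the deleted file above.

from typing import AsyncIterator

from aiohttp import web


async def db_ctx(app: web.Application) -> AsyncIterator[None]:
    # Startup half: executed via CleanupContext._on_startup.
    app["db"] = {"connected": True}  # hypothetical stand-in for a connection pool
    yield  # exactly one yield; a second one makes _on_cleanup record a RuntimeError
    # Cleanup half: executed via CleanupContext._on_cleanup, in reverse order.
    app["db"]["connected"] = False


app = web.Application()
app.cleanup_ctx.append(db_ctx)
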
@@ -1,452 +0,0 @@
import warnings
from typing import Any, Dict, Iterable, List, Optional, Set  # noqa

from yarl import URL

from .typedefs import LooseHeaders, StrOrURL
from .web_response import Response

__all__ = (
    "HTTPException",
    "HTTPError",
    "HTTPRedirection",
    "HTTPSuccessful",
    "HTTPOk",
    "HTTPCreated",
    "HTTPAccepted",
    "HTTPNonAuthoritativeInformation",
    "HTTPNoContent",
    "HTTPResetContent",
    "HTTPPartialContent",
    "HTTPMove",
    "HTTPMultipleChoices",
    "HTTPMovedPermanently",
    "HTTPFound",
    "HTTPSeeOther",
    "HTTPNotModified",
    "HTTPUseProxy",
    "HTTPTemporaryRedirect",
    "HTTPPermanentRedirect",
    "HTTPClientError",
    "HTTPBadRequest",
    "HTTPUnauthorized",
    "HTTPPaymentRequired",
    "HTTPForbidden",
    "HTTPNotFound",
    "HTTPMethodNotAllowed",
    "HTTPNotAcceptable",
    "HTTPProxyAuthenticationRequired",
    "HTTPRequestTimeout",
    "HTTPConflict",
    "HTTPGone",
    "HTTPLengthRequired",
    "HTTPPreconditionFailed",
    "HTTPRequestEntityTooLarge",
    "HTTPRequestURITooLong",
    "HTTPUnsupportedMediaType",
    "HTTPRequestRangeNotSatisfiable",
    "HTTPExpectationFailed",
    "HTTPMisdirectedRequest",
    "HTTPUnprocessableEntity",
    "HTTPFailedDependency",
    "HTTPUpgradeRequired",
    "HTTPPreconditionRequired",
    "HTTPTooManyRequests",
    "HTTPRequestHeaderFieldsTooLarge",
    "HTTPUnavailableForLegalReasons",
    "HTTPServerError",
    "HTTPInternalServerError",
    "HTTPNotImplemented",
    "HTTPBadGateway",
    "HTTPServiceUnavailable",
    "HTTPGatewayTimeout",
    "HTTPVersionNotSupported",
    "HTTPVariantAlsoNegotiates",
    "HTTPInsufficientStorage",
    "HTTPNotExtended",
    "HTTPNetworkAuthenticationRequired",
)


class NotAppKeyWarning(UserWarning):
    """Warning when not using AppKey in Application."""


############################################################
# HTTP Exceptions
############################################################


class HTTPException(Response, Exception):

    # You should set in subclasses:
    # status = 200

    status_code = -1
    empty_body = False

    __http_exception__ = True

    def __init__(
        self,
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        if body is not None:
            warnings.warn(
                "body argument is deprecated for http web exceptions",
                DeprecationWarning,
            )
        Response.__init__(
            self,
            status=self.status_code,
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        Exception.__init__(self, self.reason)
        if self.body is None and not self.empty_body:
            self.text = f"{self.status}: {self.reason}"

    def __bool__(self) -> bool:
        return True


class HTTPError(HTTPException):
    """Base class for exceptions with status codes in the 400s and 500s."""


class HTTPRedirection(HTTPException):
    """Base class for exceptions with status codes in the 300s."""


class HTTPSuccessful(HTTPException):
    """Base class for exceptions with status codes in the 200s."""


class HTTPOk(HTTPSuccessful):
    status_code = 200


class HTTPCreated(HTTPSuccessful):
    status_code = 201


class HTTPAccepted(HTTPSuccessful):
    status_code = 202


class HTTPNonAuthoritativeInformation(HTTPSuccessful):
    status_code = 203


class HTTPNoContent(HTTPSuccessful):
    status_code = 204
    empty_body = True


class HTTPResetContent(HTTPSuccessful):
    status_code = 205
    empty_body = True


class HTTPPartialContent(HTTPSuccessful):
    status_code = 206


############################################################
# 3xx redirection
############################################################


class HTTPMove(HTTPRedirection):
    def __init__(
        self,
        location: StrOrURL,
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        if not location:
            raise ValueError("HTTP redirects need a location to redirect to.")
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        self.headers["Location"] = str(URL(location))
        self.location = location


class HTTPMultipleChoices(HTTPMove):
    status_code = 300


class HTTPMovedPermanently(HTTPMove):
    status_code = 301


class HTTPFound(HTTPMove):
    status_code = 302


# This one is safe after a POST (the redirected location will be
# retrieved with GET):
class HTTPSeeOther(HTTPMove):
    status_code = 303


class HTTPNotModified(HTTPRedirection):
    # FIXME: this should include a date or etag header
    status_code = 304
    empty_body = True


class HTTPUseProxy(HTTPMove):
    # Not a move, but looks a little like one
    status_code = 305


class HTTPTemporaryRedirect(HTTPMove):
    status_code = 307


class HTTPPermanentRedirect(HTTPMove):
    status_code = 308


############################################################
# 4xx client error
############################################################


class HTTPClientError(HTTPError):
    pass


class HTTPBadRequest(HTTPClientError):
    status_code = 400


class HTTPUnauthorized(HTTPClientError):
    status_code = 401


class HTTPPaymentRequired(HTTPClientError):
    status_code = 402


class HTTPForbidden(HTTPClientError):
    status_code = 403


class HTTPNotFound(HTTPClientError):
    status_code = 404


class HTTPMethodNotAllowed(HTTPClientError):
    status_code = 405

    def __init__(
        self,
        method: str,
        allowed_methods: Iterable[str],
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        allow = ",".join(sorted(allowed_methods))
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        self.headers["Allow"] = allow
        self.allowed_methods: Set[str] = set(allowed_methods)
        self.method = method.upper()


class HTTPNotAcceptable(HTTPClientError):
    status_code = 406


class HTTPProxyAuthenticationRequired(HTTPClientError):
    status_code = 407


class HTTPRequestTimeout(HTTPClientError):
    status_code = 408


class HTTPConflict(HTTPClientError):
    status_code = 409


class HTTPGone(HTTPClientError):
    status_code = 410


class HTTPLengthRequired(HTTPClientError):
    status_code = 411


class HTTPPreconditionFailed(HTTPClientError):
    status_code = 412


class HTTPRequestEntityTooLarge(HTTPClientError):
    status_code = 413

    def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None:
        kwargs.setdefault(
            "text",
            "Maximum request body size {} exceeded, "
            "actual body size {}".format(max_size, actual_size),
        )
        super().__init__(**kwargs)


class HTTPRequestURITooLong(HTTPClientError):
    status_code = 414


class HTTPUnsupportedMediaType(HTTPClientError):
    status_code = 415


class HTTPRequestRangeNotSatisfiable(HTTPClientError):
    status_code = 416


class HTTPExpectationFailed(HTTPClientError):
    status_code = 417


class HTTPMisdirectedRequest(HTTPClientError):
    status_code = 421


class HTTPUnprocessableEntity(HTTPClientError):
    status_code = 422


class HTTPFailedDependency(HTTPClientError):
    status_code = 424


class HTTPUpgradeRequired(HTTPClientError):
    status_code = 426


class HTTPPreconditionRequired(HTTPClientError):
    status_code = 428


class HTTPTooManyRequests(HTTPClientError):
    status_code = 429


class HTTPRequestHeaderFieldsTooLarge(HTTPClientError):
    status_code = 431


class HTTPUnavailableForLegalReasons(HTTPClientError):
    status_code = 451

    def __init__(
        self,
        link: Optional[StrOrURL],
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        self._link = None
        if link:
            self._link = URL(link)
            self.headers["Link"] = f'<{str(self._link)}>; rel="blocked-by"'

    @property
    def link(self) -> Optional[URL]:
        return self._link


############################################################
# 5xx Server Error
############################################################
# Response status codes beginning with the digit "5" indicate cases in
# which the server is aware that it has erred or is incapable of
# performing the request. Except when responding to a HEAD request, the
# server SHOULD include an entity containing an explanation of the error
# situation, and whether it is a temporary or permanent condition. User
# agents SHOULD display any included entity to the user. These response
# codes are applicable to any request method.


class HTTPServerError(HTTPError):
    pass


class HTTPInternalServerError(HTTPServerError):
    status_code = 500


class HTTPNotImplemented(HTTPServerError):
    status_code = 501


class HTTPBadGateway(HTTPServerError):
    status_code = 502


class HTTPServiceUnavailable(HTTPServerError):
    status_code = 503


class HTTPGatewayTimeout(HTTPServerError):
    status_code = 504


class HTTPVersionNotSupported(HTTPServerError):
    status_code = 505


class HTTPVariantAlsoNegotiates(HTTPServerError):
    status_code = 506


class HTTPInsufficientStorage(HTTPServerError):
    status_code = 507


class HTTPNotExtended(HTTPServerError):
    status_code = 510


class HTTPNetworkAuthenticationRequired(HTTPServerError):
    status_code = 511
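
Because HTTPException subclasses both Response and Exception, a handler may either raise or return one of these classes, and the text body defaults to "<status>: <reason>" as set in __init__ above. A short sketch using aiohttp's public web namespace; the route and id check are illustrative only.

from aiohttp import web


async def fetch_user(request: web.Request) -> web.Response:
    user_id = request.match_info.get("id", "")
    if not user_id.isdigit():
        raise web.HTTPBadRequest(reason="id must be numeric")  # 400
    if user_id != "1":
        raise web.HTTPNotFound()  # 404, body "404: Not Found"
    # HTTPSeeOther (303) is the redirect that is safe after a POST.
    raise web.HTTPSeeOther(location=f"/users/{user_id}/profile")
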
@@ -1,285 +0,0 @@
import asyncio
import mimetypes
import os
import pathlib
from typing import (  # noqa
    IO,
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Final,
    Iterator,
    List,
    Optional,
    Tuple,
    Union,
    cast,
)

from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import ETAG_ANY, ETag, must_be_empty_body
from .typedefs import LooseHeaders, PathLike
from .web_exceptions import (
    HTTPNotModified,
    HTTPPartialContent,
    HTTPPreconditionFailed,
    HTTPRequestRangeNotSatisfiable,
)
from .web_response import StreamResponse

__all__ = ("FileResponse",)

if TYPE_CHECKING:  # pragma: no cover
    from .web_request import BaseRequest


_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]


NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))


class FileResponse(StreamResponse):
    """A response object used to send files."""

    def __init__(
        self,
        path: PathLike,
        chunk_size: int = 256 * 1024,
        status: int = 200,
        reason: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        super().__init__(status=status, reason=reason, headers=headers)

        self._path = pathlib.Path(path)
        self._chunk_size = chunk_size

    async def _sendfile_fallback(
        self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
    ) -> AbstractStreamWriter:
        # To keep memory usage low, fobj is transferred in chunks
        # controlled by the constructor's chunk_size argument.

        chunk_size = self._chunk_size
        loop = asyncio.get_event_loop()

        await loop.run_in_executor(None, fobj.seek, offset)

        chunk = await loop.run_in_executor(None, fobj.read, chunk_size)
        while chunk:
            await writer.write(chunk)
            count = count - chunk_size
            if count <= 0:
                break
            chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))

        await writer.drain()
        return writer

    async def _sendfile(
        self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
    ) -> AbstractStreamWriter:
        writer = await super().prepare(request)
        assert writer is not None

        if NOSENDFILE or self.compression:
            return await self._sendfile_fallback(writer, fobj, offset, count)

        loop = request._loop
        transport = request.transport
        assert transport is not None

        try:
            await loop.sendfile(transport, fobj, offset, count)
        except NotImplementedError:
            return await self._sendfile_fallback(writer, fobj, offset, count)

        await super().write_eof()
        return writer

    @staticmethod
    def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool:
        if len(etags) == 1 and etags[0].value == ETAG_ANY:
            return True
        return any(etag.value == etag_value for etag in etags if not etag.is_weak)

    async def _not_modified(
        self, request: "BaseRequest", etag_value: str, last_modified: float
    ) -> Optional[AbstractStreamWriter]:
        self.set_status(HTTPNotModified.status_code)
        self._length_check = False
        self.etag = etag_value  # type: ignore[assignment]
        self.last_modified = last_modified  # type: ignore[assignment]
        # Delete any Content-Length headers provided by user. HTTP 304
        # should always have empty response body
        return await super().prepare(request)

    async def _precondition_failed(
        self, request: "BaseRequest"
    ) -> Optional[AbstractStreamWriter]:
        self.set_status(HTTPPreconditionFailed.status_code)
        self.content_length = 0
        return await super().prepare(request)

    async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
        filepath = self._path

        gzip = False
        if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""):
            gzip_path = filepath.with_name(filepath.name + ".gz")

            if gzip_path.is_file():
                filepath = gzip_path
                gzip = True

        loop = asyncio.get_event_loop()
        st: os.stat_result = await loop.run_in_executor(None, filepath.stat)

        etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
        last_modified = st.st_mtime

        # https://tools.ietf.org/html/rfc7232#section-6
        ifmatch = request.if_match
        if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch):
            return await self._precondition_failed(request)

        unmodsince = request.if_unmodified_since
        if (
            unmodsince is not None
            and ifmatch is None
            and st.st_mtime > unmodsince.timestamp()
        ):
            return await self._precondition_failed(request)

        ifnonematch = request.if_none_match
        if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch):
            return await self._not_modified(request, etag_value, last_modified)

        modsince = request.if_modified_since
        if (
            modsince is not None
            and ifnonematch is None
            and st.st_mtime <= modsince.timestamp()
        ):
            return await self._not_modified(request, etag_value, last_modified)

        if hdrs.CONTENT_TYPE not in self.headers:
            ct, encoding = mimetypes.guess_type(str(filepath))
            if not ct:
                ct = "application/octet-stream"
            should_set_ct = True
        else:
            encoding = "gzip" if gzip else None
            should_set_ct = False

        status = self._status
        file_size = st.st_size
        count = file_size

        start = None

        ifrange = request.if_range
        if ifrange is None or st.st_mtime <= ifrange.timestamp():
            # If-Range header check:
            # condition = cached date >= last modification date
            # return 206 if True else 200.
            # if False:
            #   Range header would not be processed, return 200
            # if True but Range header missing
            #   return 200
            try:
                rng = request.http_range
                start = rng.start
                end = rng.stop
            except ValueError:
                # https://tools.ietf.org/html/rfc7233:
                # A server generating a 416 (Range Not Satisfiable) response to
                # a byte-range request SHOULD send a Content-Range header field
                # with an unsatisfied-range value.
                # The complete-length in a 416 response indicates the current
                # length of the selected representation.
                #
                # Will do the same below. Many servers ignore this and do not
                # send a Content-Range header with HTTP 416
                self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
                self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
                return await super().prepare(request)

            # If a range request has been made, convert start, end slice
            # notation into file pointer offset and count
            if start is not None or end is not None:
                if start < 0 and end is None:  # return tail of file
                    start += file_size
                    if start < 0:
                        # if Range:bytes=-1000 in request header but file size
                        # is only 200, there would be trouble without this
                        start = 0
                    count = file_size - start
                else:
                    # rfc7233: If the last-byte-pos value is
                    # absent, or if the value is greater than or equal to
                    # the current length of the representation data,
                    # the byte range is interpreted as the remainder
                    # of the representation (i.e., the server replaces the
                    # value of last-byte-pos with a value that is one less than
                    # the current length of the selected representation).
                    count = (
                        min(end if end is not None else file_size, file_size) - start
                    )

                if start >= file_size:
                    # HTTP 416 should be returned in this case.
                    #
                    # According to https://tools.ietf.org/html/rfc7233:
                    # If a valid byte-range-set includes at least one
                    # byte-range-spec with a first-byte-pos that is less than
                    # the current length of the representation, or at least one
                    # suffix-byte-range-spec with a non-zero suffix-length,
                    # then the byte-range-set is satisfiable. Otherwise, the
                    # byte-range-set is unsatisfiable.
                    self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
                    self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
                    return await super().prepare(request)

                status = HTTPPartialContent.status_code
                # Even though you are sending the whole file, you should still
                # return a HTTP 206 for a Range request.
        self.set_status(status)

        if should_set_ct:
            self.content_type = ct  # type: ignore[assignment]
        if encoding:
            self.headers[hdrs.CONTENT_ENCODING] = encoding
        if gzip:
            self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING

        self.etag = etag_value  # type: ignore[assignment]
        self.last_modified = st.st_mtime  # type: ignore[assignment]
        self.content_length = count

        self.headers[hdrs.ACCEPT_RANGES] = "bytes"

        real_start = cast(int, start)

        if status == HTTPPartialContent.status_code:
            self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
                real_start, real_start + count - 1, file_size
            )

        # If we are sending 0 bytes calling sendfile() will throw a ValueError
        if count == 0 or must_be_empty_body(request.method, self.status):
            return await super().prepare(request)

        fobj = await loop.run_in_executor(None, filepath.open, "rb")
        if start:  # be aware that start could be None or int=0 here.
            offset = start
        else:
            offset = 0

        try:
            return await self._sendfile(request, fobj, offset, count)
        finally:
            await asyncio.shield(loop.run_in_executor(None, fobj.close))
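
A FileResponse can be returned directly from a handler; prepare() above then negotiates ETag, If-Modified-Since, and Range before sending bytes with loop.sendfile() or the executor fallback. A minimal sketch against aiohttp's public API; the file path is illustrative.

from aiohttp import web


async def download(request: web.Request) -> web.FileResponse:
    # Conditional-request and Range handling happens inside FileResponse.prepare().
    return web.FileResponse("./assets/report.pdf", chunk_size=64 * 1024)


app = web.Application()
app.router.add_get("/report", download)
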
@@ -1,213 +0,0 @@
import datetime
import functools
import logging
import os
import re
import time as time_mod
from collections import namedtuple
from typing import Any, Callable, Dict, Iterable, List, Tuple  # noqa

from .abc import AbstractAccessLogger
from .web_request import BaseRequest
from .web_response import StreamResponse

KeyMethod = namedtuple("KeyMethod", "key method")


class AccessLogger(AbstractAccessLogger):
    """Helper object to log access.

    Usage:
        log = logging.getLogger("spam")
        log_format = "%a %{User-Agent}i"
        access_logger = AccessLogger(log, log_format)
        access_logger.log(request, response, time)

    Format:
        %%  The percent sign
        %a  Remote IP-address (IP-address of proxy if using reverse proxy)
        %t  Time when the request was started to process
        %P  The process ID of the child that serviced the request
        %r  First line of request
        %s  Response status code
        %b  Size of response in bytes, including HTTP headers
        %T  Time taken to serve the request, in seconds
        %Tf Time taken to serve the request, in seconds with floating fraction
            in .06f format
        %D  Time taken to serve the request, in microseconds
        %{FOO}i  request.headers['FOO']
        %{FOO}o  response.headers['FOO']
        %{FOO}e  os.environ['FOO']

    """

    LOG_FORMAT_MAP = {
        "a": "remote_address",
        "t": "request_start_time",
        "P": "process_id",
        "r": "first_request_line",
        "s": "response_status",
        "b": "response_size",
        "T": "request_time",
        "Tf": "request_time_frac",
        "D": "request_time_micro",
        "i": "request_header",
        "o": "response_header",
    }

    LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
    FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")
    CLEANUP_RE = re.compile(r"(%[^s])")
    _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {}

    def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None:
        """Initialise the logger.

        logger is a logger object to be used for logging.
        log_format is a string with Apache-compatible log format description.

        """
        super().__init__(logger, log_format=log_format)

        _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format)
        if not _compiled_format:
            _compiled_format = self.compile_format(log_format)
            AccessLogger._FORMAT_CACHE[log_format] = _compiled_format

        self._log_format, self._methods = _compiled_format

    def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]:
        """Translate log_format into a form usable by modulo formatting.

        All known atoms will be replaced with %s.
        Methods for formatting those atoms will also be added to
        _methods in the appropriate order.

        For example, given log_format = "%a %t",
        the format will be translated to "%s %s"
        and the contents of _methods will be
        [self._format_a, self._format_t].
        These methods will be called and the results will be passed
        to the translated string format.

        Each _format_* method receives 'args', which is the list of
        arguments given to self.log.

        Exceptions are the _format_e, _format_i and _format_o methods,
        which also receive the key name (by functools.partial).

        """
        # list of (key, method) tuples, we don't use an OrderedDict as users
        # can repeat the same key more than once
        methods = list()

        for atom in self.FORMAT_RE.findall(log_format):
            if atom[1] == "":
                format_key1 = self.LOG_FORMAT_MAP[atom[0]]
                m = getattr(AccessLogger, "_format_%s" % atom[0])
                key_method = KeyMethod(format_key1, m)
            else:
                format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1])
                m = getattr(AccessLogger, "_format_%s" % atom[2])
                key_method = KeyMethod(format_key2, functools.partial(m, atom[1]))

            methods.append(key_method)

        log_format = self.FORMAT_RE.sub(r"%s", log_format)
        log_format = self.CLEANUP_RE.sub(r"%\1", log_format)
        return log_format, methods

    @staticmethod
    def _format_i(
        key: str, request: BaseRequest, response: StreamResponse, time: float
    ) -> str:
        if request is None:
            return "(no headers)"

        # suboptimal, make istr(key) once
        return request.headers.get(key, "-")

    @staticmethod
    def _format_o(
        key: str, request: BaseRequest, response: StreamResponse, time: float
    ) -> str:
        # suboptimal, make istr(key) once
        return response.headers.get(key, "-")

    @staticmethod
    def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str:
        if request is None:
            return "-"
        ip = request.remote
        return ip if ip is not None else "-"

    @staticmethod
    def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str:
        tz = datetime.timezone(datetime.timedelta(seconds=-time_mod.timezone))
        now = datetime.datetime.now(tz)
        start_time = now - datetime.timedelta(seconds=time)
        return start_time.strftime("[%d/%b/%Y:%H:%M:%S %z]")

    @staticmethod
    def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str:
        return "<%s>" % os.getpid()

    @staticmethod
    def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str:
        if request is None:
            return "-"
        return "{} {} HTTP/{}.{}".format(
            request.method,
            request.path_qs,
            request.version.major,
            request.version.minor,
        )

    @staticmethod
    def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int:
        return response.status

    @staticmethod
    def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int:
        return response.body_length

    @staticmethod
    def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str:
        return str(round(time))

    @staticmethod
    def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str:
        return "%06f" % time

    @staticmethod
    def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str:
        return str(round(time * 1000000))

    def _format_line(
        self, request: BaseRequest, response: StreamResponse, time: float
    ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]:
        return [(key, method(request, response, time)) for key, method in self._methods]

    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
        if not self.logger.isEnabledFor(logging.INFO):
            # Avoid formatting the log line if it will not be emitted.
            return
        try:
            fmt_info = self._format_line(request, response, time)

            values = list()
            extra = dict()
            for key, value in fmt_info:
                values.append(value)

                if key.__class__ is str:
                    extra[key] = value
                else:
                    k1, k2 = key  # type: ignore[misc]
                    dct = extra.get(k1, {})  # type: ignore[var-annotated,has-type]
                    dct[k2] = value  # type: ignore[index,has-type]
                    extra[k1] = dct  # type: ignore[has-type,assignment]

            self.logger.info(self._log_format % tuple(values), extra=extra)
        except Exception:
            self.logger.exception("Error in logging")
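
The format atoms documented above are the same ones accepted via the access_log_format argument when running an application. A sketch of a custom access-log line; the format string itself is illustrative.

from aiohttp import web

app = web.Application()

# %a remote address, %t start time, %r request line, %s status,
# %b response size, %Tf duration with fractional seconds.
web.run_app(app, access_log_format='%a %t "%r" %s %b %Tf "%{User-Agent}i"')
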
@@ -1,116 +0,0 @@
import re
from typing import TYPE_CHECKING, Tuple, Type, TypeVar

from .typedefs import Handler, Middleware
from .web_exceptions import HTTPMove, HTTPPermanentRedirect
from .web_request import Request
from .web_response import StreamResponse
from .web_urldispatcher import SystemRoute

__all__ = (
    "middleware",
    "normalize_path_middleware",
)

if TYPE_CHECKING:  # pragma: no cover
    from .web_app import Application

_Func = TypeVar("_Func")


async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
    alt_request = request.clone(rel_url=path)

    match_info = await request.app.router.resolve(alt_request)
    alt_request._match_info = match_info

    if match_info.http_exception is None:
        return True, alt_request

    return False, request


def middleware(f: _Func) -> _Func:
    f.__middleware_version__ = 1  # type: ignore[attr-defined]
    return f


def normalize_path_middleware(
    *,
    append_slash: bool = True,
    remove_slash: bool = False,
    merge_slashes: bool = True,
    redirect_class: Type[HTTPMove] = HTTPPermanentRedirect,
) -> Middleware:
    """Factory for producing a middleware that normalizes the path of a request.

    Normalizing means:
        - Add or remove a trailing slash to the path.
        - Double slashes are replaced by one.

    The middleware returns as soon as it finds a path that resolves
    correctly. The order, if both merge and append/remove are enabled, is:
        1) merge slashes
        2) append/remove slash
        3) both merge slashes and append/remove slash.
    If the path resolves with at least one of those conditions, it will
    redirect to the new path.

    Only one of `append_slash` and `remove_slash` can be enabled. If both
    are `True` the factory will raise an assertion error.

    If `append_slash` is `True` the middleware will append a slash when
    needed. If a resource is defined with a trailing slash and the request
    comes without it, it will append it automatically.

    If `remove_slash` is `True`, `append_slash` must be `False`. When enabled,
    the middleware will remove trailing slashes and redirect if the resource
    is defined without them.

    If `merge_slashes` is `True`, multiple consecutive slashes in the
    path are merged into one.
    """
    correct_configuration = not (append_slash and remove_slash)
    assert correct_configuration, "Cannot both remove and append slash"

    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        if isinstance(request.match_info.route, SystemRoute):
            paths_to_check = []
            if "?" in request.raw_path:
                path, query = request.raw_path.split("?", 1)
                query = "?" + query
            else:
                query = ""
                path = request.raw_path

            if merge_slashes:
                paths_to_check.append(re.sub("//+", "/", path))
            if append_slash and not request.path.endswith("/"):
                paths_to_check.append(path + "/")
            if remove_slash and request.path.endswith("/"):
                paths_to_check.append(path[:-1])
            if merge_slashes and append_slash:
                paths_to_check.append(re.sub("//+", "/", path + "/"))
            if merge_slashes and remove_slash:
                merged_slashes = re.sub("//+", "/", path)
                paths_to_check.append(merged_slashes[:-1])

            for path in paths_to_check:
                path = re.sub("^//+", "/", path)  # SECURITY: GHSA-v6wp-4m6f-gcjg
                resolves, request = await _check_request_resolves(request, path)
                if resolves:
                    raise redirect_class(request.raw_path + query)

        return await handler(request)

    return impl


def _fix_request_current_app(app: "Application") -> Middleware:
    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        with request.match_info.set_current_app(app):
            return await handler(request)

    return impl
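
A sketch of wiring the factory above into an application: with these settings a request for /users//list is permanently redirected to /users/list/ when only the slash-terminated route exists. The route and handler are illustrative.

from aiohttp import web


async def list_users(request: web.Request) -> web.Response:
    return web.Response(text="users")


app = web.Application(
    middlewares=[web.normalize_path_middleware(append_slash=True, merge_slashes=True)]
)
app.router.add_get("/users/list/", list_users)
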
@@ -1,698 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
import asyncio.streams
|
|
||||||
import traceback
|
|
||||||
import warnings
|
|
||||||
from collections import deque
|
|
||||||
from contextlib import suppress
|
|
||||||
from html import escape as html_escape
|
|
||||||
from http import HTTPStatus
|
|
||||||
from logging import Logger
|
|
||||||
from typing import (
|
|
||||||
TYPE_CHECKING,
|
|
||||||
Any,
|
|
||||||
Awaitable,
|
|
||||||
Callable,
|
|
||||||
Deque,
|
|
||||||
Optional,
|
|
||||||
Sequence,
|
|
||||||
Tuple,
|
|
||||||
Type,
|
|
||||||
Union,
|
|
||||||
cast,
|
|
||||||
)
|
|
||||||
|
|
||||||
import attr
|
|
||||||
import yarl
|
|
||||||
|
|
||||||
from .abc import AbstractAccessLogger, AbstractStreamWriter
|
|
||||||
from .base_protocol import BaseProtocol
|
|
||||||
from .helpers import ceil_timeout
|
|
||||||
from .http import (
|
|
||||||
HttpProcessingError,
|
|
||||||
HttpRequestParser,
|
|
||||||
HttpVersion10,
|
|
||||||
RawRequestMessage,
|
|
||||||
StreamWriter,
|
|
||||||
)
|
|
||||||
from .log import access_logger, server_logger
|
|
||||||
from .streams import EMPTY_PAYLOAD, StreamReader
|
|
||||||
from .tcp_helpers import tcp_keepalive
|
|
||||||
from .web_exceptions import HTTPException
|
|
||||||
from .web_log import AccessLogger
|
|
||||||
from .web_request import BaseRequest
|
|
||||||
from .web_response import Response, StreamResponse
|
|
||||||
|
|
||||||
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
|
|
||||||
|
|
||||||
if TYPE_CHECKING: # pragma: no cover
|
|
||||||
from .web_server import Server
|
|
||||||
|
|
||||||
|
|
||||||
_RequestFactory = Callable[
|
|
||||||
[
|
|
||||||
RawRequestMessage,
|
|
||||||
StreamReader,
|
|
||||||
"RequestHandler",
|
|
||||||
AbstractStreamWriter,
|
|
||||||
"asyncio.Task[None]",
|
|
||||||
],
|
|
||||||
BaseRequest,
|
|
||||||
]
|
|
||||||
|
|
||||||
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]
|
|
||||||
|
|
||||||
ERROR = RawRequestMessage(
|
|
||||||
"UNKNOWN",
|
|
||||||
"/",
|
|
||||||
HttpVersion10,
|
|
||||||
{}, # type: ignore[arg-type]
|
|
||||||
{}, # type: ignore[arg-type]
|
|
||||||
True,
|
|
||||||
None,
|
|
||||||
False,
|
|
||||||
False,
|
|
||||||
yarl.URL("/"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class RequestPayloadError(Exception):
|
|
||||||
"""Payload parsing error."""
|
|
||||||
|
|
||||||
|
|
||||||
class PayloadAccessError(Exception):
|
|
||||||
"""Payload was accessed after response was sent."""
|
|
||||||
|
|
||||||
|
|
||||||
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
|
||||||
class _ErrInfo:
|
|
||||||
status: int
|
|
||||||
exc: BaseException
|
|
||||||
message: str
|
|
||||||
|
|
||||||
|
|
||||||
_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
|
|
||||||
|
|
||||||
|
|
||||||
class RequestHandler(BaseProtocol):
|
|
||||||
"""HTTP protocol implementation.
|
|
||||||
|
|
||||||
RequestHandler handles incoming HTTP request. It reads request line,
|
|
||||||
request headers and request payload and calls handle_request() method.
|
|
||||||
By default it always returns with 404 response.
|
|
||||||
|
|
||||||
RequestHandler handles errors in incoming request, like bad
|
|
||||||
status line, bad headers or incomplete payload. If any error occurs,
|
|
||||||
connection gets closed.
|
|
||||||
|
|
||||||
keepalive_timeout -- number of seconds before closing
|
|
||||||
keep-alive connection
|
|
||||||
|
|
||||||
tcp_keepalive -- TCP keep-alive is on, default is on
|
|
||||||
|
|
||||||
debug -- enable debug mode
|
|
||||||
|
|
||||||
logger -- custom logger object
|
|
||||||
|
|
||||||
access_log_class -- custom class for access_logger
|
|
||||||
|
|
||||||
access_log -- custom logging object
|
|
||||||
|
|
||||||
access_log_format -- access log format string
|
|
||||||
|
|
||||||
loop -- Optional event loop
|
|
||||||
|
|
||||||
max_line_size -- Optional maximum header line size
|
|
||||||
|
|
||||||
max_field_size -- Optional maximum header field size
|
|
||||||
|
|
||||||
max_headers -- Optional maximum header size
|
|
||||||
|
|
||||||
timeout_ceil_threshold -- Optional value to specify
|
|
||||||
threshold to ceil() timeout
|
|
||||||
values
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
KEEPALIVE_RESCHEDULE_DELAY = 1
|
|
||||||
|
|
||||||
__slots__ = (
|
|
||||||
"_request_count",
|
|
||||||
"_keepalive",
|
|
||||||
"_manager",
|
|
||||||
"_request_handler",
|
|
||||||
"_request_factory",
|
|
||||||
"_tcp_keepalive",
|
|
||||||
"_keepalive_time",
|
|
||||||
"_keepalive_handle",
|
|
||||||
"_keepalive_timeout",
|
|
||||||
"_lingering_time",
|
|
||||||
"_messages",
|
|
||||||
"_message_tail",
|
|
||||||
"_waiter",
|
|
||||||
"_task_handler",
|
|
||||||
"_upgrade",
|
|
||||||
"_payload_parser",
|
|
||||||
"_request_parser",
|
|
||||||
"_reading_paused",
|
|
||||||
"logger",
|
|
||||||
"debug",
|
|
||||||
"access_log",
|
|
||||||
"access_logger",
|
|
||||||
"_close",
|
|
||||||
"_force_close",
|
|
||||||
"_current_request",
|
|
||||||
"_timeout_ceil_threshold",
|
|
||||||
)
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
manager: "Server",
|
|
||||||
*,
|
|
||||||
loop: asyncio.AbstractEventLoop,
|
|
||||||
keepalive_timeout: float = 75.0, # NGINX default is 75 secs
|
|
||||||
tcp_keepalive: bool = True,
|
|
||||||
logger: Logger = server_logger,
|
|
||||||
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
|
|
||||||
access_log: Logger = access_logger,
|
|
||||||
access_log_format: str = AccessLogger.LOG_FORMAT,
|
|
||||||
debug: bool = False,
|
|
||||||
max_line_size: int = 8190,
|
|
||||||
max_headers: int = 32768,
|
|
||||||
max_field_size: int = 8190,
|
|
||||||
lingering_time: float = 10.0,
|
|
||||||
read_bufsize: int = 2**16,
|
|
||||||
auto_decompress: bool = True,
|
|
||||||
timeout_ceil_threshold: float = 5,
|
|
||||||
):
|
|
||||||
super().__init__(loop)
|
|
||||||
|
|
||||||
self._request_count = 0
|
|
||||||
self._keepalive = False
|
|
||||||
self._current_request: Optional[BaseRequest] = None
|
|
||||||
self._manager: Optional[Server] = manager
|
|
||||||
self._request_handler: Optional[_RequestHandler] = manager.request_handler
|
|
||||||
self._request_factory: Optional[_RequestFactory] = manager.request_factory
|
|
||||||
|
|
||||||
self._tcp_keepalive = tcp_keepalive
|
|
||||||
# placeholder to be replaced on keepalive timeout setup
|
|
||||||
self._keepalive_time = 0.0
|
|
||||||
self._keepalive_handle: Optional[asyncio.Handle] = None
|
|
||||||
self._keepalive_timeout = keepalive_timeout
|
|
||||||
self._lingering_time = float(lingering_time)
|
|
||||||
|
|
||||||
self._messages: Deque[_MsgType] = deque()
|
|
||||||
self._message_tail = b""
|
|
||||||
|
|
||||||
self._waiter: Optional[asyncio.Future[None]] = None
|
|
||||||
self._task_handler: Optional[asyncio.Task[None]] = None
|
|
||||||
|
|
||||||
self._upgrade = False
|
|
||||||
self._payload_parser: Any = None
|
|
||||||
self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
|
|
||||||
self,
|
|
||||||
loop,
|
|
||||||
read_bufsize,
|
|
||||||
max_line_size=max_line_size,
|
|
||||||
max_field_size=max_field_size,
|
|
||||||
max_headers=max_headers,
|
|
||||||
payload_exception=RequestPayloadError,
|
|
||||||
auto_decompress=auto_decompress,
|
|
||||||
)
|
|
||||||
|
|
||||||
self._timeout_ceil_threshold: float = 5
|
|
||||||
try:
|
|
||||||
self._timeout_ceil_threshold = float(timeout_ceil_threshold)
|
|
||||||
except (TypeError, ValueError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
self.logger = logger
|
|
||||||
self.debug = debug
|
|
||||||
self.access_log = access_log
|
|
||||||
if access_log:
|
|
||||||
self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
|
|
||||||
access_log, access_log_format
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
self.access_logger = None
|
|
||||||
|
|
||||||
self._close = False
|
|
||||||
self._force_close = False
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return "<{} {}>".format(
|
|
||||||
self.__class__.__name__,
|
|
||||||
"connected" if self.transport is not None else "disconnected",
|
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def keepalive_timeout(self) -> float:
|
|
||||||
return self._keepalive_timeout
|
|
||||||
|
|
||||||
async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
|
|
||||||
"""Do worker process exit preparations.
|
|
||||||
|
|
||||||
We need to clean up everything and stop accepting requests.
|
|
||||||
It is especially important for keep-alive connections.
|
|
||||||
"""
|
|
||||||
self._force_close = True
|
|
||||||
|
|
||||||
if self._keepalive_handle is not None:
|
|
||||||
self._keepalive_handle.cancel()
|
|
||||||
|
|
||||||
if self._waiter:
|
|
||||||
self._waiter.cancel()
|
|
||||||
|
|
||||||
# wait for handlers
|
|
||||||
with suppress(asyncio.CancelledError, asyncio.TimeoutError):
|
|
||||||
async with ceil_timeout(timeout):
|
|
||||||
if self._current_request is not None:
|
|
||||||
self._current_request._cancel(asyncio.CancelledError())
|
|
||||||
|
|
||||||
if self._task_handler is not None and not self._task_handler.done():
|
|
||||||
await self._task_handler
|
|
||||||
|
|
||||||
# force-close non-idle handler
|
|
||||||
if self._task_handler is not None:
|
|
||||||
self._task_handler.cancel()
|
|
||||||
|
|
||||||
if self.transport is not None:
|
|
||||||
self.transport.close()
|
|
||||||
self.transport = None
|
|
||||||
|
|
||||||
def connection_made(self, transport: asyncio.BaseTransport) -> None:
|
|
||||||
super().connection_made(transport)
|
|
||||||
|
|
||||||
real_transport = cast(asyncio.Transport, transport)
|
|
||||||
if self._tcp_keepalive:
|
|
||||||
tcp_keepalive(real_transport)
|
|
||||||
|
|
||||||
self._task_handler = self._loop.create_task(self.start())
|
|
||||||
assert self._manager is not None
|
|
||||||
self._manager.connection_made(self, real_transport)
|
|
||||||
|
|
||||||
def connection_lost(self, exc: Optional[BaseException]) -> None:
|
|
||||||
if self._manager is None:
|
|
||||||
return
|
|
||||||
self._manager.connection_lost(self, exc)
|
|
||||||
|
|
||||||
super().connection_lost(exc)
|
|
||||||
|
|
||||||
# Grab value before setting _manager to None.
|
|
||||||
handler_cancellation = self._manager.handler_cancellation
|
|
||||||
|
|
||||||
self._manager = None
|
|
||||||
self._force_close = True
|
|
||||||
self._request_factory = None
|
|
||||||
self._request_handler = None
|
|
||||||
self._request_parser = None
|
|
||||||
|
|
||||||
if self._keepalive_handle is not None:
|
|
||||||
self._keepalive_handle.cancel()
|
|
||||||
|
|
||||||
if self._current_request is not None:
|
|
||||||
if exc is None:
|
|
||||||
exc = ConnectionResetError("Connection lost")
|
|
||||||
self._current_request._cancel(exc)
|
|
||||||
|
|
||||||
if self._waiter is not None:
|
|
||||||
self._waiter.cancel()
|
|
||||||
|
|
||||||
if handler_cancellation and self._task_handler is not None:
|
|
||||||
self._task_handler.cancel()
|
|
||||||
|
|
||||||
self._task_handler = None
|
|
||||||
|
|
||||||
if self._payload_parser is not None:
|
|
||||||
self._payload_parser.feed_eof()
|
|
||||||
self._payload_parser = None
|
|
||||||
|
|
||||||
def set_parser(self, parser: Any) -> None:
|
|
||||||
# Actual type is WebReader
|
|
||||||
assert self._payload_parser is None
|
|
||||||
|
|
||||||
self._payload_parser = parser
|
|
||||||
|
|
||||||
if self._message_tail:
|
|
||||||
self._payload_parser.feed_data(self._message_tail)
|
|
||||||
self._message_tail = b""
|
|
||||||
|
|
||||||
def eof_received(self) -> None:
|
|
||||||
pass
|
|
||||||
|
|
||||||
    def data_received(self, data: bytes) -> None:
        if self._force_close or self._close:
            return
        # parse http messages
        messages: Sequence[_MsgType]
        if self._payload_parser is None and not self._upgrade:
            assert self._request_parser is not None
            try:
                messages, upgraded, tail = self._request_parser.feed_data(data)
            except HttpProcessingError as exc:
                messages = [
                    (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
                ]
                upgraded = False
                tail = b""

            for msg, payload in messages or ():
                self._request_count += 1
                self._messages.append((msg, payload))

            waiter = self._waiter
            if messages and waiter is not None and not waiter.done():
                # don't set result twice
                waiter.set_result(None)

            self._upgrade = upgraded
            if upgraded and tail:
                self._message_tail = tail

        # no parser, just store
        elif self._payload_parser is None and self._upgrade and data:
            self._message_tail += data

        # feed payload
        elif data:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self.close()

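    # Note (sketch, not part of the original file): when feed_data() raises
    # HttpProcessingError above, the error is wrapped into an _ErrInfo message
    # with status 400, so a malformed request line such as
    #
    #     GARBAGE / HTTP/1.1
    #
    # still flows through start() and is answered with "400 Bad Request"
    # by the error handler built in _make_error_handler().
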
    def keep_alive(self, val: bool) -> None:
        """Set keep-alive connection mode.

        :param bool val: new state.
        """
        self._keepalive = val
        if self._keepalive_handle:
            self._keepalive_handle.cancel()
            self._keepalive_handle = None

    def close(self) -> None:
        """Close connection.

        Stop accepting new pipelining messages and close the
        connection when handlers are done processing messages.
        """
        self._close = True
        if self._waiter:
            self._waiter.cancel()

    def force_close(self) -> None:
        """Forcefully close connection."""
        self._force_close = True
        if self._waiter:
            self._waiter.cancel()
        if self.transport is not None:
            self.transport.close()
            self.transport = None

    def log_access(
        self, request: BaseRequest, response: StreamResponse, time: float
    ) -> None:
        if self.access_logger is not None:
            self.access_logger.log(request, response, self._loop.time() - time)

    def log_debug(self, *args: Any, **kw: Any) -> None:
        if self.debug:
            self.logger.debug(*args, **kw)

    def log_exception(self, *args: Any, **kw: Any) -> None:
        self.logger.exception(*args, **kw)

    def _process_keepalive(self) -> None:
        if self._force_close or not self._keepalive:
            return

        next = self._keepalive_time + self._keepalive_timeout

        # handler in idle state
        if self._waiter:
            if self._loop.time() > next:
                self.force_close()
                return

        # not all request handlers are done,
        # reschedule itself to next second
        self._keepalive_handle = self._loop.call_later(
            self.KEEPALIVE_RESCHEDULE_DELAY,
            self._process_keepalive,
        )

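    # Worked example (sketch, not part of the original file): with
    # _keepalive_time = 100.0 and _keepalive_timeout = 75.0, the connection
    # becomes eligible for force_close() once loop.time() > 175.0 while the
    # handler sits idle in _waiter; otherwise the check re-arms itself after
    # KEEPALIVE_RESCHEDULE_DELAY seconds via loop.call_later().
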
    async def _handle_request(
        self,
        request: BaseRequest,
        start_time: float,
        request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
    ) -> Tuple[StreamResponse, bool]:
        assert self._request_handler is not None
        try:
            try:
                self._current_request = request
                resp = await request_handler(request)
            finally:
                self._current_request = None
        except HTTPException as exc:
            resp = exc
            reset = await self.finish_response(request, resp, start_time)
        except asyncio.CancelledError:
            raise
        except asyncio.TimeoutError as exc:
            self.log_debug("Request handler timed out.", exc_info=exc)
            resp = self.handle_error(request, 504)
            reset = await self.finish_response(request, resp, start_time)
        except Exception as exc:
            resp = self.handle_error(request, 500, exc)
            reset = await self.finish_response(request, resp, start_time)
        else:
            # Deprecation warning (See #2415)
            if getattr(resp, "__http_exception__", False):
                warnings.warn(
                    "returning HTTPException object is deprecated "
                    "(#2415) and will be removed, "
                    "please raise the exception instead",
                    DeprecationWarning,
                )

            reset = await self.finish_response(request, resp, start_time)

        return resp, reset

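    # Example (sketch, not part of the original file): per the deprecation
    # warning above (#2415), HTTP exceptions should be raised, not returned:
    #
    #     async def handler(request):
    #         raise web.HTTPNotFound()        # preferred
    #         # return web.HTTPNotFound()     # deprecated, triggers the warning
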
    async def start(self) -> None:
        """Process incoming request.

        It reads the request line, request headers and request payload, then
        calls the handle_request() method. Subclass has to override
        handle_request(). start() handles various exceptions in request
        or response handling. The connection is always closed unless
        keep_alive(True) is specified.
        """
        loop = self._loop
        handler = self._task_handler
        assert handler is not None
        manager = self._manager
        assert manager is not None
        keepalive_timeout = self._keepalive_timeout
        resp = None
        assert self._request_factory is not None
        assert self._request_handler is not None

        while not self._force_close:
            if not self._messages:
                try:
                    # wait for next request
                    self._waiter = loop.create_future()
                    await self._waiter
                except asyncio.CancelledError:
                    break
                finally:
                    self._waiter = None

            message, payload = self._messages.popleft()

            start = loop.time()

            manager.requests_count += 1
            writer = StreamWriter(self, loop)
            if isinstance(message, _ErrInfo):
                # make request_factory work
                request_handler = self._make_error_handler(message)
                message = ERROR
            else:
                request_handler = self._request_handler

            request = self._request_factory(message, payload, self, writer, handler)
            try:
                # a new task is used for copy context vars (#3406)
                task = self._loop.create_task(
                    self._handle_request(request, start, request_handler)
                )
                try:
                    resp, reset = await task
                except (asyncio.CancelledError, ConnectionError):
                    self.log_debug("Ignored premature client disconnection")
                    break

                # Drop the processed task from asyncio.Task.all_tasks() early
                del task
                if reset:
                    self.log_debug("Ignored premature client disconnection 2")
                    break

                # notify server about keep-alive
                self._keepalive = bool(resp.keep_alive)

                # check payload
                if not payload.is_eof():
                    lingering_time = self._lingering_time
                    if not self._force_close and lingering_time:
                        self.log_debug(
                            "Start lingering close timer for %s sec.", lingering_time
                        )

                        now = loop.time()
                        end_t = now + lingering_time

                        with suppress(asyncio.TimeoutError, asyncio.CancelledError):
                            while not payload.is_eof() and now < end_t:
                                async with ceil_timeout(end_t - now):
                                    # read and ignore
                                    await payload.readany()
                                now = loop.time()

                    # if payload still uncompleted
                    if not payload.is_eof() and not self._force_close:
                        self.log_debug("Uncompleted request.")
                        self.close()

                    payload.set_exception(PayloadAccessError())

            except asyncio.CancelledError:
                self.log_debug("Ignored premature client disconnection")
                break
            except RuntimeError as exc:
                if self.debug:
                    self.log_exception("Unhandled runtime exception", exc_info=exc)
                self.force_close()
            except Exception as exc:
                self.log_exception("Unhandled exception", exc_info=exc)
                self.force_close()
            finally:
                if self.transport is None and resp is not None:
                    self.log_debug("Ignored premature client disconnection.")
                elif not self._force_close:
                    if self._keepalive and not self._close:
                        # start keep-alive timer
                        if keepalive_timeout is not None:
                            now = self._loop.time()
                            self._keepalive_time = now
                            if self._keepalive_handle is None:
                                self._keepalive_handle = loop.call_at(
                                    now + keepalive_timeout, self._process_keepalive
                                )
                    else:
                        break

        # remove handler, close transport if no handlers left
        if not self._force_close:
            self._task_handler = None
            if self.transport is not None:
                self.transport.close()

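    # Note (sketch, not part of the original file): the per-request task
    # created above (#3406) matters because asyncio.Task copies the current
    # contextvars.Context at creation, so a handler can set a ContextVar
    # without leaking it into the next pipelined request on this connection:
    #
    #     request_id: contextvars.ContextVar[str] = contextvars.ContextVar("request_id")
    #     # set inside one handler, invisible to the next request's task
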
    async def finish_response(
        self, request: BaseRequest, resp: StreamResponse, start_time: float
    ) -> bool:
        """Prepare the response and write_eof, then log access.

        This has to be called within the context of any exception so the
        access logger can get exception information. Returns True if the
        client disconnects prematurely.
        """
        if self._request_parser is not None:
            self._request_parser.set_upgraded(False)
            self._upgrade = False
            if self._message_tail:
                self._request_parser.feed_data(self._message_tail)
                self._message_tail = b""
        try:
            prepare_meth = resp.prepare
        except AttributeError:
            if resp is None:
                raise RuntimeError("Missing return statement on request handler")
            else:
                raise RuntimeError(
                    "Web-handler should return a response instance, "
                    "got {!r}".format(resp)
                )
        try:
            await prepare_meth(request)
            await resp.write_eof()
        except ConnectionError:
            self.log_access(request, resp, start_time)
            return True
        else:
            self.log_access(request, resp, start_time)
            return False

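    # Example (sketch, not part of the original file): the AttributeError
    # branch above is what surfaces the classic mistake of a handler without
    # a return statement:
    #
    #     async def broken(request):
    #         web.Response(text="oops")   # missing `return` -> RuntimeError(
    #                                     # "Missing return statement on
    #                                     #  request handler")
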
    def handle_error(
        self,
        request: BaseRequest,
        status: int = 500,
        exc: Optional[BaseException] = None,
        message: Optional[str] = None,
    ) -> StreamResponse:
        """Handle errors.

        Returns HTTP response with specific status code. Logs additional
        information. It always closes current connection.
        """
        self.log_exception("Error handling request", exc_info=exc)

        # some data already got sent, connection is broken
        if request.writer.output_size > 0:
            raise ConnectionError(
                "Response is sent already, cannot send another response "
                "with the error message"
            )

        ct = "text/plain"
        if status == HTTPStatus.INTERNAL_SERVER_ERROR:
            title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
            msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
            tb = None
            if self.debug:
                with suppress(Exception):
                    tb = traceback.format_exc()

            if "text/html" in request.headers.get("Accept", ""):
                if tb:
                    tb = html_escape(tb)
                    msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
                message = (
                    "<html><head>"
                    "<title>{title}</title>"
                    "</head><body>\n<h1>{title}</h1>"
                    "\n{msg}\n</body></html>\n"
                ).format(title=title, msg=msg)
                ct = "text/html"
            else:
                if tb:
                    msg = tb
                message = title + "\n\n" + msg

        resp = Response(status=status, text=message, content_type=ct)
        resp.force_close()

        return resp

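    # Note (sketch, not part of the original file): in debug mode the 500
    # branch above embeds the traceback, and the representation depends on
    # content negotiation -- a client sending "Accept: text/html" gets the
    # HTML-escaped <pre> traceback, anything else gets plain text.
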
    def _make_error_handler(
        self, err_info: _ErrInfo
    ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
        async def handler(request: BaseRequest) -> StreamResponse:
            return self.handle_error(
                request, err_info.status, err_info.exc, err_info.message
            )

        return handler
@@ -1,898 +0,0 @@
import asyncio
import datetime
import io
import re
import socket
import string
import tempfile
import types
import warnings
from http.cookies import SimpleCookie
from types import MappingProxyType
from typing import (
    TYPE_CHECKING,
    Any,
    Dict,
    Final,
    Iterator,
    Mapping,
    MutableMapping,
    Optional,
    Pattern,
    Tuple,
    Union,
    cast,
)
from urllib.parse import parse_qsl

import attr
from multidict import (
    CIMultiDict,
    CIMultiDictProxy,
    MultiDict,
    MultiDictProxy,
    MultiMapping,
)
from yarl import URL

from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import (
    _SENTINEL,
    DEBUG,
    ETAG_ANY,
    LIST_QUOTED_ETAG_RE,
    ChainMapProxy,
    ETag,
    HeadersMixin,
    parse_http_date,
    reify,
    sentinel,
)
from .http_parser import RawRequestMessage
from .http_writer import HttpVersion
from .multipart import BodyPartReader, MultipartReader
from .streams import EmptyStreamReader, StreamReader
from .typedefs import (
    DEFAULT_JSON_DECODER,
    JSONDecoder,
    LooseHeaders,
    RawHeaders,
    StrOrURL,
)
from .web_exceptions import HTTPRequestEntityTooLarge
from .web_response import StreamResponse

__all__ = ("BaseRequest", "FileField", "Request")


if TYPE_CHECKING:  # pragma: no cover
    from .web_app import Application
    from .web_protocol import RequestHandler
    from .web_urldispatcher import UrlMappingMatchInfo


@attr.s(auto_attribs=True, frozen=True, slots=True)
class FileField:
    name: str
    filename: str
    file: io.BufferedReader
    content_type: str
    headers: "CIMultiDictProxy[str]"


_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
# '-' at the end to prevent interpretation as range in a char class

_TOKEN: Final[str] = rf"[{_TCHAR}]+"

_QDTEXT: Final[str] = r"[{}]".format(
    r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
)
# qdtext includes 0x5C to escape 0x5D ('\]')
# qdtext excludes obs-text (because obsoleted, and encoding not specified)

_QUOTED_PAIR: Final[str] = r"\\[\t !-~]"

_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format(
    qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR
)

_FORWARDED_PAIR: Final[
    str
] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
    token=_TOKEN, quoted_string=_QUOTED_STRING
)

_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])")
# same pattern as _QUOTED_PAIR but contains a capture group

_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR)

############################################################
# HTTP Request
############################################################

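# Example (sketch, not part of the original file): a single RFC 7239
# field-value such as
#
#     for=192.0.2.60;proto=http;by=203.0.113.43
#
# is consumed pair by pair by _FORWARDED_PAIR_RE -- each pair is
# token "=" (token / quoted-string), e.g. for="[2001:db8::1]:8080" uses the
# quoted-string form. Parsing of a full header happens in
# BaseRequest.forwarded below.
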
class BaseRequest(MutableMapping[str, Any], HeadersMixin):

    POST_METHODS = {
        hdrs.METH_PATCH,
        hdrs.METH_POST,
        hdrs.METH_PUT,
        hdrs.METH_TRACE,
        hdrs.METH_DELETE,
    }

    ATTRS = HeadersMixin.ATTRS | frozenset(
        [
            "_message",
            "_protocol",
            "_payload_writer",
            "_payload",
            "_headers",
            "_method",
            "_version",
            "_rel_url",
            "_post",
            "_read_bytes",
            "_state",
            "_cache",
            "_task",
            "_client_max_size",
            "_loop",
            "_transport_sslcontext",
            "_transport_peername",
        ]
    )

    def __init__(
        self,
        message: RawRequestMessage,
        payload: StreamReader,
        protocol: "RequestHandler",
        payload_writer: AbstractStreamWriter,
        task: "asyncio.Task[None]",
        loop: asyncio.AbstractEventLoop,
        *,
        client_max_size: int = 1024**2,
        state: Optional[Dict[str, Any]] = None,
        scheme: Optional[str] = None,
        host: Optional[str] = None,
        remote: Optional[str] = None,
    ) -> None:
        if state is None:
            state = {}
        self._message = message
        self._protocol = protocol
        self._payload_writer = payload_writer

        self._payload = payload
        self._headers = message.headers
        self._method = message.method
        self._version = message.version
        self._cache: Dict[str, Any] = {}
        url = message.url
        if url.is_absolute():
            # absolute URL is given,
            # override auto-calculating url, host, and scheme
            # all other properties should be good
            self._cache["url"] = url
            self._cache["host"] = url.host
            self._cache["scheme"] = url.scheme
            self._rel_url = url.relative()
        else:
            self._rel_url = message.url
        self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None
        self._read_bytes: Optional[bytes] = None

        self._state = state
        self._task = task
        self._client_max_size = client_max_size
        self._loop = loop

        transport = self._protocol.transport
        assert transport is not None
        self._transport_sslcontext = transport.get_extra_info("sslcontext")
        self._transport_peername = transport.get_extra_info("peername")

        if scheme is not None:
            self._cache["scheme"] = scheme
        if host is not None:
            self._cache["host"] = host
        if remote is not None:
            self._cache["remote"] = remote

    def clone(
        self,
        *,
        method: Union[str, _SENTINEL] = sentinel,
        rel_url: Union[StrOrURL, _SENTINEL] = sentinel,
        headers: Union[LooseHeaders, _SENTINEL] = sentinel,
        scheme: Union[str, _SENTINEL] = sentinel,
        host: Union[str, _SENTINEL] = sentinel,
        remote: Union[str, _SENTINEL] = sentinel,
        client_max_size: Union[int, _SENTINEL] = sentinel,
    ) -> "BaseRequest":
        """Clone itself, replacing some attributes.

        Creates and returns a new instance of Request object. If no parameters
        are given, an exact copy is returned. If a parameter is not passed, it
        will reuse the one from the current request object.
        """
        if self._read_bytes:
            raise RuntimeError("Cannot clone request after reading its content")

        dct: Dict[str, Any] = {}
        if method is not sentinel:
            dct["method"] = method
        if rel_url is not sentinel:
            new_url: URL = URL(rel_url)
            dct["url"] = new_url
            dct["path"] = str(new_url)
        if headers is not sentinel:
            # a copy semantic
            dct["headers"] = CIMultiDictProxy(CIMultiDict(headers))
            dct["raw_headers"] = tuple(
                (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
            )

        message = self._message._replace(**dct)

        kwargs = {}
        if scheme is not sentinel:
            kwargs["scheme"] = scheme
        if host is not sentinel:
            kwargs["host"] = host
        if remote is not sentinel:
            kwargs["remote"] = remote
        if client_max_size is sentinel:
            client_max_size = self._client_max_size

        return self.__class__(
            message,
            self._payload,
            self._protocol,
            self._payload_writer,
            self._task,
            self._loop,
            client_max_size=client_max_size,
            state=self._state.copy(),
            **kwargs,
        )

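    # Example (sketch, not part of the original file): clone() is the
    # supported way to rewrite an immutable request, e.g. in a middleware
    # that trusts a reverse proxy (the middleware below is hypothetical):
    #
    #     @web.middleware
    #     async def trust_proxy(request, handler):
    #         forwarded_proto = request.headers.get("X-Forwarded-Proto")
    #         if forwarded_proto:
    #             request = request.clone(scheme=forwarded_proto)
    #         return await handler(request)
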
    @property
    def task(self) -> "asyncio.Task[None]":
        return self._task

    @property
    def protocol(self) -> "RequestHandler":
        return self._protocol

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        if self._protocol is None:
            return None
        return self._protocol.transport

    @property
    def writer(self) -> AbstractStreamWriter:
        return self._payload_writer

    @property
    def client_max_size(self) -> int:
        return self._client_max_size

    @reify
    def message(self) -> RawRequestMessage:
        warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3)
        return self._message

    @reify
    def rel_url(self) -> URL:
        return self._rel_url

    @reify
    def loop(self) -> asyncio.AbstractEventLoop:
        warnings.warn(
            "request.loop property is deprecated", DeprecationWarning, stacklevel=2
        )
        return self._loop

    # MutableMapping API

    def __getitem__(self, key: str) -> Any:
        return self._state[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self._state[key] = value

    def __delitem__(self, key: str) -> None:
        del self._state[key]

    def __len__(self) -> int:
        return len(self._state)

    def __iter__(self) -> Iterator[str]:
        return iter(self._state)

    ########

    @reify
    def secure(self) -> bool:
        """A bool indicating if the request is handled with SSL."""
        return self.scheme == "https"

    @reify
    def forwarded(self) -> Tuple[Mapping[str, str], ...]:
        """A tuple containing all parsed Forwarded header(s).

        Makes an effort to parse Forwarded headers as specified by RFC 7239:

        - It adds one (immutable) dictionary per Forwarded 'field-value', ie
          per proxy. The element corresponds to the data in the Forwarded
          field-value added by the first proxy encountered by the client. Each
          subsequent item corresponds to those added by later proxies.
        - It checks that every value has valid syntax in general as specified
          in section 4: either a 'token' or a 'quoted-string'.
        - It un-escapes found escape sequences.
        - It does NOT validate 'by' and 'for' contents as specified in section
          6.
        - It does NOT validate 'host' contents (Host ABNF).
        - It does NOT validate 'proto' contents for valid URI scheme names.

        Returns a tuple containing one or more immutable dicts
        """
        elems = []
        for field_value in self._message.headers.getall(hdrs.FORWARDED, ()):
            length = len(field_value)
            pos = 0
            need_separator = False
            elem: Dict[str, str] = {}
            elems.append(types.MappingProxyType(elem))
            while 0 <= pos < length:
                match = _FORWARDED_PAIR_RE.match(field_value, pos)
                if match is not None:  # got a valid forwarded-pair
                    if need_separator:
                        # bad syntax here, skip to next comma
                        pos = field_value.find(",", pos)
                    else:
                        name, value, port = match.groups()
                        if value[0] == '"':
                            # quoted string: remove quotes and unescape
                            value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1])
                        if port:
                            value += port
                        elem[name.lower()] = value
                        pos += len(match.group(0))
                        need_separator = True
                elif field_value[pos] == ",":  # next forwarded-element
                    need_separator = False
                    elem = {}
                    elems.append(types.MappingProxyType(elem))
                    pos += 1
                elif field_value[pos] == ";":  # next forwarded-pair
                    need_separator = False
                    pos += 1
                elif field_value[pos] in " \t":
                    # Allow whitespace even between forwarded-pairs, though
                    # RFC 7239 doesn't. This simplifies code and is in line
                    # with Postel's law.
                    pos += 1
                else:
                    # bad syntax here, skip to next comma
                    pos = field_value.find(",", pos)
        return tuple(elems)

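    # Worked example (sketch, not part of the original file): with
    #
    #     Forwarded: for=192.0.2.60;proto=http, for=198.51.100.17
    #
    # request.forwarded evaluates to two mapping proxies, roughly
    # ({'for': '192.0.2.60', 'proto': 'http'}, {'for': '198.51.100.17'}),
    # one per proxy hop, in the order the proxies were traversed.
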
    @reify
    def scheme(self) -> str:
        """A string representing the scheme of the request.

        The scheme is resolved in this order:

        - overridden value by .clone(scheme=new_scheme) call.
        - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise.

        'http' or 'https'.
        """
        if self._transport_sslcontext:
            return "https"
        else:
            return "http"

    @reify
    def method(self) -> str:
        """Read only property for getting HTTP method.

        The value is upper-cased str like 'GET', 'POST', 'PUT' etc.
        """
        return self._method

    @reify
    def version(self) -> HttpVersion:
        """Read only property for getting HTTP version of request.

        Returns aiohttp.protocol.HttpVersion instance.
        """
        return self._version

    @reify
    def host(self) -> str:
        """Hostname of the request.

        Hostname is resolved in this order:

        - overridden value by .clone(host=new_host) call.
        - HOST HTTP header
        - socket.getfqdn() value
        """
        host = self._message.headers.get(hdrs.HOST)
        if host is not None:
            return host
        return socket.getfqdn()

    @reify
    def remote(self) -> Optional[str]:
        """Remote IP of client initiated HTTP request.

        The IP is resolved in this order:

        - overridden value by .clone(remote=new_remote) call.
        - peername of opened socket
        """
        if self._transport_peername is None:
            return None
        if isinstance(self._transport_peername, (list, tuple)):
            return str(self._transport_peername[0])
        return str(self._transport_peername)

    @reify
    def url(self) -> URL:
        url = URL.build(scheme=self.scheme, host=self.host)
        return url.join(self._rel_url)

    @reify
    def path(self) -> str:
        """The URL including *PATH INFO* without the host or scheme.

        E.g., ``/app/blog``
        """
        return self._rel_url.path

    @reify
    def path_qs(self) -> str:
        """The URL including PATH_INFO and the query string.

        E.g., /app/blog?id=10
        """
        return str(self._rel_url)

    @reify
    def raw_path(self) -> str:
        """The URL including raw *PATH INFO* without the host or scheme.

        Warning: the path is unquoted and may contain invalid URL characters.

        E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
        """
        return self._message.path

    @reify
    def query(self) -> "MultiMapping[str]":
        """A multidict with all the variables in the query string."""
        return MultiDictProxy(self._rel_url.query)

    @reify
    def query_string(self) -> str:
        """The query string in the URL.

        E.g., id=10
        """
        return self._rel_url.query_string

    @reify
    def headers(self) -> "MultiMapping[str]":
        """A case-insensitive multidict proxy with all headers."""
        return self._headers

    @reify
    def raw_headers(self) -> RawHeaders:
        """A sequence of pairs for all headers."""
        return self._message.raw_headers

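    # Worked example (sketch, not part of the original file): for
    # GET /app/blog?id=10 these properties decompose as
    #
    #     request.path          == "/app/blog"
    #     request.path_qs       == "/app/blog?id=10"
    #     request.query_string  == "id=10"
    #     request.query["id"]   == "10"
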
    @reify
    def if_modified_since(self) -> Optional[datetime.datetime]:
        """The value of If-Modified-Since HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))

    @reify
    def if_unmodified_since(self) -> Optional[datetime.datetime]:
        """The value of If-Unmodified-Since HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))

    @staticmethod
    def _etag_values(etag_header: str) -> Iterator[ETag]:
        """Extract `ETag` objects from raw header."""
        if etag_header == ETAG_ANY:
            yield ETag(
                is_weak=False,
                value=ETAG_ANY,
            )
        else:
            for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
                is_weak, value, garbage = match.group(2, 3, 4)
                # Any symbol captured by 4th group means
                # that the following sequence is invalid.
                if garbage:
                    break

                yield ETag(
                    is_weak=bool(is_weak),
                    value=value,
                )

    @classmethod
    def _if_match_or_none_impl(
        cls, header_value: Optional[str]
    ) -> Optional[Tuple[ETag, ...]]:
        if not header_value:
            return None

        return tuple(cls._etag_values(header_value))

    @reify
    def if_match(self) -> Optional[Tuple[ETag, ...]]:
        """The value of If-Match HTTP header, or None.

        This header is represented as a `tuple` of `ETag` objects.
        """
        return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))

    @reify
    def if_none_match(self) -> Optional[Tuple[ETag, ...]]:
        """The value of If-None-Match HTTP header, or None.

        This header is represented as a `tuple` of `ETag` objects.
        """
        return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH))

    @reify
    def if_range(self) -> Optional[datetime.datetime]:
        """The value of If-Range HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self.headers.get(hdrs.IF_RANGE))

    @reify
    def keep_alive(self) -> bool:
        """Is keepalive enabled by client?"""
        return not self._message.should_close

    @reify
    def cookies(self) -> Mapping[str, str]:
        """Return request cookies.

        A read-only dictionary-like object.
        """
        raw = self.headers.get(hdrs.COOKIE, "")
        parsed = SimpleCookie(raw)
        return MappingProxyType({key: val.value for key, val in parsed.items()})

    @reify
    def http_range(self) -> slice:
        """The content of Range HTTP header.

        Return a slice instance.
        """
        rng = self._headers.get(hdrs.RANGE)
        start, end = None, None
        if rng is not None:
            try:
                pattern = r"^bytes=(\d*)-(\d*)$"
                start, end = re.findall(pattern, rng)[0]
            except IndexError:  # pattern was not found in header
                raise ValueError("range not in acceptable format")

            end = int(end) if end else None
            start = int(start) if start else None

            if start is None and end is not None:
                # end with no start is to return tail of content
                start = -end
                end = None

            if start is not None and end is not None:
                # end is inclusive in range header, exclusive for slice
                end += 1

                if start >= end:
                    raise ValueError("start cannot be after end")

            if start is end is None:  # No valid range supplied
                raise ValueError("No start or end of range specified")

        return slice(start, end, 1)

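    # Worked example (sketch, not part of the original file), following the
    # arithmetic above:
    #
    #     Range: bytes=0-499   ->  slice(0, 500, 1)      (inclusive end + 1)
    #     Range: bytes=500-    ->  slice(500, None, 1)
    #     Range: bytes=-500    ->  slice(-500, None, 1)  (tail of content)
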
    @reify
    def content(self) -> StreamReader:
        """Return raw payload stream."""
        return self._payload

    @property
    def has_body(self) -> bool:
        """Return True if request's HTTP BODY can be read, False otherwise."""
        warnings.warn(
            "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2
        )
        return not self._payload.at_eof()

    @property
    def can_read_body(self) -> bool:
        """Return True if request's HTTP BODY can be read, False otherwise."""
        return not self._payload.at_eof()

    @reify
    def body_exists(self) -> bool:
        """Return True if request has HTTP BODY, False otherwise."""
        return type(self._payload) is not EmptyStreamReader

    async def release(self) -> None:
        """Release request.

        Eat unread part of HTTP BODY if present.
        """
        while not self._payload.at_eof():
            await self._payload.readany()

    async def read(self) -> bytes:
        """Read request body if present.

        Returns bytes object with full request content.
        """
        if self._read_bytes is None:
            body = bytearray()
            while True:
                chunk = await self._payload.readany()
                body.extend(chunk)
                if self._client_max_size:
                    body_size = len(body)
                    if body_size >= self._client_max_size:
                        raise HTTPRequestEntityTooLarge(
                            max_size=self._client_max_size, actual_size=body_size
                        )
                if not chunk:
                    break
            self._read_bytes = bytes(body)
        return self._read_bytes

    async def text(self) -> str:
        """Return BODY as text using encoding from .charset."""
        bytes_body = await self.read()
        encoding = self.charset or "utf-8"
        return bytes_body.decode(encoding)

    async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any:
        """Return BODY as JSON."""
        body = await self.text()
        return loads(body)

    async def multipart(self) -> MultipartReader:
        """Return async iterator to process BODY as multipart."""
        return MultipartReader(self._headers, self._payload)

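    # Example (sketch, not part of the original file): a handler consuming
    # the body helpers above; read()/text()/json() cache the raw body in
    # _read_bytes, so they may be awaited more than once:
    #
    #     async def handler(request):
    #         payload = await request.json()   # decoded via DEFAULT_JSON_DECODER
    #         return web.json_response({"got": payload})
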
    async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
        """Return POST parameters."""
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if content_type not in (
            "",
            "application/x-www-form-urlencoded",
            "multipart/form-data",
        ):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out: MultiDict[Union[str, bytes, FileField]] = MultiDict()

        if content_type == "multipart/form-data":
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                field_ct = field.headers.get(hdrs.CONTENT_TYPE)

                if isinstance(field, BodyPartReader):
                    assert field.name is not None

                    # Note that according to RFC 7578, the Content-Type header
                    # is optional, even for files, so we can't assume it's
                    # present.
                    # https://tools.ietf.org/html/rfc7578#section-4.4
                    if field.filename:
                        # store file in temp file
                        tmp = tempfile.TemporaryFile()
                        chunk = await field.read_chunk(size=2**16)
                        while chunk:
                            chunk = field.decode(chunk)
                            tmp.write(chunk)
                            size += len(chunk)
                            if 0 < max_size < size:
                                tmp.close()
                                raise HTTPRequestEntityTooLarge(
                                    max_size=max_size, actual_size=size
                                )
                            chunk = await field.read_chunk(size=2**16)
                        tmp.seek(0)

                        if field_ct is None:
                            field_ct = "application/octet-stream"

                        ff = FileField(
                            field.name,
                            field.filename,
                            cast(io.BufferedReader, tmp),
                            field_ct,
                            field.headers,
                        )
                        out.add(field.name, ff)
                    else:
                        # deal with ordinary data
                        value = await field.read(decode=True)
                        if field_ct is None or field_ct.startswith("text/"):
                            charset = field.get_charset(default="utf-8")
                            out.add(field.name, value.decode(charset))
                        else:
                            out.add(field.name, value)
                        size += len(value)
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(
                                max_size=max_size, actual_size=size
                            )
                else:
                    raise ValueError(
                        "To decode nested multipart you need to use custom reader",
                    )

                field = await multipart.next()
        else:
            data = await self.read()
            if data:
                charset = self.charset or "utf-8"
                out.extend(
                    parse_qsl(
                        data.rstrip().decode(charset),
                        keep_blank_values=True,
                        encoding=charset,
                    )
                )

        self._post = MultiDictProxy(out)
        return self._post

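    # Example (sketch, not part of the original file): consuming a
    # multipart/form-data upload via post(); file parts arrive as the
    # FileField attrs class defined above ("avatar" is a hypothetical
    # form field name):
    #
    #     async def upload(request):
    #         data = await request.post()
    #         field = data["avatar"]
    #         content = field.file.read()  # spooled temp file
    #         return web.Response(text=f"{len(content)} bytes received")
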
    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """Extra info from protocol transport"""
        protocol = self._protocol
        if protocol is None:
            return default

        transport = protocol.transport
        if transport is None:
            return default

        return transport.get_extra_info(name, default)

    def __repr__(self) -> str:
        ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode(
            "ascii"
        )
        return "<{} {} {} >".format(
            self.__class__.__name__, self._method, ascii_encodable_path
        )

    def __eq__(self, other: object) -> bool:
        return id(self) == id(other)

    def __bool__(self) -> bool:
        return True

    async def _prepare_hook(self, response: StreamResponse) -> None:
        return

    def _cancel(self, exc: BaseException) -> None:
        self._payload.set_exception(exc)


class Request(BaseRequest):

    ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"])

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)

        # matchdict, route_name, handler
        # or information about traversal lookup

        # initialized after route resolving
        self._match_info: Optional[UrlMappingMatchInfo] = None

    if DEBUG:

        def __setattr__(self, name: str, val: Any) -> None:
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom {}.{} attribute "
                    "is discouraged".format(self.__class__.__name__, name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            super().__setattr__(name, val)

    def clone(
        self,
        *,
        method: Union[str, _SENTINEL] = sentinel,
        rel_url: Union[StrOrURL, _SENTINEL] = sentinel,
        headers: Union[LooseHeaders, _SENTINEL] = sentinel,
        scheme: Union[str, _SENTINEL] = sentinel,
        host: Union[str, _SENTINEL] = sentinel,
        remote: Union[str, _SENTINEL] = sentinel,
        client_max_size: Union[int, _SENTINEL] = sentinel,
    ) -> "Request":
        ret = super().clone(
            method=method,
            rel_url=rel_url,
            headers=headers,
            scheme=scheme,
            host=host,
            remote=remote,
            client_max_size=client_max_size,
        )
        new_ret = cast(Request, ret)
        new_ret._match_info = self._match_info
        return new_ret

    @reify
    def match_info(self) -> "UrlMappingMatchInfo":
        """Result of route resolving."""
        match_info = self._match_info
        assert match_info is not None
        return match_info

    @property
    def app(self) -> "Application":
        """Application instance."""
        match_info = self._match_info
        assert match_info is not None
        return match_info.current_app

    @property
    def config_dict(self) -> ChainMapProxy:
        match_info = self._match_info
        assert match_info is not None
        lst = match_info.apps
        app = self.app
        idx = lst.index(app)
        sublist = list(reversed(lst[: idx + 1]))
        return ChainMapProxy(sublist)

    async def _prepare_hook(self, response: StreamResponse) -> None:
        match_info = self._match_info
        if match_info is None:
            return
        for app in match_info._apps:
            await app.on_response_prepare.send(self, response)
@@ -1,817 +0,0 @@
import asyncio
import collections.abc
import datetime
import enum
import json
import math
import time
import warnings
from concurrent.futures import Executor
from http import HTTPStatus
from http.cookies import SimpleCookie
from typing import (
    TYPE_CHECKING,
    Any,
    Dict,
    Iterator,
    MutableMapping,
    Optional,
    Union,
    cast,
)

from multidict import CIMultiDict, istr

from . import hdrs, payload
from .abc import AbstractStreamWriter
from .compression_utils import ZLibCompressor
from .helpers import (
    ETAG_ANY,
    QUOTED_ETAG_RE,
    ETag,
    HeadersMixin,
    must_be_empty_body,
    parse_http_date,
    rfc822_formatted_time,
    sentinel,
    should_remove_content_length,
    validate_etag_value,
)
from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11
from .payload import Payload
from .typedefs import JSONEncoder, LooseHeaders

__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")


if TYPE_CHECKING:  # pragma: no cover
    from .web_request import BaseRequest

    BaseClass = MutableMapping[str, Any]
else:
    BaseClass = collections.abc.MutableMapping


class ContentCoding(enum.Enum):
    # The content codings that we have support for.
    #
    # Additional registered codings are listed at:
    # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
    deflate = "deflate"
    gzip = "gzip"
    identity = "identity"


############################################################
# HTTP Response classes
############################################################

class StreamResponse(BaseClass, HeadersMixin):

    _length_check = True

    def __init__(
        self,
        *,
        status: int = 200,
        reason: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        self._body = None
        self._keep_alive: Optional[bool] = None
        self._chunked = False
        self._compression = False
        self._compression_force: Optional[ContentCoding] = None
        self._cookies = SimpleCookie()

        self._req: Optional[BaseRequest] = None
        self._payload_writer: Optional[AbstractStreamWriter] = None
        self._eof_sent = False
        self._must_be_empty_body: Optional[bool] = None
        self._body_length = 0
        self._state: Dict[str, Any] = {}

        if headers is not None:
            self._headers: CIMultiDict[str] = CIMultiDict(headers)
        else:
            self._headers = CIMultiDict()

        self.set_status(status, reason)

    @property
    def prepared(self) -> bool:
        return self._payload_writer is not None

    @property
    def task(self) -> "Optional[asyncio.Task[None]]":
        if self._req:
            return self._req.task
        else:
            return None

    @property
    def status(self) -> int:
        return self._status

    @property
    def chunked(self) -> bool:
        return self._chunked

    @property
    def compression(self) -> bool:
        return self._compression

    @property
    def reason(self) -> str:
        return self._reason

    def set_status(
        self,
        status: int,
        reason: Optional[str] = None,
    ) -> None:
        assert not self.prepared, (
            "Cannot change the response status code after the headers have been sent"
        )
        self._status = int(status)
        if reason is None:
            try:
                reason = HTTPStatus(self._status).phrase
            except ValueError:
                reason = ""
        self._reason = reason

    @property
    def keep_alive(self) -> Optional[bool]:
        return self._keep_alive

    def force_close(self) -> None:
        self._keep_alive = False

    @property
    def body_length(self) -> int:
        return self._body_length

    @property
    def output_length(self) -> int:
        warnings.warn("output_length is deprecated", DeprecationWarning)
        assert self._payload_writer
        return self._payload_writer.buffer_size

    def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
        """Enables automatic chunked transfer encoding."""
        self._chunked = True

        if hdrs.CONTENT_LENGTH in self._headers:
            raise RuntimeError(
                "You can't enable chunked encoding when a content length is set"
            )
        if chunk_size is not None:
            warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)

    def enable_compression(
        self, force: Optional[Union[bool, ContentCoding]] = None
    ) -> None:
        """Enables response compression encoding."""
        # Backwards compatibility for when force was a bool <0.17.
        if type(force) == bool:
            force = ContentCoding.deflate if force else ContentCoding.identity
            warnings.warn(
                "Using boolean for force is deprecated #3318", DeprecationWarning
            )
        elif force is not None:
            assert isinstance(force, ContentCoding), (
                "force should be one of None, bool or ContentCoding"
            )

        self._compression = True
        self._compression_force = force

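    # Example (sketch, not part of the original file): a streaming handler
    # that opts in to chunked transfer and negotiated compression:
    #
    #     async def handler(request):
    #         resp = web.StreamResponse()
    #         resp.enable_compression()        # coding picked from Accept-Encoding
    #         resp.enable_chunked_encoding()   # requires no Content-Length set
    #         await resp.prepare(request)
    #         await resp.write(b"chunk")
    #         await resp.write_eof()
    #         return resp
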
    @property
    def headers(self) -> "CIMultiDict[str]":
        return self._headers

    @property
    def cookies(self) -> SimpleCookie:
        return self._cookies

    def set_cookie(
        self,
        name: str,
        value: str,
        *,
        expires: Optional[str] = None,
        domain: Optional[str] = None,
        max_age: Optional[Union[int, str]] = None,
        path: str = "/",
        secure: Optional[bool] = None,
        httponly: Optional[bool] = None,
        version: Optional[str] = None,
        samesite: Optional[str] = None,
    ) -> None:
        """Set or update response cookie.

        Sets a new cookie or updates an existing one with a new value.
        Also updates only those params which are not None.
        """
        old = self._cookies.get(name)
        if old is not None and old.coded_value == "":
            # deleted cookie
            self._cookies.pop(name, None)

        self._cookies[name] = value
        c = self._cookies[name]

        if expires is not None:
            c["expires"] = expires
        elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
            del c["expires"]

        if domain is not None:
            c["domain"] = domain

        if max_age is not None:
            c["max-age"] = str(max_age)
        elif "max-age" in c:
            del c["max-age"]

        c["path"] = path

        if secure is not None:
            c["secure"] = secure
        if httponly is not None:
            c["httponly"] = httponly
        if version is not None:
            c["version"] = version
        if samesite is not None:
            c["samesite"] = samesite

    def del_cookie(
        self, name: str, *, domain: Optional[str] = None, path: str = "/"
    ) -> None:
        """Delete cookie.

        Creates a new empty expired cookie.
        """
        # TODO: do we need domain/path here?
        self._cookies.pop(name, None)
        self.set_cookie(
            name,
            "",
            max_age=0,
            expires="Thu, 01 Jan 1970 00:00:00 GMT",
            domain=domain,
            path=path,
        )

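    # Example (sketch, not part of the original file): setting and deleting
    # cookies on a response; del_cookie() works by emitting an already
    # expired empty cookie, as implemented above:
    #
    #     resp = web.Response(text="ok")
    #     resp.set_cookie("session", "abc123", max_age=3600, httponly=True)
    #     resp.del_cookie("legacy_session")
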
    @property
    def content_length(self) -> Optional[int]:
        # Just a placeholder for adding setter
        return super().content_length

    @content_length.setter
    def content_length(self, value: Optional[int]) -> None:
        if value is not None:
            value = int(value)
            if self._chunked:
                raise RuntimeError(
                    "You can't set content length when chunked encoding is enabled"
                )
            self._headers[hdrs.CONTENT_LENGTH] = str(value)
        else:
            self._headers.pop(hdrs.CONTENT_LENGTH, None)

    @property
    def content_type(self) -> str:
        # Just a placeholder for adding setter
        return super().content_type

    @content_type.setter
    def content_type(self, value: str) -> None:
        self.content_type  # read header values if needed
        self._content_type = str(value)
        self._generate_content_type_header()

    @property
    def charset(self) -> Optional[str]:
        # Just a placeholder for adding setter
        return super().charset

    @charset.setter
    def charset(self, value: Optional[str]) -> None:
        ctype = self.content_type  # read header values if needed
        if ctype == "application/octet-stream":
            raise RuntimeError(
                "Setting charset for application/octet-stream "
                "doesn't make sense, setup content_type first"
            )
        assert self._content_dict is not None
        if value is None:
            self._content_dict.pop("charset", None)
        else:
            self._content_dict["charset"] = str(value).lower()
        self._generate_content_type_header()

    @property
    def last_modified(self) -> Optional[datetime.datetime]:
        """The value of Last-Modified HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))

    @last_modified.setter
    def last_modified(
        self, value: Optional[Union[int, float, datetime.datetime, str]]
    ) -> None:
        if value is None:
            self._headers.pop(hdrs.LAST_MODIFIED, None)
        elif isinstance(value, (int, float)):
            self._headers[hdrs.LAST_MODIFIED] = time.strftime(
                "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
            )
        elif isinstance(value, datetime.datetime):
            self._headers[hdrs.LAST_MODIFIED] = time.strftime(
                "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
            )
        elif isinstance(value, str):
            self._headers[hdrs.LAST_MODIFIED] = value

    @property
    def etag(self) -> Optional[ETag]:
        quoted_value = self._headers.get(hdrs.ETAG)
        if not quoted_value:
            return None
        elif quoted_value == ETAG_ANY:
            return ETag(value=ETAG_ANY)
        match = QUOTED_ETAG_RE.fullmatch(quoted_value)
        if not match:
            return None
        is_weak, value = match.group(1, 2)
        return ETag(
            is_weak=bool(is_weak),
            value=value,
        )

    @etag.setter
    def etag(self, value: Optional[Union[ETag, str]]) -> None:
        if value is None:
            self._headers.pop(hdrs.ETAG, None)
        elif (isinstance(value, str) and value == ETAG_ANY) or (
            isinstance(value, ETag) and value.value == ETAG_ANY
        ):
            self._headers[hdrs.ETAG] = ETAG_ANY
        elif isinstance(value, str):
            validate_etag_value(value)
            self._headers[hdrs.ETAG] = f'"{value}"'
        elif isinstance(value, ETag) and isinstance(value.value, str):
            validate_etag_value(value.value)
            hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
            self._headers[hdrs.ETAG] = hdr_value
        else:
            raise ValueError(
                f"Unsupported etag type: {type(value)}. "
                f"etag must be str, ETag or None"
            )

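    # Example (sketch, not part of the original file): the etag setter
    # normalizes values into the quoted header form:
    #
    #     resp.etag = "deadbeef"                      # -> ETag: "deadbeef"
    #     resp.etag = ETag(value="v1", is_weak=True)  # -> ETag: W/"v1"
    #     resp.etag = None                            # removes the header
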
    def _generate_content_type_header(
        self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
    ) -> None:
        assert self._content_dict is not None
        assert self._content_type is not None
        params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
        if params:
            ctype = self._content_type + "; " + params
        else:
            ctype = self._content_type
        self._headers[CONTENT_TYPE] = ctype

    async def _do_start_compression(self, coding: ContentCoding) -> None:
        if coding != ContentCoding.identity:
            assert self._payload_writer is not None
            self._headers[hdrs.CONTENT_ENCODING] = coding.value
            self._payload_writer.enable_compression(coding.value)
            # Compressed payload may have different content length,
            # remove the header
            self._headers.popall(hdrs.CONTENT_LENGTH, None)

    async def _start_compression(self, request: "BaseRequest") -> None:
        if self._compression_force:
            await self._do_start_compression(self._compression_force)
        else:
            accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
            for coding in ContentCoding:
                if coding.value in accept_encoding:
                    await self._do_start_compression(coding)
                    return

    async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
        if self._eof_sent:
            return None
        if self._payload_writer is not None:
            return self._payload_writer
        self._must_be_empty_body = must_be_empty_body(request.method, self.status)
        return await self._start(request)

    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        self._req = request
        writer = self._payload_writer = request._payload_writer

        await self._prepare_headers()
        await request._prepare_hook(self)
        await self._write_headers()

        return writer

    async def _prepare_headers(self) -> None:
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        keep_alive = self._keep_alive
        if keep_alive is None:
            keep_alive = request.keep_alive
            self._keep_alive = keep_alive

        version = request.version

        headers = self._headers
        for cookie in self._cookies.values():
            value = cookie.output(header="")[1:]
            headers.add(hdrs.SET_COOKIE, value)

        if self._compression:
            await self._start_compression(request)

        if self._chunked:
            if version != HttpVersion11:
                raise RuntimeError(
                    "Using chunked encoding is forbidden "
                    "for HTTP/{0.major}.{0.minor}".format(request.version)
                )
            if not self._must_be_empty_body:
                writer.enable_chunking()
                headers[hdrs.TRANSFER_ENCODING] = "chunked"
                if hdrs.CONTENT_LENGTH in headers:
                    del headers[hdrs.CONTENT_LENGTH]
        elif self._length_check:
            writer.length = self.content_length
            if writer.length is None:
                if version >= HttpVersion11:
                    if not self._must_be_empty_body:
                        writer.enable_chunking()
                        headers[hdrs.TRANSFER_ENCODING] = "chunked"
                elif not self._must_be_empty_body:
|
|
||||||
keep_alive = False
|
|
||||||
|
|
||||||
# HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
|
|
||||||
# HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
|
|
||||||
if self._must_be_empty_body:
|
|
||||||
if hdrs.CONTENT_LENGTH in headers and should_remove_content_length(
|
|
||||||
request.method, self.status
|
|
||||||
):
|
|
||||||
del headers[hdrs.CONTENT_LENGTH]
|
|
||||||
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10
|
|
||||||
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13
|
|
||||||
if hdrs.TRANSFER_ENCODING in headers:
|
|
||||||
del headers[hdrs.TRANSFER_ENCODING]
|
|
||||||
else:
|
|
||||||
headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
|
|
||||||
headers.setdefault(hdrs.DATE, rfc822_formatted_time())
|
|
||||||
headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)
|
|
||||||
|
|
||||||
# connection header
|
|
||||||
if hdrs.CONNECTION not in headers:
|
|
||||||
if keep_alive:
|
|
||||||
if version == HttpVersion10:
|
|
||||||
headers[hdrs.CONNECTION] = "keep-alive"
|
|
||||||
else:
|
|
||||||
if version == HttpVersion11:
|
|
||||||
headers[hdrs.CONNECTION] = "close"
|
|
||||||
|
|
||||||
async def _write_headers(self) -> None:
|
|
||||||
request = self._req
|
|
||||||
assert request is not None
|
|
||||||
writer = self._payload_writer
|
|
||||||
assert writer is not None
|
|
||||||
# status line
|
|
||||||
version = request.version
|
|
||||||
status_line = "HTTP/{}.{} {} {}".format(
|
|
||||||
version[0], version[1], self._status, self._reason
|
|
||||||
)
|
|
||||||
await writer.write_headers(status_line, self._headers)
|
|
||||||
|
|
||||||
async def write(self, data: bytes) -> None:
|
|
||||||
assert isinstance(
|
|
||||||
data, (bytes, bytearray, memoryview)
|
|
||||||
), "data argument must be byte-ish (%r)" % type(data)
|
|
||||||
|
|
||||||
if self._eof_sent:
|
|
||||||
raise RuntimeError("Cannot call write() after write_eof()")
|
|
||||||
if self._payload_writer is None:
|
|
||||||
raise RuntimeError("Cannot call write() before prepare()")
|
|
||||||
|
|
||||||
await self._payload_writer.write(data)
|
|
||||||
|
|
||||||
async def drain(self) -> None:
|
|
||||||
assert not self._eof_sent, "EOF has already been sent"
|
|
||||||
assert self._payload_writer is not None, "Response has not been started"
|
|
||||||
warnings.warn(
|
|
||||||
"drain method is deprecated, use await resp.write()",
|
|
||||||
DeprecationWarning,
|
|
||||||
stacklevel=2,
|
|
||||||
)
|
|
||||||
await self._payload_writer.drain()
|
|
||||||
|
|
||||||
async def write_eof(self, data: bytes = b"") -> None:
|
|
||||||
assert isinstance(
|
|
||||||
data, (bytes, bytearray, memoryview)
|
|
||||||
), "data argument must be byte-ish (%r)" % type(data)
|
|
||||||
|
|
||||||
if self._eof_sent:
|
|
||||||
return
|
|
||||||
|
|
||||||
assert self._payload_writer is not None, "Response has not been started"
|
|
||||||
|
|
||||||
await self._payload_writer.write_eof(data)
|
|
||||||
self._eof_sent = True
|
|
||||||
self._req = None
|
|
||||||
self._body_length = self._payload_writer.output_size
|
|
||||||
self._payload_writer = None
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
if self._eof_sent:
|
|
||||||
info = "eof"
|
|
||||||
elif self.prepared:
|
|
||||||
assert self._req is not None
|
|
||||||
info = f"{self._req.method} {self._req.path} "
|
|
||||||
else:
|
|
||||||
info = "not prepared"
|
|
||||||
return f"<{self.__class__.__name__} {self.reason} {info}>"
|
|
||||||
|
|
||||||
def __getitem__(self, key: str) -> Any:
|
|
||||||
return self._state[key]
|
|
||||||
|
|
||||||
def __setitem__(self, key: str, value: Any) -> None:
|
|
||||||
self._state[key] = value
|
|
||||||
|
|
||||||
def __delitem__(self, key: str) -> None:
|
|
||||||
del self._state[key]
|
|
||||||
|
|
||||||
def __len__(self) -> int:
|
|
||||||
return len(self._state)
|
|
||||||
|
|
||||||
def __iter__(self) -> Iterator[str]:
|
|
||||||
return iter(self._state)
|
|
||||||
|
|
||||||
def __hash__(self) -> int:
|
|
||||||
return hash(id(self))
|
|
||||||
|
|
||||||
def __eq__(self, other: object) -> bool:
|
|
||||||
return self is other
|
|
||||||
|
|
||||||
|
|
||||||
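
# A minimal usage sketch (not part of the original module), assuming the
# public aiohttp.web facade: the prepare()/write()/write_eof() protocol that
# StreamResponse implements above. The handler name is hypothetical.
from aiohttp import web

async def stream_handler(request: web.Request) -> web.StreamResponse:
    resp = web.StreamResponse(status=200)
    resp.content_type = "text/plain"
    await resp.prepare(request)  # writes the status line and headers
    await resp.write(b"chunk 1\n")  # body bytes go through the payload writer
    await resp.write_eof()  # finalizes the body; write() afterwards raises
    return resp
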
class Response(StreamResponse):
    def __init__(
        self,
        *,
        body: Any = None,
        status: int = 200,
        reason: Optional[str] = None,
        text: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
        content_type: Optional[str] = None,
        charset: Optional[str] = None,
        zlib_executor_size: Optional[int] = None,
        zlib_executor: Optional[Executor] = None,
    ) -> None:
        if body is not None and text is not None:
            raise ValueError("body and text are not allowed together")

        if headers is None:
            real_headers: CIMultiDict[str] = CIMultiDict()
        elif not isinstance(headers, CIMultiDict):
            real_headers = CIMultiDict(headers)
        else:
            real_headers = headers  # = cast('CIMultiDict[str]', headers)

        if content_type is not None and "charset" in content_type:
            raise ValueError("charset must not be in content_type argument")

        if text is not None:
            if hdrs.CONTENT_TYPE in real_headers:
                if content_type or charset:
                    raise ValueError(
                        "passing both Content-Type header and "
                        "content_type or charset params "
                        "is forbidden"
                    )
            else:
                # fast path for filling headers
                if not isinstance(text, str):
                    raise TypeError("text argument must be str (%r)" % type(text))
                if content_type is None:
                    content_type = "text/plain"
                if charset is None:
                    charset = "utf-8"
                real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
                body = text.encode(charset)
                text = None
        else:
            if hdrs.CONTENT_TYPE in real_headers:
                if content_type is not None or charset is not None:
                    raise ValueError(
                        "passing both Content-Type header and "
                        "content_type or charset params "
                        "is forbidden"
                    )
            else:
                if content_type is not None:
                    if charset is not None:
                        content_type += "; charset=" + charset
                    real_headers[hdrs.CONTENT_TYPE] = content_type

        super().__init__(status=status, reason=reason, headers=real_headers)

        if text is not None:
            self.text = text
        else:
            self.body = body

        self._compressed_body: Optional[bytes] = None
        self._zlib_executor_size = zlib_executor_size
        self._zlib_executor = zlib_executor

    @property
    def body(self) -> Optional[Union[bytes, Payload]]:
        return self._body

    @body.setter
    def body(self, body: bytes) -> None:
        if body is None:
            self._body: Optional[bytes] = None
            self._body_payload: bool = False
        elif isinstance(body, (bytes, bytearray)):
            self._body = body
            self._body_payload = False
        else:
            try:
                self._body = body = payload.PAYLOAD_REGISTRY.get(body)
            except payload.LookupError:
                raise ValueError("Unsupported body type %r" % type(body))

            self._body_payload = True

            headers = self._headers

            # set content-type
            if hdrs.CONTENT_TYPE not in headers:
                headers[hdrs.CONTENT_TYPE] = body.content_type

            # copy payload headers
            if body.headers:
                for (key, value) in body.headers.items():
                    if key not in headers:
                        headers[key] = value

        self._compressed_body = None

    @property
    def text(self) -> Optional[str]:
        if self._body is None:
            return None
        return self._body.decode(self.charset or "utf-8")

    @text.setter
    def text(self, text: str) -> None:
        assert text is None or isinstance(
            text, str
        ), "text argument must be str (%r)" % type(text)

        if self.content_type == "application/octet-stream":
            self.content_type = "text/plain"
        if self.charset is None:
            self.charset = "utf-8"

        self._body = text.encode(self.charset)
        self._body_payload = False
        self._compressed_body = None

    @property
    def content_length(self) -> Optional[int]:
        if self._chunked:
            return None

        if hdrs.CONTENT_LENGTH in self._headers:
            return super().content_length

        if self._compressed_body is not None:
            # Return length of the compressed body
            return len(self._compressed_body)
        elif self._body_payload:
            # A payload without content length, or a compressed payload
            return None
        elif self._body is not None:
            return len(self._body)
        else:
            return 0

    @content_length.setter
    def content_length(self, value: Optional[int]) -> None:
        raise RuntimeError("Content length is set automatically")

    async def write_eof(self, data: bytes = b"") -> None:
        if self._eof_sent:
            return
        if self._compressed_body is None:
            body: Optional[Union[bytes, Payload]] = self._body
        else:
            body = self._compressed_body
        assert not data, f"data arg is not supported, got {data!r}"
        assert self._req is not None
        assert self._payload_writer is not None
        if body is not None:
            if self._must_be_empty_body:
                await super().write_eof()
            elif self._body_payload:
                payload = cast(Payload, body)
                await payload.write(self._payload_writer)
                await super().write_eof()
            else:
                await super().write_eof(cast(bytes, body))
        else:
            await super().write_eof()

    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        if should_remove_content_length(request.method, self.status):
            if hdrs.CONTENT_LENGTH in self._headers:
                del self._headers[hdrs.CONTENT_LENGTH]
        elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers:
            if self._body_payload:
                size = cast(Payload, self._body).size
                if size is not None:
                    self._headers[hdrs.CONTENT_LENGTH] = str(size)
            else:
                body_len = len(self._body) if self._body else "0"
                # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7
                if body_len != "0" or (
                    self.status != 304 and request.method.upper() != hdrs.METH_HEAD
                ):
                    self._headers[hdrs.CONTENT_LENGTH] = str(body_len)

        return await super()._start(request)

    async def _do_start_compression(self, coding: ContentCoding) -> None:
        if self._body_payload or self._chunked:
            return await super()._do_start_compression(coding)

        if coding != ContentCoding.identity:
            # Instead of using _payload_writer.enable_compression,
            # compress the whole body
            compressor = ZLibCompressor(
                encoding=str(coding.value),
                max_sync_chunk_size=self._zlib_executor_size,
                executor=self._zlib_executor,
            )
            assert self._body is not None
            if self._zlib_executor_size is None and len(self._body) > 1024 * 1024:
                warnings.warn(
                    "Synchronous compression of large response bodies "
                    f"({len(self._body)} bytes) might block the async event loop. "
                    "Consider providing a custom value to zlib_executor_size/"
                    "zlib_executor response properties or disabling compression on it."
                )
            self._compressed_body = (
                await compressor.compress(self._body) + compressor.flush()
            )
            assert self._compressed_body is not None

            self._headers[hdrs.CONTENT_ENCODING] = coding.value
            self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))


def json_response(
    data: Any = sentinel,
    *,
    text: Optional[str] = None,
    body: Optional[bytes] = None,
    status: int = 200,
    reason: Optional[str] = None,
    headers: Optional[LooseHeaders] = None,
    content_type: str = "application/json",
    dumps: JSONEncoder = json.dumps,
) -> Response:
    if data is not sentinel:
        if text or body:
            raise ValueError("only one of data, text, or body should be specified")
        else:
            text = dumps(data)
    return Response(
        text=text,
        body=body,
        status=status,
        reason=reason,
        headers=headers,
        content_type=content_type,
    )
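
# A minimal usage sketch (not part of the original module), assuming the
# public aiohttp.web facade: json_response as typically called from a
# handler. The handler name is hypothetical.
from aiohttp import web

async def health(request: web.Request) -> web.Response:
    # data is serialized with the given dumps (json.dumps by default) and
    # Content-Type is set to application/json on the Response.
    return web.json_response({"status": "ok"})
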
@@ -1,216 +0,0 @@
import abc
import os  # noqa
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Iterator,
    List,
    Optional,
    Sequence,
    Type,
    Union,
    overload,
)

import attr

from . import hdrs
from .abc import AbstractView
from .typedefs import Handler, PathLike

if TYPE_CHECKING:  # pragma: no cover
    from .web_request import Request
    from .web_response import StreamResponse
    from .web_urldispatcher import AbstractRoute, UrlDispatcher
else:
    Request = StreamResponse = UrlDispatcher = AbstractRoute = None


__all__ = (
    "AbstractRouteDef",
    "RouteDef",
    "StaticDef",
    "RouteTableDef",
    "head",
    "options",
    "get",
    "post",
    "patch",
    "put",
    "delete",
    "route",
    "view",
    "static",
)


class AbstractRouteDef(abc.ABC):
    @abc.abstractmethod
    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        pass  # pragma: no cover


_HandlerType = Union[Type[AbstractView], Handler]


@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class RouteDef(AbstractRouteDef):
    method: str
    path: str
    handler: _HandlerType
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        info = []
        for name, value in sorted(self.kwargs.items()):
            info.append(f", {name}={value!r}")
        return "<RouteDef {method} {path} -> {handler.__name__!r}{info}>".format(
            method=self.method, path=self.path, handler=self.handler, info="".join(info)
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        if self.method in hdrs.METH_ALL:
            reg = getattr(router, "add_" + self.method.lower())
            return [reg(self.path, self.handler, **self.kwargs)]
        else:
            return [
                router.add_route(self.method, self.path, self.handler, **self.kwargs)
            ]


@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class StaticDef(AbstractRouteDef):
    prefix: str
    path: PathLike
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        info = []
        for name, value in sorted(self.kwargs.items()):
            info.append(f", {name}={value!r}")
        return "<StaticDef {prefix} -> {path}{info}>".format(
            prefix=self.prefix, path=self.path, info="".join(info)
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        resource = router.add_static(self.prefix, self.path, **self.kwargs)
        routes = resource.get_info().get("routes", {})
        return list(routes.values())


def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return RouteDef(method, path, handler, kwargs)


def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_HEAD, path, handler, **kwargs)


def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_OPTIONS, path, handler, **kwargs)


def get(
    path: str,
    handler: _HandlerType,
    *,
    name: Optional[str] = None,
    allow_head: bool = True,
    **kwargs: Any,
) -> RouteDef:
    return route(
        hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
    )


def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_POST, path, handler, **kwargs)


def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_PUT, path, handler, **kwargs)


def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_PATCH, path, handler, **kwargs)


def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_DELETE, path, handler, **kwargs)


def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_ANY, path, handler, **kwargs)


def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
    return StaticDef(prefix, path, kwargs)
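
# A minimal usage sketch (not part of the original module), assuming the
# public aiohttp.web facade: the helpers above build RouteDef/StaticDef
# records that Application.add_routes() registers later. Handler name and
# paths are hypothetical.
from aiohttp import web

async def index(request: web.Request) -> web.Response:
    return web.Response(text="hello")

app = web.Application()
app.add_routes(
    [
        web.get("/", index),  # RouteDef(hdrs.METH_GET, "/", index, {...})
        web.static("/assets", "./assets"),  # StaticDef -> router.add_static()
    ]
)
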
_Deco = Callable[[_HandlerType], _HandlerType]


class RouteTableDef(Sequence[AbstractRouteDef]):
    """Route definition table"""

    def __init__(self) -> None:
        self._items: List[AbstractRouteDef] = []

    def __repr__(self) -> str:
        return f"<RouteTableDef count={len(self._items)}>"

    @overload
    def __getitem__(self, index: int) -> AbstractRouteDef:
        ...

    @overload
    def __getitem__(self, index: slice) -> List[AbstractRouteDef]:
        ...

    def __getitem__(self, index):  # type: ignore[no-untyped-def]
        return self._items[index]

    def __iter__(self) -> Iterator[AbstractRouteDef]:
        return iter(self._items)

    def __len__(self) -> int:
        return len(self._items)

    def __contains__(self, item: object) -> bool:
        return item in self._items

    def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
        def inner(handler: _HandlerType) -> _HandlerType:
            self._items.append(RouteDef(method, path, handler, kwargs))
            return handler

        return inner

    def head(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_HEAD, path, **kwargs)

    def get(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_GET, path, **kwargs)

    def post(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_POST, path, **kwargs)

    def put(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_PUT, path, **kwargs)

    def patch(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_PATCH, path, **kwargs)

    def delete(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_DELETE, path, **kwargs)

    def options(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_OPTIONS, path, **kwargs)

    def view(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_ANY, path, **kwargs)

    def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
        self._items.append(StaticDef(prefix, path, kwargs))
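
# A minimal usage sketch (not part of the original module), assuming the
# public aiohttp.web facade: the decorator style offered by RouteTableDef,
# which collects RouteDef items until add_routes() is called.
from aiohttp import web

routes = web.RouteTableDef()

@routes.get("/ping")
async def ping(request: web.Request) -> web.Response:
    return web.Response(text="pong")

app = web.Application()
app.add_routes(routes)  # registers every collected route on the app router
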
@@ -1,406 +0,0 @@
import asyncio
import signal
import socket
import warnings
from abc import ABC, abstractmethod
from typing import Any, Awaitable, Callable, List, Optional, Set

from yarl import URL

from .typedefs import PathLike
from .web_app import Application
from .web_server import Server

try:
    from ssl import SSLContext
except ImportError:
    SSLContext = object  # type: ignore[misc,assignment]


__all__ = (
    "BaseSite",
    "TCPSite",
    "UnixSite",
    "NamedPipeSite",
    "SockSite",
    "BaseRunner",
    "AppRunner",
    "ServerRunner",
    "GracefulExit",
)


class GracefulExit(SystemExit):
    code = 1


def _raise_graceful_exit() -> None:
    raise GracefulExit()


class BaseSite(ABC):
    __slots__ = ("_runner", "_ssl_context", "_backlog", "_server")

    def __init__(
        self,
        runner: "BaseRunner",
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        if runner.server is None:
            raise RuntimeError("Call runner.setup() before making a site")
        if shutdown_timeout != 60.0:
            msg = "shutdown_timeout should be set on BaseRunner"
            warnings.warn(msg, DeprecationWarning, stacklevel=2)
            runner._shutdown_timeout = shutdown_timeout
        self._runner = runner
        self._ssl_context = ssl_context
        self._backlog = backlog
        self._server: Optional[asyncio.AbstractServer] = None

    @property
    @abstractmethod
    def name(self) -> str:
        pass  # pragma: no cover

    @abstractmethod
    async def start(self) -> None:
        self._runner._reg_site(self)

    async def stop(self) -> None:
        self._runner._check_site(self)
        if self._server is not None:  # Maybe not started yet
            self._server.close()

        self._runner._unreg_site(self)


class TCPSite(BaseSite):
    __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")

    def __init__(
        self,
        runner: "BaseRunner",
        host: Optional[str] = None,
        port: Optional[int] = None,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
        reuse_address: Optional[bool] = None,
        reuse_port: Optional[bool] = None,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._host = host
        if port is None:
            port = 8443 if self._ssl_context else 8080
        self._port = port
        self._reuse_address = reuse_address
        self._reuse_port = reuse_port

    @property
    def name(self) -> str:
        scheme = "https" if self._ssl_context else "http"
        host = "0.0.0.0" if self._host is None else self._host
        return str(URL.build(scheme=scheme, host=host, port=self._port))

    async def start(self) -> None:
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server,
            self._host,
            self._port,
            ssl=self._ssl_context,
            backlog=self._backlog,
            reuse_address=self._reuse_address,
            reuse_port=self._reuse_port,
        )


class UnixSite(BaseSite):
    __slots__ = ("_path",)

    def __init__(
        self,
        runner: "BaseRunner",
        path: PathLike,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._path = path

    @property
    def name(self) -> str:
        scheme = "https" if self._ssl_context else "http"
        return f"{scheme}://unix:{self._path}:"

    async def start(self) -> None:
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_unix_server(
            server,
            self._path,
            ssl=self._ssl_context,
            backlog=self._backlog,
        )


class NamedPipeSite(BaseSite):
    __slots__ = ("_path",)

    def __init__(
        self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
    ) -> None:
        loop = asyncio.get_event_loop()
        if not isinstance(
            loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        super().__init__(runner, shutdown_timeout=shutdown_timeout)
        self._path = path

    @property
    def name(self) -> str:
        return self._path

    async def start(self) -> None:
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        _server = await loop.start_serving_pipe(  # type: ignore[attr-defined]
            server, self._path
        )
        self._server = _server[0]


class SockSite(BaseSite):
    __slots__ = ("_sock", "_name")

    def __init__(
        self,
        runner: "BaseRunner",
        sock: socket.socket,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._sock = sock
        scheme = "https" if self._ssl_context else "http"
        if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
            name = f"{scheme}://unix:{sock.getsockname()}:"
        else:
            host, port = sock.getsockname()[:2]
            name = str(URL.build(scheme=scheme, host=host, port=port))
        self._name = name

    @property
    def name(self) -> str:
        return self._name

    async def start(self) -> None:
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog
        )
class BaseRunner(ABC):
    __slots__ = (
        "shutdown_callback",
        "_handle_signals",
        "_kwargs",
        "_server",
        "_sites",
        "_shutdown_timeout",
    )

    def __init__(
        self,
        *,
        handle_signals: bool = False,
        shutdown_timeout: float = 60.0,
        **kwargs: Any,
    ) -> None:
        self.shutdown_callback: Optional[Callable[[], Awaitable[None]]] = None
        self._handle_signals = handle_signals
        self._kwargs = kwargs
        self._server: Optional[Server] = None
        self._sites: List[BaseSite] = []
        self._shutdown_timeout = shutdown_timeout

    @property
    def server(self) -> Optional[Server]:
        return self._server

    @property
    def addresses(self) -> List[Any]:
        ret: List[Any] = []
        for site in self._sites:
            server = site._server
            if server is not None:
                sockets = server.sockets  # type: ignore[attr-defined]
                if sockets is not None:
                    for sock in sockets:
                        ret.append(sock.getsockname())
        return ret

    @property
    def sites(self) -> Set[BaseSite]:
        return set(self._sites)

    async def setup(self) -> None:
        loop = asyncio.get_event_loop()

        if self._handle_signals:
            try:
                loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit)
                loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit)
            except NotImplementedError:  # pragma: no cover
                # add_signal_handler is not implemented on Windows
                pass

        self._server = await self._make_server()

    @abstractmethod
    async def shutdown(self) -> None:
        """Call any shutdown hooks to help server close gracefully."""

    async def cleanup(self) -> None:
        # The loop over sites is intentional, an exception on gather()
        # leaves self._sites in unpredictable state.
        # The loop guarantees that a site is either deleted on success or
        # still present on failure
        for site in list(self._sites):
            await site.stop()

        if self._server:  # If setup succeeded
            self._server.pre_shutdown()
            await self.shutdown()

            if self.shutdown_callback:
                await self.shutdown_callback()

            await self._server.shutdown(self._shutdown_timeout)
        await self._cleanup_server()

        self._server = None
        if self._handle_signals:
            loop = asyncio.get_running_loop()
            try:
                loop.remove_signal_handler(signal.SIGINT)
                loop.remove_signal_handler(signal.SIGTERM)
            except NotImplementedError:  # pragma: no cover
                # remove_signal_handler is not implemented on Windows
                pass

    @abstractmethod
    async def _make_server(self) -> Server:
        pass  # pragma: no cover

    @abstractmethod
    async def _cleanup_server(self) -> None:
        pass  # pragma: no cover

    def _reg_site(self, site: BaseSite) -> None:
        if site in self._sites:
            raise RuntimeError(f"Site {site} is already registered in runner {self}")
        self._sites.append(site)

    def _check_site(self, site: BaseSite) -> None:
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")

    def _unreg_site(self, site: BaseSite) -> None:
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")
        self._sites.remove(site)


class ServerRunner(BaseRunner):
    """Low-level web server runner"""

    __slots__ = ("_web_server",)

    def __init__(
        self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        self._web_server = web_server

    async def shutdown(self) -> None:
        pass

    async def _make_server(self) -> Server:
        return self._web_server

    async def _cleanup_server(self) -> None:
        pass
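
# A minimal usage sketch (not part of the original module), assuming the
# public aiohttp.web facade: ServerRunner drives the low-level web.Server,
# which maps every request to a single handler. Names are hypothetical.
from aiohttp import web

async def low_level(request: web.BaseRequest) -> web.Response:
    return web.Response(text="low level")

async def serve() -> None:
    runner = web.ServerRunner(web.Server(low_level))
    await runner.setup()
    await web.TCPSite(runner, "localhost", 8080).start()
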
class AppRunner(BaseRunner):
    """Web Application runner"""

    __slots__ = ("_app",)

    def __init__(
        self, app: Application, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        if not isinstance(app, Application):
            raise TypeError(
                "The first argument should be web.Application "
                "instance, got {!r}".format(app)
            )
        self._app = app

    @property
    def app(self) -> Application:
        return self._app

    async def shutdown(self) -> None:
        await self._app.shutdown()

    async def _make_server(self) -> Server:
        loop = asyncio.get_event_loop()
        self._app._set_loop(loop)
        self._app.on_startup.freeze()
        await self._app.startup()
        self._app.freeze()

        return self._app._make_handler(loop=loop, **self._kwargs)

    async def _cleanup_server(self) -> None:
        await self._app.cleanup()
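
# A minimal usage sketch (not part of the original module), assuming the
# public aiohttp.web facade: the full runner/site lifecycle implemented
# above -- setup(), start one or more sites, then cleanup().
import asyncio
from aiohttp import web

async def main() -> None:
    app = web.Application()
    runner = web.AppRunner(app)
    await runner.setup()  # calls _make_server()
    site = web.TCPSite(runner, "localhost", 8080)
    await site.start()  # loop.create_server(...)
    try:
        await asyncio.sleep(3600)  # keep serving
    finally:
        await runner.cleanup()  # stops sites, runs shutdown callbacks

# asyncio.run(main())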