removed local env
This commit is contained in:
parent
ff885642c5
commit
a099a93431
1548 changed files with 0 additions and 367509 deletions
Binary file not shown.
Binary file not shown.
|
|
@ -1 +0,0 @@
|
||||||
pip
|
|
||||||
|
|
@ -1,201 +0,0 @@
|
||||||
Apache License
|
|
||||||
Version 2.0, January 2004
|
|
||||||
http://www.apache.org/licenses/
|
|
||||||
|
|
||||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
|
||||||
|
|
||||||
1. Definitions.
|
|
||||||
|
|
||||||
"License" shall mean the terms and conditions for use, reproduction,
|
|
||||||
and distribution as defined by Sections 1 through 9 of this document.
|
|
||||||
|
|
||||||
"Licensor" shall mean the copyright owner or entity authorized by
|
|
||||||
the copyright owner that is granting the License.
|
|
||||||
|
|
||||||
"Legal Entity" shall mean the union of the acting entity and all
|
|
||||||
other entities that control, are controlled by, or are under common
|
|
||||||
control with that entity. For the purposes of this definition,
|
|
||||||
"control" means (i) the power, direct or indirect, to cause the
|
|
||||||
direction or management of such entity, whether by contract or
|
|
||||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
|
||||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
|
||||||
|
|
||||||
"You" (or "Your") shall mean an individual or Legal Entity
|
|
||||||
exercising permissions granted by this License.
|
|
||||||
|
|
||||||
"Source" form shall mean the preferred form for making modifications,
|
|
||||||
including but not limited to software source code, documentation
|
|
||||||
source, and configuration files.
|
|
||||||
|
|
||||||
"Object" form shall mean any form resulting from mechanical
|
|
||||||
transformation or translation of a Source form, including but
|
|
||||||
not limited to compiled object code, generated documentation,
|
|
||||||
and conversions to other media types.
|
|
||||||
|
|
||||||
"Work" shall mean the work of authorship, whether in Source or
|
|
||||||
Object form, made available under the License, as indicated by a
|
|
||||||
copyright notice that is included in or attached to the work
|
|
||||||
(an example is provided in the Appendix below).
|
|
||||||
|
|
||||||
"Derivative Works" shall mean any work, whether in Source or Object
|
|
||||||
form, that is based on (or derived from) the Work and for which the
|
|
||||||
editorial revisions, annotations, elaborations, or other modifications
|
|
||||||
represent, as a whole, an original work of authorship. For the purposes
|
|
||||||
of this License, Derivative Works shall not include works that remain
|
|
||||||
separable from, or merely link (or bind by name) to the interfaces of,
|
|
||||||
the Work and Derivative Works thereof.
|
|
||||||
|
|
||||||
"Contribution" shall mean any work of authorship, including
|
|
||||||
the original version of the Work and any modifications or additions
|
|
||||||
to that Work or Derivative Works thereof, that is intentionally
|
|
||||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
|
||||||
or by an individual or Legal Entity authorized to submit on behalf of
|
|
||||||
the copyright owner. For the purposes of this definition, "submitted"
|
|
||||||
means any form of electronic, verbal, or written communication sent
|
|
||||||
to the Licensor or its representatives, including but not limited to
|
|
||||||
communication on electronic mailing lists, source code control systems,
|
|
||||||
and issue tracking systems that are managed by, or on behalf of, the
|
|
||||||
Licensor for the purpose of discussing and improving the Work, but
|
|
||||||
excluding communication that is conspicuously marked or otherwise
|
|
||||||
designated in writing by the copyright owner as "Not a Contribution."
|
|
||||||
|
|
||||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
|
||||||
on behalf of whom a Contribution has been received by Licensor and
|
|
||||||
subsequently incorporated within the Work.
|
|
||||||
|
|
||||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
copyright license to reproduce, prepare Derivative Works of,
|
|
||||||
publicly display, publicly perform, sublicense, and distribute the
|
|
||||||
Work and such Derivative Works in Source or Object form.
|
|
||||||
|
|
||||||
3. Grant of Patent License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
(except as stated in this section) patent license to make, have made,
|
|
||||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
|
||||||
where such license applies only to those patent claims licensable
|
|
||||||
by such Contributor that are necessarily infringed by their
|
|
||||||
Contribution(s) alone or by combination of their Contribution(s)
|
|
||||||
with the Work to which such Contribution(s) was submitted. If You
|
|
||||||
institute patent litigation against any entity (including a
|
|
||||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
|
||||||
or a Contribution incorporated within the Work constitutes direct
|
|
||||||
or contributory patent infringement, then any patent licenses
|
|
||||||
granted to You under this License for that Work shall terminate
|
|
||||||
as of the date such litigation is filed.
|
|
||||||
|
|
||||||
4. Redistribution. You may reproduce and distribute copies of the
|
|
||||||
Work or Derivative Works thereof in any medium, with or without
|
|
||||||
modifications, and in Source or Object form, provided that You
|
|
||||||
meet the following conditions:
|
|
||||||
|
|
||||||
(a) You must give any other recipients of the Work or
|
|
||||||
Derivative Works a copy of this License; and
|
|
||||||
|
|
||||||
(b) You must cause any modified files to carry prominent notices
|
|
||||||
stating that You changed the files; and
|
|
||||||
|
|
||||||
(c) You must retain, in the Source form of any Derivative Works
|
|
||||||
that You distribute, all copyright, patent, trademark, and
|
|
||||||
attribution notices from the Source form of the Work,
|
|
||||||
excluding those notices that do not pertain to any part of
|
|
||||||
the Derivative Works; and
|
|
||||||
|
|
||||||
(d) If the Work includes a "NOTICE" text file as part of its
|
|
||||||
distribution, then any Derivative Works that You distribute must
|
|
||||||
include a readable copy of the attribution notices contained
|
|
||||||
within such NOTICE file, excluding those notices that do not
|
|
||||||
pertain to any part of the Derivative Works, in at least one
|
|
||||||
of the following places: within a NOTICE text file distributed
|
|
||||||
as part of the Derivative Works; within the Source form or
|
|
||||||
documentation, if provided along with the Derivative Works; or,
|
|
||||||
within a display generated by the Derivative Works, if and
|
|
||||||
wherever such third-party notices normally appear. The contents
|
|
||||||
of the NOTICE file are for informational purposes only and
|
|
||||||
do not modify the License. You may add Your own attribution
|
|
||||||
notices within Derivative Works that You distribute, alongside
|
|
||||||
or as an addendum to the NOTICE text from the Work, provided
|
|
||||||
that such additional attribution notices cannot be construed
|
|
||||||
as modifying the License.
|
|
||||||
|
|
||||||
You may add Your own copyright statement to Your modifications and
|
|
||||||
may provide additional or different license terms and conditions
|
|
||||||
for use, reproduction, or distribution of Your modifications, or
|
|
||||||
for any such Derivative Works as a whole, provided Your use,
|
|
||||||
reproduction, and distribution of the Work otherwise complies with
|
|
||||||
the conditions stated in this License.
|
|
||||||
|
|
||||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
|
||||||
any Contribution intentionally submitted for inclusion in the Work
|
|
||||||
by You to the Licensor shall be under the terms and conditions of
|
|
||||||
this License, without any additional terms or conditions.
|
|
||||||
Notwithstanding the above, nothing herein shall supersede or modify
|
|
||||||
the terms of any separate license agreement you may have executed
|
|
||||||
with Licensor regarding such Contributions.
|
|
||||||
|
|
||||||
6. Trademarks. This License does not grant permission to use the trade
|
|
||||||
names, trademarks, service marks, or product names of the Licensor,
|
|
||||||
except as required for reasonable and customary use in describing the
|
|
||||||
origin of the Work and reproducing the content of the NOTICE file.
|
|
||||||
|
|
||||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
|
||||||
agreed to in writing, Licensor provides the Work (and each
|
|
||||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
||||||
implied, including, without limitation, any warranties or conditions
|
|
||||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
|
||||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
|
||||||
appropriateness of using or redistributing the Work and assume any
|
|
||||||
risks associated with Your exercise of permissions under this License.
|
|
||||||
|
|
||||||
8. Limitation of Liability. In no event and under no legal theory,
|
|
||||||
whether in tort (including negligence), contract, or otherwise,
|
|
||||||
unless required by applicable law (such as deliberate and grossly
|
|
||||||
negligent acts) or agreed to in writing, shall any Contributor be
|
|
||||||
liable to You for damages, including any direct, indirect, special,
|
|
||||||
incidental, or consequential damages of any character arising as a
|
|
||||||
result of this License or out of the use or inability to use the
|
|
||||||
Work (including but not limited to damages for loss of goodwill,
|
|
||||||
work stoppage, computer failure or malfunction, or any and all
|
|
||||||
other commercial damages or losses), even if such Contributor
|
|
||||||
has been advised of the possibility of such damages.
|
|
||||||
|
|
||||||
9. Accepting Warranty or Additional Liability. While redistributing
|
|
||||||
the Work or Derivative Works thereof, You may choose to offer,
|
|
||||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
|
||||||
or other liability obligations and/or rights consistent with this
|
|
||||||
License. However, in accepting such obligations, You may act only
|
|
||||||
on Your own behalf and on Your sole responsibility, not on behalf
|
|
||||||
of any other Contributor, and only if You agree to indemnify,
|
|
||||||
defend, and hold each Contributor harmless for any liability
|
|
||||||
incurred by, or claims asserted against, such Contributor by reason
|
|
||||||
of your accepting any such warranty or additional liability.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
APPENDIX: How to apply the Apache License to your work.
|
|
||||||
|
|
||||||
To apply the Apache License to your work, attach the following
|
|
||||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
|
||||||
replaced with your own identifying information. (Don't include
|
|
||||||
the brackets!) The text should be enclosed in the appropriate
|
|
||||||
comment syntax for the file format. We also recommend that a
|
|
||||||
file or class name and description of purpose be included on the
|
|
||||||
same "printed page" as the copyright notice for easier
|
|
||||||
identification within third-party archives.
|
|
||||||
|
|
||||||
Copyright 2013-2020 aiohttp maintainers
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
|
|
@ -1,978 +0,0 @@
|
||||||
Metadata-Version: 2.1
|
|
||||||
Name: aiohttp
|
|
||||||
Version: 3.7.4.post0
|
|
||||||
Summary: Async http client/server framework (asyncio)
|
|
||||||
Home-page: https://github.com/aio-libs/aiohttp
|
|
||||||
Author: Nikolay Kim
|
|
||||||
Author-email: fafhrd91@gmail.com
|
|
||||||
Maintainer: Nikolay Kim <fafhrd91@gmail.com>, Andrew Svetlov <andrew.svetlov@gmail.com>
|
|
||||||
Maintainer-email: aio-libs@googlegroups.com
|
|
||||||
License: Apache 2
|
|
||||||
Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
|
|
||||||
Project-URL: CI: Azure Pipelines, https://dev.azure.com/aio-libs/aiohttp/_build
|
|
||||||
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
|
|
||||||
Project-URL: Docs: RTD, https://docs.aiohttp.org
|
|
||||||
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
|
|
||||||
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
|
|
||||||
Platform: UNKNOWN
|
|
||||||
Classifier: License :: OSI Approved :: Apache Software License
|
|
||||||
Classifier: Intended Audience :: Developers
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Programming Language :: Python :: 3
|
|
||||||
Classifier: Programming Language :: Python :: 3.6
|
|
||||||
Classifier: Programming Language :: Python :: 3.7
|
|
||||||
Classifier: Programming Language :: Python :: 3.8
|
|
||||||
Classifier: Programming Language :: Python :: 3.9
|
|
||||||
Classifier: Development Status :: 5 - Production/Stable
|
|
||||||
Classifier: Operating System :: POSIX
|
|
||||||
Classifier: Operating System :: MacOS :: MacOS X
|
|
||||||
Classifier: Operating System :: Microsoft :: Windows
|
|
||||||
Classifier: Topic :: Internet :: WWW/HTTP
|
|
||||||
Classifier: Framework :: AsyncIO
|
|
||||||
Requires-Python: >=3.6
|
|
||||||
Requires-Dist: attrs (>=17.3.0)
|
|
||||||
Requires-Dist: chardet (<5.0,>=2.0)
|
|
||||||
Requires-Dist: multidict (<7.0,>=4.5)
|
|
||||||
Requires-Dist: async-timeout (<4.0,>=3.0)
|
|
||||||
Requires-Dist: yarl (<2.0,>=1.0)
|
|
||||||
Requires-Dist: typing-extensions (>=3.6.5)
|
|
||||||
Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
|
|
||||||
Provides-Extra: speedups
|
|
||||||
Requires-Dist: aiodns ; extra == 'speedups'
|
|
||||||
Requires-Dist: brotlipy ; extra == 'speedups'
|
|
||||||
Requires-Dist: cchardet ; extra == 'speedups'
|
|
||||||
|
|
||||||
==================================
|
|
||||||
Async http client/server framework
|
|
||||||
==================================
|
|
||||||
|
|
||||||
.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png
|
|
||||||
:height: 64px
|
|
||||||
:width: 64px
|
|
||||||
:alt: aiohttp logo
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
||||||
.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
|
|
||||||
:target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
|
|
||||||
:alt: GitHub Actions status for master branch
|
|
||||||
|
|
||||||
.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
|
|
||||||
:target: https://codecov.io/gh/aio-libs/aiohttp
|
|
||||||
:alt: codecov.io status for master branch
|
|
||||||
|
|
||||||
.. image:: https://badge.fury.io/py/aiohttp.svg
|
|
||||||
:target: https://pypi.org/project/aiohttp
|
|
||||||
:alt: Latest PyPI package version
|
|
||||||
|
|
||||||
.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
|
|
||||||
:target: https://docs.aiohttp.org/
|
|
||||||
:alt: Latest Read The Docs
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
|
|
||||||
:target: https://aio-libs.discourse.group
|
|
||||||
:alt: Discourse status
|
|
||||||
|
|
||||||
.. image:: https://badges.gitter.im/Join%20Chat.svg
|
|
||||||
:target: https://gitter.im/aio-libs/Lobby
|
|
||||||
:alt: Chat on Gitter
|
|
||||||
|
|
||||||
|
|
||||||
Key Features
|
|
||||||
============
|
|
||||||
|
|
||||||
- Supports both client and server side of HTTP protocol.
|
|
||||||
- Supports both client and server Web-Sockets out-of-the-box and avoids
|
|
||||||
Callback Hell.
|
|
||||||
- Provides Web-server with middlewares and plugable routing.
|
|
||||||
|
|
||||||
|
|
||||||
Getting started
|
|
||||||
===============
|
|
||||||
|
|
||||||
Client
|
|
||||||
------
|
|
||||||
|
|
||||||
To get something from the web:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
import asyncio
|
|
||||||
|
|
||||||
async def main():
|
|
||||||
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
async with session.get('http://python.org') as response:
|
|
||||||
|
|
||||||
print("Status:", response.status)
|
|
||||||
print("Content-type:", response.headers['content-type'])
|
|
||||||
|
|
||||||
html = await response.text()
|
|
||||||
print("Body:", html[:15], "...")
|
|
||||||
|
|
||||||
loop = asyncio.get_event_loop()
|
|
||||||
loop.run_until_complete(main())
|
|
||||||
|
|
||||||
This prints:
|
|
||||||
|
|
||||||
.. code-block::
|
|
||||||
|
|
||||||
Status: 200
|
|
||||||
Content-type: text/html; charset=utf-8
|
|
||||||
Body: <!doctype html> ...
|
|
||||||
|
|
||||||
Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.
|
|
||||||
|
|
||||||
Server
|
|
||||||
------
|
|
||||||
|
|
||||||
An example using a simple server:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
# examples/server_simple.py
|
|
||||||
from aiohttp import web
|
|
||||||
|
|
||||||
async def handle(request):
|
|
||||||
name = request.match_info.get('name', "Anonymous")
|
|
||||||
text = "Hello, " + name
|
|
||||||
return web.Response(text=text)
|
|
||||||
|
|
||||||
async def wshandle(request):
|
|
||||||
ws = web.WebSocketResponse()
|
|
||||||
await ws.prepare(request)
|
|
||||||
|
|
||||||
async for msg in ws:
|
|
||||||
if msg.type == web.WSMsgType.text:
|
|
||||||
await ws.send_str("Hello, {}".format(msg.data))
|
|
||||||
elif msg.type == web.WSMsgType.binary:
|
|
||||||
await ws.send_bytes(msg.data)
|
|
||||||
elif msg.type == web.WSMsgType.close:
|
|
||||||
break
|
|
||||||
|
|
||||||
return ws
|
|
||||||
|
|
||||||
|
|
||||||
app = web.Application()
|
|
||||||
app.add_routes([web.get('/', handle),
|
|
||||||
web.get('/echo', wshandle),
|
|
||||||
web.get('/{name}', handle)])
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
web.run_app(app)
|
|
||||||
|
|
||||||
|
|
||||||
Documentation
|
|
||||||
=============
|
|
||||||
|
|
||||||
https://aiohttp.readthedocs.io/
|
|
||||||
|
|
||||||
|
|
||||||
Demos
|
|
||||||
=====
|
|
||||||
|
|
||||||
https://github.com/aio-libs/aiohttp-demos
|
|
||||||
|
|
||||||
|
|
||||||
External links
|
|
||||||
==============
|
|
||||||
|
|
||||||
* `Third party libraries
|
|
||||||
<http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
|
|
||||||
* `Built with aiohttp
|
|
||||||
<http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
|
|
||||||
* `Powered by aiohttp
|
|
||||||
<http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
|
|
||||||
|
|
||||||
Feel free to make a Pull Request for adding your link to these pages!
|
|
||||||
|
|
||||||
|
|
||||||
Communication channels
|
|
||||||
======================
|
|
||||||
|
|
||||||
*aio-libs discourse group*: https://aio-libs.discourse.group
|
|
||||||
|
|
||||||
*gitter chat* https://gitter.im/aio-libs/Lobby
|
|
||||||
|
|
||||||
We support `Stack Overflow
|
|
||||||
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
|
|
||||||
Please add *aiohttp* tag to your question there.
|
|
||||||
|
|
||||||
Requirements
|
|
||||||
============
|
|
||||||
|
|
||||||
- Python >= 3.6
|
|
||||||
- async-timeout_
|
|
||||||
- attrs_
|
|
||||||
- chardet_
|
|
||||||
- multidict_
|
|
||||||
- yarl_
|
|
||||||
|
|
||||||
Optionally you may install the cChardet_ and aiodns_ libraries (highly
|
|
||||||
recommended for sake of speed).
|
|
||||||
|
|
||||||
.. _chardet: https://pypi.python.org/pypi/chardet
|
|
||||||
.. _aiodns: https://pypi.python.org/pypi/aiodns
|
|
||||||
.. _attrs: https://github.com/python-attrs/attrs
|
|
||||||
.. _multidict: https://pypi.python.org/pypi/multidict
|
|
||||||
.. _yarl: https://pypi.python.org/pypi/yarl
|
|
||||||
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
|
|
||||||
.. _cChardet: https://pypi.python.org/pypi/cchardet
|
|
||||||
|
|
||||||
License
|
|
||||||
=======
|
|
||||||
|
|
||||||
``aiohttp`` is offered under the Apache 2 license.
|
|
||||||
|
|
||||||
|
|
||||||
Keepsafe
|
|
||||||
========
|
|
||||||
|
|
||||||
The aiohttp community would like to thank Keepsafe
|
|
||||||
(https://www.getkeepsafe.com) for its support in the early days of
|
|
||||||
the project.
|
|
||||||
|
|
||||||
|
|
||||||
Source code
|
|
||||||
===========
|
|
||||||
|
|
||||||
The latest developer version is available in a GitHub repository:
|
|
||||||
https://github.com/aio-libs/aiohttp
|
|
||||||
|
|
||||||
Benchmarks
|
|
||||||
==========
|
|
||||||
|
|
||||||
If you are interested in efficiency, the AsyncIO community maintains a
|
|
||||||
list of benchmarks on the official wiki:
|
|
||||||
https://github.com/python/asyncio/wiki/Benchmarks
|
|
||||||
|
|
||||||
=========
|
|
||||||
Changelog
|
|
||||||
=========
|
|
||||||
|
|
||||||
..
|
|
||||||
You should *NOT* be adding new change log entries to this file, this
|
|
||||||
file is managed by towncrier. You *may* edit previous change logs to
|
|
||||||
fix problems like typo corrections or such.
|
|
||||||
To add a new change log entry, please see
|
|
||||||
https://pip.pypa.io/en/latest/development/#adding-a-news-entry
|
|
||||||
we named the news folder "changes".
|
|
||||||
|
|
||||||
WARNING: Don't drop the next directive!
|
|
||||||
|
|
||||||
.. towncrier release notes start
|
|
||||||
|
|
||||||
3.7.4.post0 (2021-03-06)
|
|
||||||
========================
|
|
||||||
|
|
||||||
Misc
|
|
||||||
----
|
|
||||||
|
|
||||||
- Bumped upper bound of the ``chardet`` runtime dependency
|
|
||||||
to allow their v4.0 version stream.
|
|
||||||
`#5366 <https://github.com/aio-libs/aiohttp/issues/5366>`_
|
|
||||||
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.7.4 (2021-02-25)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- **(SECURITY BUG)** Started preventing open redirects in the
|
|
||||||
``aiohttp.web.normalize_path_middleware`` middleware. For
|
|
||||||
more details, see
|
|
||||||
https://github.com/aio-libs/aiohttp/security/advisories/GHSA-v6wp-4m6f-gcjg.
|
|
||||||
|
|
||||||
Thanks to `Beast Glatisant <https://github.com/g147>`__ for
|
|
||||||
finding the first instance of this issue and `Jelmer Vernooij
|
|
||||||
<https://jelmer.uk/>`__ for reporting and tracking it down
|
|
||||||
in aiohttp.
|
|
||||||
`#5497 <https://github.com/aio-libs/aiohttp/issues/5497>`_
|
|
||||||
- Fix interpretation difference of the pure-Python and the Cython-based
|
|
||||||
HTTP parsers construct a ``yarl.URL`` object for HTTP request-target.
|
|
||||||
|
|
||||||
Before this fix, the Python parser would turn the URI's absolute-path
|
|
||||||
for ``//some-path`` into ``/`` while the Cython code preserved it as
|
|
||||||
``//some-path``. Now, both do the latter.
|
|
||||||
`#5498 <https://github.com/aio-libs/aiohttp/issues/5498>`_
|
|
||||||
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.7.3 (2020-11-18)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Features
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Use Brotli instead of brotlipy
|
|
||||||
`#3803 <https://github.com/aio-libs/aiohttp/issues/3803>`_
|
|
||||||
- Made exceptions pickleable. Also changed the repr of some exceptions.
|
|
||||||
`#4077 <https://github.com/aio-libs/aiohttp/issues/4077>`_
|
|
||||||
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Raise a ClientResponseError instead of an AssertionError for a blank
|
|
||||||
HTTP Reason Phrase.
|
|
||||||
`#3532 <https://github.com/aio-libs/aiohttp/issues/3532>`_
|
|
||||||
- Fix ``web_middlewares.normalize_path_middleware`` behavior for patch without slash.
|
|
||||||
`#3669 <https://github.com/aio-libs/aiohttp/issues/3669>`_
|
|
||||||
- Fix overshadowing of overlapped sub-applications prefixes.
|
|
||||||
`#3701 <https://github.com/aio-libs/aiohttp/issues/3701>`_
|
|
||||||
- Make `BaseConnector.close()` a coroutine and wait until the client closes all connections. Drop deprecated "with Connector():" syntax.
|
|
||||||
`#3736 <https://github.com/aio-libs/aiohttp/issues/3736>`_
|
|
||||||
- Reset the ``sock_read`` timeout each time data is received for a ``aiohttp.client`` response.
|
|
||||||
`#3808 <https://github.com/aio-libs/aiohttp/issues/3808>`_
|
|
||||||
- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of View
|
|
||||||
`#3880 <https://github.com/aio-libs/aiohttp/issues/3880>`_
|
|
||||||
- Fixed querying the address families from DNS that the current host supports.
|
|
||||||
`#5156 <https://github.com/aio-libs/aiohttp/issues/5156>`_
|
|
||||||
- Change return type of MultipartReader.__aiter__() and BodyPartReader.__aiter__() to AsyncIterator.
|
|
||||||
`#5163 <https://github.com/aio-libs/aiohttp/issues/5163>`_
|
|
||||||
- Provide x86 Windows wheels.
|
|
||||||
`#5230 <https://github.com/aio-libs/aiohttp/issues/5230>`_
|
|
||||||
|
|
||||||
|
|
||||||
Improved Documentation
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
- Add documentation for ``aiohttp.web.FileResponse``.
|
|
||||||
`#3958 <https://github.com/aio-libs/aiohttp/issues/3958>`_
|
|
||||||
- Removed deprecation warning in tracing example docs
|
|
||||||
`#3964 <https://github.com/aio-libs/aiohttp/issues/3964>`_
|
|
||||||
- Fixed wrong "Usage" docstring of ``aiohttp.client.request``.
|
|
||||||
`#4603 <https://github.com/aio-libs/aiohttp/issues/4603>`_
|
|
||||||
- Add aiohttp-pydantic to third party libraries
|
|
||||||
`#5228 <https://github.com/aio-libs/aiohttp/issues/5228>`_
|
|
||||||
|
|
||||||
|
|
||||||
Misc
|
|
||||||
----
|
|
||||||
|
|
||||||
- `#4102 <https://github.com/aio-libs/aiohttp/issues/4102>`_
|
|
||||||
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.7.2 (2020-10-27)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Fixed static files handling for loops without ``.sendfile()`` support
|
|
||||||
`#5149 <https://github.com/aio-libs/aiohttp/issues/5149>`_
|
|
||||||
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.7.1 (2020-10-25)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Fixed a type error caused by the conditional import of `Protocol`.
|
|
||||||
`#5111 <https://github.com/aio-libs/aiohttp/issues/5111>`_
|
|
||||||
- Server doesn't send Content-Length for 1xx or 204
|
|
||||||
`#4901 <https://github.com/aio-libs/aiohttp/issues/4901>`_
|
|
||||||
- Fix run_app typing
|
|
||||||
`#4957 <https://github.com/aio-libs/aiohttp/issues/4957>`_
|
|
||||||
- Always require ``typing_extensions`` library.
|
|
||||||
`#5107 <https://github.com/aio-libs/aiohttp/issues/5107>`_
|
|
||||||
- Fix a variable-shadowing bug causing `ThreadedResolver.resolve` to
|
|
||||||
return the resolved IP as the ``hostname`` in each record, which prevented
|
|
||||||
validation of HTTPS connections.
|
|
||||||
`#5110 <https://github.com/aio-libs/aiohttp/issues/5110>`_
|
|
||||||
- Added annotations to all public attributes.
|
|
||||||
`#5115 <https://github.com/aio-libs/aiohttp/issues/5115>`_
|
|
||||||
- Fix flaky test_when_timeout_smaller_second
|
|
||||||
`#5116 <https://github.com/aio-libs/aiohttp/issues/5116>`_
|
|
||||||
- Ensure sending a zero byte file does not throw an exception
|
|
||||||
`#5124 <https://github.com/aio-libs/aiohttp/issues/5124>`_
|
|
||||||
- Fix a bug in ``web.run_app()`` about Python version checking on Windows
|
|
||||||
`#5127 <https://github.com/aio-libs/aiohttp/issues/5127>`_
|
|
||||||
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.7.0 (2020-10-24)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Features
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Response headers are now prepared prior to running ``on_response_prepare`` hooks, directly before headers are sent to the client.
|
|
||||||
`#1958 <https://github.com/aio-libs/aiohttp/issues/1958>`_
|
|
||||||
- Add a ``quote_cookie`` option to ``CookieJar``, a way to skip quotation wrapping of cookies containing special characters.
|
|
||||||
`#2571 <https://github.com/aio-libs/aiohttp/issues/2571>`_
|
|
||||||
- Call ``AccessLogger.log`` with the current exception available from ``sys.exc_info()``.
|
|
||||||
`#3557 <https://github.com/aio-libs/aiohttp/issues/3557>`_
|
|
||||||
- `web.UrlDispatcher.add_routes` and `web.Application.add_routes` return a list
|
|
||||||
of registered `AbstractRoute` instances. `AbstractRouteDef.register` (and all
|
|
||||||
subclasses) return a list of registered resources registered resource.
|
|
||||||
`#3866 <https://github.com/aio-libs/aiohttp/issues/3866>`_
|
|
||||||
- Added properties of default ClientSession params to ClientSession class so it is available for introspection
|
|
||||||
`#3882 <https://github.com/aio-libs/aiohttp/issues/3882>`_
|
|
||||||
- Don't cancel web handler on peer disconnection, raise `OSError` on reading/writing instead.
|
|
||||||
`#4080 <https://github.com/aio-libs/aiohttp/issues/4080>`_
|
|
||||||
- Implement BaseRequest.get_extra_info() to access a protocol transports' extra info.
|
|
||||||
`#4189 <https://github.com/aio-libs/aiohttp/issues/4189>`_
|
|
||||||
- Added `ClientSession.timeout` property.
|
|
||||||
`#4191 <https://github.com/aio-libs/aiohttp/issues/4191>`_
|
|
||||||
- allow use of SameSite in cookies.
|
|
||||||
`#4224 <https://github.com/aio-libs/aiohttp/issues/4224>`_
|
|
||||||
- Use ``loop.sendfile()`` instead of custom implementation if available.
|
|
||||||
`#4269 <https://github.com/aio-libs/aiohttp/issues/4269>`_
|
|
||||||
- Apply SO_REUSEADDR to test server's socket.
|
|
||||||
`#4393 <https://github.com/aio-libs/aiohttp/issues/4393>`_
|
|
||||||
- Use .raw_host instead of slower .host in client API
|
|
||||||
`#4402 <https://github.com/aio-libs/aiohttp/issues/4402>`_
|
|
||||||
- Allow configuring the buffer size of input stream by passing ``read_bufsize`` argument.
|
|
||||||
`#4453 <https://github.com/aio-libs/aiohttp/issues/4453>`_
|
|
||||||
- Pass tests on Python 3.8 for Windows.
|
|
||||||
`#4513 <https://github.com/aio-libs/aiohttp/issues/4513>`_
|
|
||||||
- Add `method` and `url` attributes to `TraceRequestChunkSentParams` and `TraceResponseChunkReceivedParams`.
|
|
||||||
`#4674 <https://github.com/aio-libs/aiohttp/issues/4674>`_
|
|
||||||
- Add ClientResponse.ok property for checking status code under 400.
|
|
||||||
`#4711 <https://github.com/aio-libs/aiohttp/issues/4711>`_
|
|
||||||
- Don't ceil timeouts that are smaller than 5 seconds.
|
|
||||||
`#4850 <https://github.com/aio-libs/aiohttp/issues/4850>`_
|
|
||||||
- TCPSite now listens by default on all interfaces instead of just IPv4 when `None` is passed in as the host.
|
|
||||||
`#4894 <https://github.com/aio-libs/aiohttp/issues/4894>`_
|
|
||||||
- Bump ``http_parser`` to 2.9.4
|
|
||||||
`#5070 <https://github.com/aio-libs/aiohttp/issues/5070>`_
|
|
||||||
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Fix keepalive connections not being closed in time
|
|
||||||
`#3296 <https://github.com/aio-libs/aiohttp/issues/3296>`_
|
|
||||||
- Fix failed websocket handshake leaving connection hanging.
|
|
||||||
`#3380 <https://github.com/aio-libs/aiohttp/issues/3380>`_
|
|
||||||
- Fix tasks cancellation order on exit. The run_app task needs to be cancelled first for cleanup hooks to run with all tasks intact.
|
|
||||||
`#3805 <https://github.com/aio-libs/aiohttp/issues/3805>`_
|
|
||||||
- Don't start heartbeat until _writer is set
|
|
||||||
`#4062 <https://github.com/aio-libs/aiohttp/issues/4062>`_
|
|
||||||
- Fix handling of multipart file uploads without a content type.
|
|
||||||
`#4089 <https://github.com/aio-libs/aiohttp/issues/4089>`_
|
|
||||||
- Preserve view handler function attributes across middlewares
|
|
||||||
`#4174 <https://github.com/aio-libs/aiohttp/issues/4174>`_
|
|
||||||
- Fix the string representation of ``ServerDisconnectedError``.
|
|
||||||
`#4175 <https://github.com/aio-libs/aiohttp/issues/4175>`_
|
|
||||||
- Raising RuntimeError when trying to get encoding from not read body
|
|
||||||
`#4214 <https://github.com/aio-libs/aiohttp/issues/4214>`_
|
|
||||||
- Remove warning messages from noop.
|
|
||||||
`#4282 <https://github.com/aio-libs/aiohttp/issues/4282>`_
|
|
||||||
- Raise ClientPayloadError if FormData re-processed.
|
|
||||||
`#4345 <https://github.com/aio-libs/aiohttp/issues/4345>`_
|
|
||||||
- Fix a warning about unfinished task in ``web_protocol.py``
|
|
||||||
`#4408 <https://github.com/aio-libs/aiohttp/issues/4408>`_
|
|
||||||
- Fixed 'deflate' compression. According to RFC 2616 now.
|
|
||||||
`#4506 <https://github.com/aio-libs/aiohttp/issues/4506>`_
|
|
||||||
- Fixed OverflowError on platforms with 32-bit time_t
|
|
||||||
`#4515 <https://github.com/aio-libs/aiohttp/issues/4515>`_
|
|
||||||
- Fixed request.body_exists returns wrong value for methods without body.
|
|
||||||
`#4528 <https://github.com/aio-libs/aiohttp/issues/4528>`_
|
|
||||||
- Fix connecting to link-local IPv6 addresses.
|
|
||||||
`#4554 <https://github.com/aio-libs/aiohttp/issues/4554>`_
|
|
||||||
- Fix a problem with connection waiters that are never awaited.
|
|
||||||
`#4562 <https://github.com/aio-libs/aiohttp/issues/4562>`_
|
|
||||||
- Always make sure transport is not closing before reuse a connection.
|
|
||||||
|
|
||||||
Reuse a protocol based on keepalive in headers is unreliable.
|
|
||||||
For example, uWSGI will not support keepalive even it serves a
|
|
||||||
HTTP 1.1 request, except explicitly configure uWSGI with a
|
|
||||||
``--http-keepalive`` option.
|
|
||||||
|
|
||||||
Servers designed like uWSGI could cause aiohttp intermittently
|
|
||||||
raise a ConnectionResetException when the protocol poll runs
|
|
||||||
out and some protocol is reused.
|
|
||||||
`#4587 <https://github.com/aio-libs/aiohttp/issues/4587>`_
|
|
||||||
- Handle the last CRLF correctly even if it is received via separate TCP segment.
|
|
||||||
`#4630 <https://github.com/aio-libs/aiohttp/issues/4630>`_
|
|
||||||
- Fix the register_resource function to validate route name before splitting it so that route name can include python keywords.
|
|
||||||
`#4691 <https://github.com/aio-libs/aiohttp/issues/4691>`_
|
|
||||||
- Improve typing annotations for ``web.Request``, ``aiohttp.ClientResponse`` and
|
|
||||||
``multipart`` module.
|
|
||||||
`#4736 <https://github.com/aio-libs/aiohttp/issues/4736>`_
|
|
||||||
- Fix resolver task is not awaited when connector is cancelled
|
|
||||||
`#4795 <https://github.com/aio-libs/aiohttp/issues/4795>`_
|
|
||||||
- Fix a bug "Aiohttp doesn't return any error on invalid request methods"
|
|
||||||
`#4798 <https://github.com/aio-libs/aiohttp/issues/4798>`_
|
|
||||||
- Fix HEAD requests for static content.
|
|
||||||
`#4809 <https://github.com/aio-libs/aiohttp/issues/4809>`_
|
|
||||||
- Fix incorrect size calculation for memoryview
|
|
||||||
`#4890 <https://github.com/aio-libs/aiohttp/issues/4890>`_
|
|
||||||
- Add HTTPMove to _all__.
|
|
||||||
`#4897 <https://github.com/aio-libs/aiohttp/issues/4897>`_
|
|
||||||
- Fixed the type annotations in the ``tracing`` module.
|
|
||||||
`#4912 <https://github.com/aio-libs/aiohttp/issues/4912>`_
|
|
||||||
- Fix typing for multipart ``__aiter__``.
|
|
||||||
`#4931 <https://github.com/aio-libs/aiohttp/issues/4931>`_
|
|
||||||
- Fix for race condition on connections in BaseConnector that leads to exceeding the connection limit.
|
|
||||||
`#4936 <https://github.com/aio-libs/aiohttp/issues/4936>`_
|
|
||||||
- Add forced UTF-8 encoding for ``application/rdap+json`` responses.
|
|
||||||
`#4938 <https://github.com/aio-libs/aiohttp/issues/4938>`_
|
|
||||||
- Fix inconsistency between Python and C http request parsers in parsing pct-encoded URL.
|
|
||||||
`#4972 <https://github.com/aio-libs/aiohttp/issues/4972>`_
|
|
||||||
- Fix connection closing issue in HEAD request.
|
|
||||||
`#5012 <https://github.com/aio-libs/aiohttp/issues/5012>`_
|
|
||||||
- Fix type hint on BaseRunner.addresses (from ``List[str]`` to ``List[Any]``)
|
|
||||||
`#5086 <https://github.com/aio-libs/aiohttp/issues/5086>`_
|
|
||||||
- Make `web.run_app()` more responsive to Ctrl+C on Windows for Python < 3.8. It slightly
|
|
||||||
increases CPU load as a side effect.
|
|
||||||
`#5098 <https://github.com/aio-libs/aiohttp/issues/5098>`_
|
|
||||||
|
|
||||||
|
|
||||||
Improved Documentation
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
- Fix example code in client quick-start
|
|
||||||
`#3376 <https://github.com/aio-libs/aiohttp/issues/3376>`_
|
|
||||||
- Updated the docs so there is no contradiction in ``ttl_dns_cache`` default value
|
|
||||||
`#3512 <https://github.com/aio-libs/aiohttp/issues/3512>`_
|
|
||||||
- Add 'Deploy with SSL' to docs.
|
|
||||||
`#4201 <https://github.com/aio-libs/aiohttp/issues/4201>`_
|
|
||||||
- Change typing of the secure argument on StreamResponse.set_cookie from ``Optional[str]`` to ``Optional[bool]``
|
|
||||||
`#4204 <https://github.com/aio-libs/aiohttp/issues/4204>`_
|
|
||||||
- Changes ``ttl_dns_cache`` type from int to Optional[int].
|
|
||||||
`#4270 <https://github.com/aio-libs/aiohttp/issues/4270>`_
|
|
||||||
- Simplify README hello word example and add a documentation page for people coming from requests.
|
|
||||||
`#4272 <https://github.com/aio-libs/aiohttp/issues/4272>`_
|
|
||||||
- Improve some code examples in the documentation involving websockets and starting a simple HTTP site with an AppRunner.
|
|
||||||
`#4285 <https://github.com/aio-libs/aiohttp/issues/4285>`_
|
|
||||||
- Fix typo in code example in Multipart docs
|
|
||||||
`#4312 <https://github.com/aio-libs/aiohttp/issues/4312>`_
|
|
||||||
- Fix code example in Multipart section.
|
|
||||||
`#4314 <https://github.com/aio-libs/aiohttp/issues/4314>`_
|
|
||||||
- Update contributing guide so new contributors read the most recent version of that guide. Update command used to create test coverage reporting.
|
|
||||||
`#4810 <https://github.com/aio-libs/aiohttp/issues/4810>`_
|
|
||||||
- Spelling: Change "canonize" to "canonicalize".
|
|
||||||
`#4986 <https://github.com/aio-libs/aiohttp/issues/4986>`_
|
|
||||||
- Add ``aiohttp-sse-client`` library to third party usage list.
|
|
||||||
`#5084 <https://github.com/aio-libs/aiohttp/issues/5084>`_
|
|
||||||
|
|
||||||
|
|
||||||
Misc
|
|
||||||
----
|
|
||||||
|
|
||||||
- `#2856 <https://github.com/aio-libs/aiohttp/issues/2856>`_, `#4218 <https://github.com/aio-libs/aiohttp/issues/4218>`_, `#4250 <https://github.com/aio-libs/aiohttp/issues/4250>`_
|
|
||||||
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.6.3 (2020-10-12)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Pin yarl to ``<1.6.0`` to avoid buggy behavior that will be fixed by the next aiohttp
|
|
||||||
release.
|
|
||||||
|
|
||||||
3.6.2 (2019-10-09)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Features
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Made exceptions pickleable. Also changed the repr of some exceptions.
|
|
||||||
`#4077 <https://github.com/aio-libs/aiohttp/issues/4077>`_
|
|
||||||
- Use ``Iterable`` type hint instead of ``Sequence`` for ``Application`` *middleware*
|
|
||||||
parameter. `#4125 <https://github.com/aio-libs/aiohttp/issues/4125>`_
|
|
||||||
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Reset the ``sock_read`` timeout each time data is received for a
|
|
||||||
``aiohttp.ClientResponse``. `#3808
|
|
||||||
<https://github.com/aio-libs/aiohttp/issues/3808>`_
|
|
||||||
- Fix handling of expired cookies so they are not stored in CookieJar.
|
|
||||||
`#4063 <https://github.com/aio-libs/aiohttp/issues/4063>`_
|
|
||||||
- Fix misleading message in the string representation of ``ClientConnectorError``;
|
|
||||||
``self.ssl == None`` means default SSL context, not SSL disabled `#4097
|
|
||||||
<https://github.com/aio-libs/aiohttp/issues/4097>`_
|
|
||||||
- Don't clobber HTTP status when using FileResponse.
|
|
||||||
`#4106 <https://github.com/aio-libs/aiohttp/issues/4106>`_
|
|
||||||
|
|
||||||
|
|
||||||
Improved Documentation
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
- Added minimal required logging configuration to logging documentation.
|
|
||||||
`#2469 <https://github.com/aio-libs/aiohttp/issues/2469>`_
|
|
||||||
- Update docs to reflect proxy support.
|
|
||||||
`#4100 <https://github.com/aio-libs/aiohttp/issues/4100>`_
|
|
||||||
- Fix typo in code example in testing docs.
|
|
||||||
`#4108 <https://github.com/aio-libs/aiohttp/issues/4108>`_
|
|
||||||
|
|
||||||
|
|
||||||
Misc
|
|
||||||
----
|
|
||||||
|
|
||||||
- `#4102 <https://github.com/aio-libs/aiohttp/issues/4102>`_
|
|
||||||
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.6.1 (2019-09-19)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Features
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Compatibility with Python 3.8.
|
|
||||||
`#4056 <https://github.com/aio-libs/aiohttp/issues/4056>`_
|
|
||||||
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- correct some exception string format
|
|
||||||
`#4068 <https://github.com/aio-libs/aiohttp/issues/4068>`_
|
|
||||||
- Emit a warning when ``ssl.OP_NO_COMPRESSION`` is
|
|
||||||
unavailable because the runtime is built against
|
|
||||||
an outdated OpenSSL.
|
|
||||||
`#4052 <https://github.com/aio-libs/aiohttp/issues/4052>`_
|
|
||||||
- Update multidict requirement to >= 4.5
|
|
||||||
`#4057 <https://github.com/aio-libs/aiohttp/issues/4057>`_
|
|
||||||
|
|
||||||
|
|
||||||
Improved Documentation
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
- Provide pytest-aiohttp namespace for pytest fixtures in docs.
|
|
||||||
`#3723 <https://github.com/aio-libs/aiohttp/issues/3723>`_
|
|
||||||
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.6.0 (2019-09-06)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Features
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Add support for Named Pipes (Site and Connector) under Windows. This feature requires
|
|
||||||
Proactor event loop to work. `#3629
|
|
||||||
<https://github.com/aio-libs/aiohttp/issues/3629>`_
|
|
||||||
- Removed ``Transfer-Encoding: chunked`` header from websocket responses to be
|
|
||||||
compatible with more http proxy servers. `#3798
|
|
||||||
<https://github.com/aio-libs/aiohttp/issues/3798>`_
|
|
||||||
- Accept non-GET request for starting websocket handshake on server side.
|
|
||||||
`#3980 <https://github.com/aio-libs/aiohttp/issues/3980>`_
|
|
||||||
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Raise a ClientResponseError instead of an AssertionError for a blank
|
|
||||||
HTTP Reason Phrase.
|
|
||||||
`#3532 <https://github.com/aio-libs/aiohttp/issues/3532>`_
|
|
||||||
- Fix an issue where cookies would sometimes not be set during a redirect.
|
|
||||||
`#3576 <https://github.com/aio-libs/aiohttp/issues/3576>`_
|
|
||||||
- Change normalize_path_middleware to use 308 redirect instead of 301.
|
|
||||||
|
|
||||||
This behavior should prevent clients from being unable to use PUT/POST
|
|
||||||
methods on endpoints that are redirected because of a trailing slash.
|
|
||||||
`#3579 <https://github.com/aio-libs/aiohttp/issues/3579>`_
|
|
||||||
- Drop the processed task from ``all_tasks()`` list early. It prevents logging about a
|
|
||||||
task with unhandled exception when the server is used in conjunction with
|
|
||||||
``asyncio.run()``. `#3587 <https://github.com/aio-libs/aiohttp/issues/3587>`_
|
|
||||||
- ``Signal`` type annotation changed from ``Signal[Callable[['TraceConfig'],
|
|
||||||
Awaitable[None]]]`` to ``Signal[Callable[ClientSession, SimpleNamespace, ...]``.
|
|
||||||
`#3595 <https://github.com/aio-libs/aiohttp/issues/3595>`_
|
|
||||||
- Use sanitized URL as Location header in redirects
|
|
||||||
`#3614 <https://github.com/aio-libs/aiohttp/issues/3614>`_
|
|
||||||
- Improve typing annotations for multipart.py along with changes required
|
|
||||||
by mypy in files that references multipart.py.
|
|
||||||
`#3621 <https://github.com/aio-libs/aiohttp/issues/3621>`_
|
|
||||||
- Close session created inside ``aiohttp.request`` when unhandled exception occurs
|
|
||||||
`#3628 <https://github.com/aio-libs/aiohttp/issues/3628>`_
|
|
||||||
- Cleanup per-chunk data in generic data read. Memory leak fixed.
|
|
||||||
`#3631 <https://github.com/aio-libs/aiohttp/issues/3631>`_
|
|
||||||
- Use correct type for add_view and family
|
|
||||||
`#3633 <https://github.com/aio-libs/aiohttp/issues/3633>`_
|
|
||||||
- Fix _keepalive field in __slots__ of ``RequestHandler``.
|
|
||||||
`#3644 <https://github.com/aio-libs/aiohttp/issues/3644>`_
|
|
||||||
- Properly handle ConnectionResetError, to silence the "Cannot write to closing
|
|
||||||
transport" exception when clients disconnect uncleanly.
|
|
||||||
`#3648 <https://github.com/aio-libs/aiohttp/issues/3648>`_
|
|
||||||
- Suppress pytest warnings due to ``test_utils`` classes
|
|
||||||
`#3660 <https://github.com/aio-libs/aiohttp/issues/3660>`_
|
|
||||||
- Fix overshadowing of overlapped sub-application prefixes.
|
|
||||||
`#3701 <https://github.com/aio-libs/aiohttp/issues/3701>`_
|
|
||||||
- Fixed return type annotation for WSMessage.json()
|
|
||||||
`#3720 <https://github.com/aio-libs/aiohttp/issues/3720>`_
|
|
||||||
- Properly expose TooManyRedirects publicly as documented.
|
|
||||||
`#3818 <https://github.com/aio-libs/aiohttp/issues/3818>`_
|
|
||||||
- Fix missing brackets for IPv6 in proxy CONNECT request
|
|
||||||
`#3841 <https://github.com/aio-libs/aiohttp/issues/3841>`_
|
|
||||||
- Make the signature of ``aiohttp.test_utils.TestClient.request`` match
|
|
||||||
``asyncio.ClientSession.request`` according to the docs `#3852
|
|
||||||
<https://github.com/aio-libs/aiohttp/issues/3852>`_
|
|
||||||
- Use correct style for re-exported imports, makes mypy ``--strict`` mode happy.
|
|
||||||
`#3868 <https://github.com/aio-libs/aiohttp/issues/3868>`_
|
|
||||||
- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of
|
|
||||||
View `#3880 <https://github.com/aio-libs/aiohttp/issues/3880>`_
|
|
||||||
- Made cython HTTP parser set Reason-Phrase of the response to an empty string if it is
|
|
||||||
missing. `#3906 <https://github.com/aio-libs/aiohttp/issues/3906>`_
|
|
||||||
- Add URL to the string representation of ClientResponseError.
|
|
||||||
`#3959 <https://github.com/aio-libs/aiohttp/issues/3959>`_
|
|
||||||
- Accept ``istr`` keys in ``LooseHeaders`` type hints.
|
|
||||||
`#3976 <https://github.com/aio-libs/aiohttp/issues/3976>`_
|
|
||||||
- Fixed race conditions in _resolve_host caching and throttling when tracing is enabled.
|
|
||||||
`#4013 <https://github.com/aio-libs/aiohttp/issues/4013>`_
|
|
||||||
- For URLs like "unix://localhost/..." set Host HTTP header to "localhost" instead of
|
|
||||||
"localhost:None". `#4039 <https://github.com/aio-libs/aiohttp/issues/4039>`_
|
|
||||||
|
|
||||||
|
|
||||||
Improved Documentation
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
- Modify documentation for Background Tasks to remove deprecated usage of event loop.
|
|
||||||
`#3526 <https://github.com/aio-libs/aiohttp/issues/3526>`_
|
|
||||||
- use ``if __name__ == '__main__':`` in server examples.
|
|
||||||
`#3775 <https://github.com/aio-libs/aiohttp/issues/3775>`_
|
|
||||||
- Update documentation reference to the default access logger.
|
|
||||||
`#3783 <https://github.com/aio-libs/aiohttp/issues/3783>`_
|
|
||||||
- Improve documentation for ``web.BaseRequest.path`` and ``web.BaseRequest.raw_path``.
|
|
||||||
`#3791 <https://github.com/aio-libs/aiohttp/issues/3791>`_
|
|
||||||
- Removed deprecation warning in tracing example docs
|
|
||||||
`#3964 <https://github.com/aio-libs/aiohttp/issues/3964>`_
|
|
||||||
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.5.4 (2019-01-12)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()`` which used to return a
|
|
||||||
partial content only in case of compressed content
|
|
||||||
`#3525 <https://github.com/aio-libs/aiohttp/issues/3525>`_
|
|
||||||
|
|
||||||
|
|
||||||
3.5.3 (2019-01-10)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of
|
|
||||||
``access_log=True`` and the event loop being in debug mode. `#3504
|
|
||||||
<https://github.com/aio-libs/aiohttp/issues/3504>`_
|
|
||||||
- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields
|
|
||||||
`#3511 <https://github.com/aio-libs/aiohttp/issues/3511>`_
|
|
||||||
- Send custom per-request cookies even if session jar is empty
|
|
||||||
`#3515 <https://github.com/aio-libs/aiohttp/issues/3515>`_
|
|
||||||
- Restore Linux binary wheels publishing on PyPI
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.5.2 (2019-01-08)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Features
|
|
||||||
--------
|
|
||||||
|
|
||||||
- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work
|
|
||||||
with files asynchronously. I/O based payloads from ``payload.py`` uses a
|
|
||||||
``ThreadPoolExecutor`` to work with I/O objects asynchronously. `#3313
|
|
||||||
<https://github.com/aio-libs/aiohttp/issues/3313>`_
|
|
||||||
- Internal Server Errors in plain text if the browser does not support HTML.
|
|
||||||
`#3483 <https://github.com/aio-libs/aiohttp/issues/3483>`_
|
|
||||||
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Preserve MultipartWriter parts headers on write. Refactor the way how
|
|
||||||
``Payload.headers`` are handled. Payload instances now always have headers and
|
|
||||||
Content-Type defined. Fix Payload Content-Disposition header reset after initial
|
|
||||||
creation. `#3035 <https://github.com/aio-libs/aiohttp/issues/3035>`_
|
|
||||||
- Log suppressed exceptions in ``GunicornWebWorker``.
|
|
||||||
`#3464 <https://github.com/aio-libs/aiohttp/issues/3464>`_
|
|
||||||
- Remove wildcard imports.
|
|
||||||
`#3468 <https://github.com/aio-libs/aiohttp/issues/3468>`_
|
|
||||||
- Use the same task for app initialization and web server handling in gunicorn workers.
|
|
||||||
It allows to use Python3.7 context vars smoothly.
|
|
||||||
`#3471 <https://github.com/aio-libs/aiohttp/issues/3471>`_
|
|
||||||
- Fix handling of chunked+gzipped response when first chunk does not give uncompressed
|
|
||||||
data `#3477 <https://github.com/aio-libs/aiohttp/issues/3477>`_
|
|
||||||
- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to
|
|
||||||
avoid a deprecation warning. `#3480
|
|
||||||
<https://github.com/aio-libs/aiohttp/issues/3480>`_
|
|
||||||
- ``Payload.size`` type annotation changed from ``Optional[float]`` to
|
|
||||||
``Optional[int]``. `#3484 <https://github.com/aio-libs/aiohttp/issues/3484>`_
|
|
||||||
- Ignore done tasks when cancels pending activities on ``web.run_app`` finalization.
|
|
||||||
`#3497 <https://github.com/aio-libs/aiohttp/issues/3497>`_
|
|
||||||
|
|
||||||
|
|
||||||
Improved Documentation
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
- Add documentation for ``aiohttp.web.HTTPException``.
|
|
||||||
`#3490 <https://github.com/aio-libs/aiohttp/issues/3490>`_
|
|
||||||
|
|
||||||
|
|
||||||
Misc
|
|
||||||
----
|
|
||||||
|
|
||||||
- `#3487 <https://github.com/aio-libs/aiohttp/issues/3487>`_
|
|
||||||
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.5.1 (2018-12-24)
|
|
||||||
====================
|
|
||||||
|
|
||||||
- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug
|
|
||||||
mode.
|
|
||||||
|
|
||||||
3.5.0 (2018-12-22)
|
|
||||||
====================
|
|
||||||
|
|
||||||
Features
|
|
||||||
--------
|
|
||||||
|
|
||||||
- The library type annotations are checked in strict mode now.
|
|
||||||
- Add support for setting cookies for individual request (`#2387
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/2387>`_)
|
|
||||||
- Application.add_domain implementation (`#2809
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/2809>`_)
|
|
||||||
- The default ``app`` in the request returned by ``test_utils.make_mocked_request`` can
|
|
||||||
now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3174>`_)
|
|
||||||
- Make ``request.url`` accessible when transport is closed. (`#3177
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3177>`_)
|
|
||||||
- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression
|
|
||||||
to run in a background executor to avoid blocking the main thread and potentially
|
|
||||||
triggering health check failures. (`#3205
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3205>`_)
|
|
||||||
- Enable users to set ``ClientTimeout`` in ``aiohttp.request`` (`#3213
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3213>`_)
|
|
||||||
- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc``
|
|
||||||
file doesn't exist. (`#3267 <https://github.com/aio-libs/aiohttp/pull/3267>`_)
|
|
||||||
- Add default logging handler to web.run_app If the ``Application.debug``` flag is set
|
|
||||||
and the default logger ``aiohttp.access`` is used, access logs will now be output
|
|
||||||
using a *stderr* ``StreamHandler`` if no handlers are attached. Furthermore, if the
|
|
||||||
default logger has no log level set, the log level will be set to ``DEBUG``. (`#3324
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3324>`_)
|
|
||||||
- Add method argument to ``session.ws_connect()``. Sometimes server API requires a
|
|
||||||
different HTTP method for WebSocket connection establishment. For example, ``Docker
|
|
||||||
exec`` needs POST. (`#3378 <https://github.com/aio-libs/aiohttp/pull/3378>`_)
|
|
||||||
- Create a task per request handling. (`#3406
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3406>`_)
|
|
||||||
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Enable passing ``access_log_class`` via ``handler_args`` (`#3158
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3158>`_)
|
|
||||||
- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3186>`_)
|
|
||||||
- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response``
|
|
||||||
constructor. (`#3207 <https://github.com/aio-libs/aiohttp/pull/3207>`_)
|
|
||||||
- Don't uppercase HTTP method in parser (`#3233
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3233>`_)
|
|
||||||
- Make method match regexp RFC-7230 compliant (`#3235
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3235>`_)
|
|
||||||
- Add ``app.pre_frozen`` state to properly handle startup signals in
|
|
||||||
sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_)
|
|
||||||
- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3239>`_)
|
|
||||||
- Change imports from collections module in preparation for 3.8. (`#3258
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3258>`_)
|
|
||||||
- Ensure Host header is added first to ClientRequest to better replicate browser (`#3265
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3265>`_)
|
|
||||||
- Fix forward compatibility with Python 3.8: importing ABCs directly from the
|
|
||||||
collections module will not be supported anymore. (`#3273
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3273>`_)
|
|
||||||
- Keep the query string by ``normalize_path_middleware``. (`#3278
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3278>`_)
|
|
||||||
- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3290>`_)
|
|
||||||
- Bracket IPv6 addresses in the HOST header (`#3304
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3304>`_)
|
|
||||||
- Fix default message for server ping and pong frames. (`#3308
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3308>`_)
|
|
||||||
- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop
|
|
||||||
def. (`#3337 <https://github.com/aio-libs/aiohttp/pull/3337>`_)
|
|
||||||
- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function
|
|
||||||
(`#3361 <https://github.com/aio-libs/aiohttp/pull/3361>`_)
|
|
||||||
- Release HTTP response before raising status exception (`#3364
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3364>`_)
|
|
||||||
- Fix task cancellation when ``sendfile()`` syscall is used by static file
|
|
||||||
handling. (`#3383 <https://github.com/aio-libs/aiohttp/pull/3383>`_)
|
|
||||||
- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught
|
|
||||||
in the handler. (`#3414 <https://github.com/aio-libs/aiohttp/pull/3414>`_)
|
|
||||||
|
|
||||||
|
|
||||||
Improved Documentation
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
- Improve documentation of ``Application.make_handler`` parameters. (`#3152
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3152>`_)
|
|
||||||
- Fix BaseRequest.raw_headers doc. (`#3215
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3215>`_)
|
|
||||||
- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3229>`_)
|
|
||||||
- Make server access log format placeholder %b documentation reflect
|
|
||||||
behavior and docstring. (`#3307 <https://github.com/aio-libs/aiohttp/pull/3307>`_)
|
|
||||||
|
|
||||||
|
|
||||||
Deprecations and Removals
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
- Deprecate modification of ``session.requote_redirect_url`` (`#2278
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/2278>`_)
|
|
||||||
- Deprecate ``stream.unread_data()`` (`#3260
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3260>`_)
|
|
||||||
- Deprecated use of boolean in ``resp.enable_compression()`` (`#3318
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3318>`_)
|
|
||||||
- Encourage creation of aiohttp public objects inside a coroutine (`#3331
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3331>`_)
|
|
||||||
- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken
|
|
||||||
for more than 2 years. (`#3358 <https://github.com/aio-libs/aiohttp/pull/3358>`_)
|
|
||||||
- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop``
|
|
||||||
properties. (`#3374 <https://github.com/aio-libs/aiohttp/pull/3374>`_)
|
|
||||||
- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3381>`_)
|
|
||||||
- Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3385>`_)
|
|
||||||
- Deprecate bare connector close, use ``async with connector:`` and ``await
|
|
||||||
connector.close()`` instead. (`#3417
|
|
||||||
<https://github.com/aio-libs/aiohttp/pull/3417>`_)
|
|
||||||
- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession``
|
|
||||||
constructor. (`#3438 <https://github.com/aio-libs/aiohttp/pull/3438>`_)
|
|
||||||
|
|
||||||
|
|
||||||
Misc
|
|
||||||
----
|
|
||||||
|
|
||||||
- #3341, #3351
|
|
||||||
|
|
||||||
|
|
@ -1,135 +0,0 @@
|
||||||
aiohttp-3.7.4.post0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
|
||||||
aiohttp-3.7.4.post0.dist-info/LICENSE.txt,sha256=gAD9PvGacMFN9xr1XVlZnYpL_ucI6iguio_9zKsMv88,11522
|
|
||||||
aiohttp-3.7.4.post0.dist-info/METADATA,sha256=ZF35_2WGQmQSkWbZ9iHwYLbq61rWAzpVTk0FNlVNy84,38836
|
|
||||||
aiohttp-3.7.4.post0.dist-info/RECORD,,
|
|
||||||
aiohttp-3.7.4.post0.dist-info/WHEEL,sha256=jr7ubY0Lkz_yXH9FfFe9PTtLhGOsf62dZkNvTYrJINE,100
|
|
||||||
aiohttp-3.7.4.post0.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
|
|
||||||
aiohttp/.hash/_cparser.pxd.hash,sha256=IrReMM-DHmX3hUqt6ZkWbHjAmBEFqWvWTpe0X5gwSXo,108
|
|
||||||
aiohttp/.hash/_find_header.pxd.hash,sha256=TxG5w4etbVd6sfm5JWbdf5PW6LnuXRQnlMoFBVGKN2E,112
|
|
||||||
aiohttp/.hash/_frozenlist.pyx.hash,sha256=UBmgbFYXCyTd4DwRcYZY1SBqTU3_IUK2Rmlw56PvtnI,111
|
|
||||||
aiohttp/.hash/_helpers.pyi.hash,sha256=D1pTrCkUaJ3by1XeGH_nE-amt7XdjfRHcm9oRtoGhHQ,108
|
|
||||||
aiohttp/.hash/_helpers.pyx.hash,sha256=MA4zlNd5xukP4VDAbnoId0Azv8HxCpwLWie2gSMPLsw,108
|
|
||||||
aiohttp/.hash/_http_parser.pyx.hash,sha256=LAHg2wAi0_2KUaH9DV1UJQr2jxgZVrHIJk2TIDro9bo,112
|
|
||||||
aiohttp/.hash/_http_writer.pyx.hash,sha256=S68YR2hVoBRgQzI7YCAM1SnlUWr4fOSr16FkcS1-H1k,112
|
|
||||||
aiohttp/.hash/_websocket.pyx.hash,sha256=8AcsJ5Tb8lZ9_QVXor_1Xbtl5igK1iP5rtEZZ0iA2AE,110
|
|
||||||
aiohttp/.hash/frozenlist.pyi.hash,sha256=9Xim5smJMiLGey1D0-BUiLxHs1XaV2_aYKAv7eQ7M_4,110
|
|
||||||
aiohttp/.hash/hdrs.py.hash,sha256=yDL4bbjH3uQidHSTCQkAArTdZgQgLidoaXB0HkaWhS0,103
|
|
||||||
aiohttp/.hash/signals.pyi.hash,sha256=kHUKGkyP9XjurohZ39KYUw2W2FEmUuTDp7MCF9AZPus,107
|
|
||||||
aiohttp/__init__.py,sha256=UmLziO7Qi_M22n8tfE4cL-qMCbN5ZodS3ADxSg5ri-M,7157
|
|
||||||
aiohttp/__pycache__/__init__.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/abc.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/base_protocol.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/client.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/client_exceptions.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/client_proto.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/client_reqrep.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/client_ws.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/connector.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/cookiejar.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/formdata.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/frozenlist.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/hdrs.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/helpers.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/http.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/http_exceptions.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/http_parser.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/http_websocket.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/http_writer.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/locks.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/log.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/multipart.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/payload.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/payload_streamer.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/pytest_plugin.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/resolver.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/signals.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/streams.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/tcp_helpers.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/test_utils.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/tracing.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/typedefs.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_app.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_exceptions.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_fileresponse.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_log.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_middlewares.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_protocol.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_request.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_response.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_routedef.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_runner.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_server.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_urldispatcher.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/web_ws.cpython-39.pyc,,
|
|
||||||
aiohttp/__pycache__/worker.cpython-39.pyc,,
|
|
||||||
aiohttp/_cparser.pxd,sha256=xvsLl13ZXXyHGyb2Us7WsLncndQrxhyGB4KXnvbsRtQ,4099
|
|
||||||
aiohttp/_find_header.c,sha256=-d1A3pkkpirVX5CDQaTSSTjdjXekmOjt-bqYcEQWbXc,197440
|
|
||||||
aiohttp/_find_header.h,sha256=HistyxY7K3xEJ53Y5xEfwrDVDkfcV0zQ9mkzMgzi_jo,184
|
|
||||||
aiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70
|
|
||||||
aiohttp/_frozenlist.c,sha256=Ea69NSVskCydZGMWM-nZK7ejOITPd1_4RKima-Al9ng,294194
|
|
||||||
aiohttp/_frozenlist.cp39-win_amd64.pyd,sha256=alY6TdwjPtXwH4Y11ZA2a1oHisc6KKgtg38kvsI90U8,64512
|
|
||||||
aiohttp/_frozenlist.pyx,sha256=SB851KmtWpiJ2ZB05Tpo4855VkCyRtgMs843Wz8kFeg,2713
|
|
||||||
aiohttp/_headers.pxi,sha256=1MhCe6Un_KI1tpO85HnDfzVO94BhcirLanAOys5FIHA,2090
|
|
||||||
aiohttp/_helpers.c,sha256=JzeMvzUU5gUPfsUMoaeetvhME5i45bzOEC1bVUDAYn4,211990
|
|
||||||
aiohttp/_helpers.cp39-win_amd64.pyd,sha256=OZ3-7povjGoTLC1NKJMfTGwPHROU3lSxgqZFfZFDpBg,48128
|
|
||||||
aiohttp/_helpers.pyi,sha256=2Hd5IC0Zf4YTEJ412suyyhsh1kVyVDv5g4stgyo2Ksc,208
|
|
||||||
aiohttp/_helpers.pyx,sha256=tgl7fZh0QMT6cjf4jSJ8iaO6DdQD3GON2-SH4N5_ETg,1084
|
|
||||||
aiohttp/_http_parser.c,sha256=cxhXBsnSqgk8XgURzub_XFLbJSKJJO248-2vWvkT0wM,1011527
|
|
||||||
aiohttp/_http_parser.cp39-win_amd64.pyd,sha256=Y2qSXrMcOn3jnLlJVhOxNXBgagZy0-aZy2mDKH4MAeM,235520
|
|
||||||
aiohttp/_http_parser.pyx,sha256=g8BRhSJK1X8TP3_V1WwzH0-eEBzVL5EjfntlaLVFnhw,29897
|
|
||||||
aiohttp/_http_writer.c,sha256=-xrk3WfKaYIGAwywdKrAQL1zYcVKV8L196WgulT04Cw,213022
|
|
||||||
aiohttp/_http_writer.cp39-win_amd64.pyd,sha256=hOizfW_QFiYQ3rPB1Ih_cOZEeFAyHuqEb4YO_ChicEs,41984
|
|
||||||
aiohttp/_http_writer.pyx,sha256=rBzbk-xrIWO2hD0kKo5ILKSKsW_U8Xf15IAPnqSH23Q,4351
|
|
||||||
aiohttp/_websocket.c,sha256=t8Re9DbCbALSyrRrpnlXcSF39NxIebH4aYzysWDBQns,137429
|
|
||||||
aiohttp/_websocket.cp39-win_amd64.pyd,sha256=0cRjtceoeO9TWKY7sOqehzEf4fQW92K9GLSIjBcMZH8,27648
|
|
||||||
aiohttp/_websocket.pyx,sha256=o9J7yi9c2-jTBjE3dUkXxhDWKvRWJz5GZfyLsgJQa38,1617
|
|
||||||
aiohttp/abc.py,sha256=m5MSBBYS0fs4Kb0yROJrqufmupgnRMeLoJOcVylt_gQ,5447
|
|
||||||
aiohttp/base_protocol.py,sha256=BqQYyyTSwLjYtWe8pOCpvsjrlbovITd9rrZ5MMu8P8Q,2788
|
|
||||||
aiohttp/client.py,sha256=SY6_RXgfbeHDb63bccs-03a0Jj5f5RCBl5qpnYbsD6I,45191
|
|
||||||
aiohttp/client_exceptions.py,sha256=P0gmFGv4FAGiKeDCX6qtL_C-DlqsjwQ3HPI32oxFg34,8846
|
|
||||||
aiohttp/client_proto.py,sha256=3U2TI6gfoTRIzWZpknV4jGqO-fUQ_hJM-tKKQHWkATQ,8414
|
|
||||||
aiohttp/client_reqrep.py,sha256=1IlPotOVSIVqD_G2qALE1ChtB963nZICD8-CFqB00UY,37566
|
|
||||||
aiohttp/client_ws.py,sha256=pBTFy5Ss8iECE_4Cq5m86VZpDFOEZhlVeCtc7SeMopo,10588
|
|
||||||
aiohttp/connector.py,sha256=rMYFC4kaobHUSHmatR5GZyoJZ64R5uJx22QpGIWdOpA,44230
|
|
||||||
aiohttp/cookiejar.py,sha256=LluB0A_imJ19d7phW6cVuQMyZVz_Rq4RZ1_N09L3_ws,12545
|
|
||||||
aiohttp/formdata.py,sha256=akyeuVTAdPvtUpiFeX759uerv69PQB4mchq3ZOdTaNo,6250
|
|
||||||
aiohttp/frozenlist.py,sha256=nJaNj0CP5QRHw7U8Fqq2bYzrLWlQhWqNN_XNvlWlHeY,1790
|
|
||||||
aiohttp/frozenlist.pyi,sha256=kBG9J61ymCqiUvBkrjsRGVmfakmkzk6KHmZbdgRLCZY,1480
|
|
||||||
aiohttp/hdrs.py,sha256=XyvcUDaIZe-HUoro_WggyLNWdyCcVDC2aciFer7bnpQ,3554
|
|
||||||
aiohttp/helpers.py,sha256=LW6EL9AtwvGVAxPWPAL1pbIt3PMaOy9OJoW4TewRc-M,23698
|
|
||||||
aiohttp/http.py,sha256=NKlSh1UEf-ZoYBYI0IoAUq0jy_-wKyJQ-aT0GjQCy7k,1896
|
|
||||||
aiohttp/http_exceptions.py,sha256=rLwhCbFrOpQ_ntr3GnxaxD3oRnTTNM1utmDDBUbdVTU,2691
|
|
||||||
aiohttp/http_parser.py,sha256=IleNVZ3FGYmk3hKXz1RGwCcsUl5oUjDbikuwMcoyZ-I,31682
|
|
||||||
aiohttp/http_websocket.py,sha256=unIzhQEnAGpeXESvvtmP7k59EiTTmoG1_rd5BPoYrqk,25796
|
|
||||||
aiohttp/http_writer.py,sha256=z2K59frCgLJGwKADU3q-VRgHLKLXNzq6MW1Geg9pea8,5523
|
|
||||||
aiohttp/locks.py,sha256=-ySdj_OPys4s1LvpSeFRl6XHgmcpVGbrNtuoKsZ-lXQ,1265
|
|
||||||
aiohttp/log.py,sha256=zYUTvXsMQ9Sz1yNN8kXwd5Qxu49a1FzjZ_wQqriEc8M,333
|
|
||||||
aiohttp/multipart.py,sha256=P1erLT4m3Cj6j_6fO_dJhppntg4EocRRBGYCgptnCxc,33208
|
|
||||||
aiohttp/payload.py,sha256=m6RIJyZXIumdRpvGGJovdjsVcL--6GybxjpUyjKXGo4,13781
|
|
||||||
aiohttp/payload_streamer.py,sha256=avZCRjdpQU2t0HL5YeWs7JQaaUHe81YcyYxQzrECVLA,2176
|
|
||||||
aiohttp/py.typed,sha256=3VVwXUAWVEVX7sDwyYDnW5ZdBC9_Z9AJAFfLCleUW0k,8
|
|
||||||
aiohttp/pytest_plugin.py,sha256=rJ8PLNtR6UhMwfD-BeLMeHdNQSWUhEpVh3nhdsAHghQ,11389
|
|
||||||
aiohttp/resolver.py,sha256=TyLUEe8QyxLCYy8jETJa-8MotQxmKfcEfg85wjv8_hs,4757
|
|
||||||
aiohttp/signals.py,sha256=HdX5hKj-w-jIVrYTKADRzXZh-2x26CwF4UKSQy9zMsg,886
|
|
||||||
aiohttp/signals.pyi,sha256=EnNobON7azFQ1fHSiklvbrvQfQUnOZPi3n_6pKEzXoM,331
|
|
||||||
aiohttp/streams.py,sha256=ZEEnFyTIecfzeJGcsxpD7LXo4bS93IpeQvep5NAXQsA,21177
|
|
||||||
aiohttp/tcp_helpers.py,sha256=jPHZyIHbIAqyWS0QShT_ZgKLMiDW7s_124IPc4irTU8,1000
|
|
||||||
aiohttp/test_utils.py,sha256=UgC_8I0WVE0LgFtKKHfLIBceySkVE7oCatuyj5-dQeg,20929
|
|
||||||
aiohttp/tracing.py,sha256=_oTwN_h8sj8seL0QfeTlWAaUhTe1yv8glc8Wutuitds,14805
|
|
||||||
aiohttp/typedefs.py,sha256=Am4eWH_C4lE_m3pl3IlfuB-KpqkjStTRMYcoS8cJMC4,1420
|
|
||||||
aiohttp/web.py,sha256=Zd6dGInVzbHlzIoYGwe86fdIFP3hDA_-FPevp_B356M,18462
|
|
||||||
aiohttp/web_app.py,sha256=IKHRp1PrRHJQYI-km_dhxJlchc89gWfI19pmC5rTK-o,17605
|
|
||||||
aiohttp/web_exceptions.py,sha256=ydzJJKwJWHOKzjzh0XtZNzZ5NCb0Me8DKmlKy2qvijw,10547
|
|
||||||
aiohttp/web_fileresponse.py,sha256=rFpMfXUcbfVjQkVAt_xtA6tbgrUVj7BJQCGZHBBtNI8,9268
|
|
||||||
aiohttp/web_log.py,sha256=B-gy5ixLtq6TVXvBzXuCLGf96GM4qeWjjNIiczVllLM,7706
|
|
||||||
aiohttp/web_middlewares.py,sha256=fP2Fp113O5keX-h4VD4rJTQUM_5Si4vXu1m-1fFNIpU,4314
|
|
||||||
aiohttp/web_protocol.py,sha256=6B0cUAuPTF6JTtRmLCKStqjdD8_rsKNKNeYeSOL5hD0,23918
|
|
||||||
aiohttp/web_request.py,sha256=7U-Rizv5L_srCVVjQaG4Mer3kbyifTt2LAgOnzFfQ6A,27278
|
|
||||||
aiohttp/web_response.py,sha256=8hzlkbHWHp7HXpVXV9jZF_Cq2aKl6X-0pkl_IfcZ7jg,26983
|
|
||||||
aiohttp/web_routedef.py,sha256=7gnG-KLvQPLDXxGUJXwOC3_k2ufqFWMAP8qofprQ79c,6324
|
|
||||||
aiohttp/web_runner.py,sha256=E4asEULIN8umMWCpmYhJ4eAH2SbjAStk0FMolhu-cZk,11575
|
|
||||||
aiohttp/web_server.py,sha256=iKc9a4fQS14-3ivqzBiBp742m8vEexRZiSzeKTW7NCo,2120
|
|
||||||
aiohttp/web_urldispatcher.py,sha256=ydsygCAoYnBvOL7qW4cspYKaTr0bEJjQVnUEQAZsb8g,40765
|
|
||||||
aiohttp/web_ws.py,sha256=5edpQhp6h135RZaHkVTDNFbr_qU9lcZf6iUefmBXcmg,17264
|
|
||||||
aiohttp/worker.py,sha256=YKvDyIcNrRROfhH-huN1EMoB31WbNnDTAh60Tu7hOxQ,8274
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
Wheel-Version: 1.0
|
|
||||||
Generator: bdist_wheel (0.36.2)
|
|
||||||
Root-Is-Purelib: false
|
|
||||||
Tag: cp39-cp39-win_amd64
|
|
||||||
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
aiohttp
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
c6fb0b975dd95d7c871b26f652ced6b0b9dc9dd42bc61c860782979ef6ec46d4 *D:/a/aiohttp/aiohttp/aiohttp/_cparser.pxd
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
0455129b185e981b5b96ac738f31f7c74dc57f1696953cae0083b3f18679fe73 *D:/a/aiohttp/aiohttp/aiohttp/_find_header.pxd
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
481f39d4a9ad5a9889d99074e53a68e3ce795640b246d80cb3ce375b3f2415e8 *D:/a/aiohttp/aiohttp/aiohttp/_frozenlist.pyx
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
d87779202d197f8613109e35dacbb2ca1b21d64572543bf9838b2d832a362ac7 *D:/a/aiohttp/aiohttp/aiohttp/_helpers.pyi
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
b6097b7d987440c4fa7237f88d227c89a3ba0dd403dc638ddbe487e0de7f1138 *D:/a/aiohttp/aiohttp/aiohttp/_helpers.pyx
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
83c05185224ad57f133f7fd5d56c331f4f9e101cd52f91237e7b6568b5459e1c *D:/a/aiohttp/aiohttp/aiohttp/_http_parser.pyx
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
ac1cdb93ec6b2163b6843d242a8e482ca48ab16fd4f177f5e4800f9ea487db74 *D:/a/aiohttp/aiohttp/aiohttp/_http_writer.pyx
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
a3d27bca2f5cdbe8d3063137754917c610d62af456273e4665fc8bb202506b7f *D:/a/aiohttp/aiohttp/aiohttp/_websocket.pyx
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
9011bd27ad72982aa252f064ae3b1119599f6a49a4ce4e8a1e665b76044b0996 *D:/a/aiohttp/aiohttp/aiohttp/frozenlist.pyi
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
5f2bdc50368865ef87528ae8fd6820c8b35677209c5430b669c8857abedb9e94 *D:/a/aiohttp/aiohttp/aiohttp/hdrs.py
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
1273686ce37b6b3150d5f1d28a496f6ebbd07d05273993e2de7ffaa4a1335e83 *D:/a/aiohttp/aiohttp/aiohttp/signals.pyi
|
|
||||||
217
env/Lib/site-packages/aiohttp/__init__.py
vendored
217
env/Lib/site-packages/aiohttp/__init__.py
vendored
|
|
@ -1,217 +0,0 @@
|
||||||
__version__ = "3.7.4.post0"
|
|
||||||
|
|
||||||
from typing import Tuple
|
|
||||||
|
|
||||||
from . import hdrs as hdrs
|
|
||||||
from .client import (
|
|
||||||
BaseConnector as BaseConnector,
|
|
||||||
ClientConnectionError as ClientConnectionError,
|
|
||||||
ClientConnectorCertificateError as ClientConnectorCertificateError,
|
|
||||||
ClientConnectorError as ClientConnectorError,
|
|
||||||
ClientConnectorSSLError as ClientConnectorSSLError,
|
|
||||||
ClientError as ClientError,
|
|
||||||
ClientHttpProxyError as ClientHttpProxyError,
|
|
||||||
ClientOSError as ClientOSError,
|
|
||||||
ClientPayloadError as ClientPayloadError,
|
|
||||||
ClientProxyConnectionError as ClientProxyConnectionError,
|
|
||||||
ClientRequest as ClientRequest,
|
|
||||||
ClientResponse as ClientResponse,
|
|
||||||
ClientResponseError as ClientResponseError,
|
|
||||||
ClientSession as ClientSession,
|
|
||||||
ClientSSLError as ClientSSLError,
|
|
||||||
ClientTimeout as ClientTimeout,
|
|
||||||
ClientWebSocketResponse as ClientWebSocketResponse,
|
|
||||||
ContentTypeError as ContentTypeError,
|
|
||||||
Fingerprint as Fingerprint,
|
|
||||||
InvalidURL as InvalidURL,
|
|
||||||
NamedPipeConnector as NamedPipeConnector,
|
|
||||||
RequestInfo as RequestInfo,
|
|
||||||
ServerConnectionError as ServerConnectionError,
|
|
||||||
ServerDisconnectedError as ServerDisconnectedError,
|
|
||||||
ServerFingerprintMismatch as ServerFingerprintMismatch,
|
|
||||||
ServerTimeoutError as ServerTimeoutError,
|
|
||||||
TCPConnector as TCPConnector,
|
|
||||||
TooManyRedirects as TooManyRedirects,
|
|
||||||
UnixConnector as UnixConnector,
|
|
||||||
WSServerHandshakeError as WSServerHandshakeError,
|
|
||||||
request as request,
|
|
||||||
)
|
|
||||||
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
|
|
||||||
from .formdata import FormData as FormData
|
|
||||||
from .helpers import BasicAuth as BasicAuth, ChainMapProxy as ChainMapProxy
|
|
||||||
from .http import (
|
|
||||||
HttpVersion as HttpVersion,
|
|
||||||
HttpVersion10 as HttpVersion10,
|
|
||||||
HttpVersion11 as HttpVersion11,
|
|
||||||
WebSocketError as WebSocketError,
|
|
||||||
WSCloseCode as WSCloseCode,
|
|
||||||
WSMessage as WSMessage,
|
|
||||||
WSMsgType as WSMsgType,
|
|
||||||
)
|
|
||||||
from .multipart import (
|
|
||||||
BadContentDispositionHeader as BadContentDispositionHeader,
|
|
||||||
BadContentDispositionParam as BadContentDispositionParam,
|
|
||||||
BodyPartReader as BodyPartReader,
|
|
||||||
MultipartReader as MultipartReader,
|
|
||||||
MultipartWriter as MultipartWriter,
|
|
||||||
content_disposition_filename as content_disposition_filename,
|
|
||||||
parse_content_disposition as parse_content_disposition,
|
|
||||||
)
|
|
||||||
from .payload import (
|
|
||||||
PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
|
|
||||||
AsyncIterablePayload as AsyncIterablePayload,
|
|
||||||
BufferedReaderPayload as BufferedReaderPayload,
|
|
||||||
BytesIOPayload as BytesIOPayload,
|
|
||||||
BytesPayload as BytesPayload,
|
|
||||||
IOBasePayload as IOBasePayload,
|
|
||||||
JsonPayload as JsonPayload,
|
|
||||||
Payload as Payload,
|
|
||||||
StringIOPayload as StringIOPayload,
|
|
||||||
StringPayload as StringPayload,
|
|
||||||
TextIOPayload as TextIOPayload,
|
|
||||||
get_payload as get_payload,
|
|
||||||
payload_type as payload_type,
|
|
||||||
)
|
|
||||||
from .payload_streamer import streamer as streamer
|
|
||||||
from .resolver import (
|
|
||||||
AsyncResolver as AsyncResolver,
|
|
||||||
DefaultResolver as DefaultResolver,
|
|
||||||
ThreadedResolver as ThreadedResolver,
|
|
||||||
)
|
|
||||||
from .signals import Signal as Signal
|
|
||||||
from .streams import (
|
|
||||||
EMPTY_PAYLOAD as EMPTY_PAYLOAD,
|
|
||||||
DataQueue as DataQueue,
|
|
||||||
EofStream as EofStream,
|
|
||||||
FlowControlDataQueue as FlowControlDataQueue,
|
|
||||||
StreamReader as StreamReader,
|
|
||||||
)
|
|
||||||
from .tracing import (
|
|
||||||
TraceConfig as TraceConfig,
|
|
||||||
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
|
|
||||||
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
|
|
||||||
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
|
|
||||||
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
|
|
||||||
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
|
|
||||||
TraceDnsCacheHitParams as TraceDnsCacheHitParams,
|
|
||||||
TraceDnsCacheMissParams as TraceDnsCacheMissParams,
|
|
||||||
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
|
|
||||||
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
|
|
||||||
TraceRequestChunkSentParams as TraceRequestChunkSentParams,
|
|
||||||
TraceRequestEndParams as TraceRequestEndParams,
|
|
||||||
TraceRequestExceptionParams as TraceRequestExceptionParams,
|
|
||||||
TraceRequestRedirectParams as TraceRequestRedirectParams,
|
|
||||||
TraceRequestStartParams as TraceRequestStartParams,
|
|
||||||
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
|
|
||||||
)
|
|
||||||
|
|
||||||
__all__: Tuple[str, ...] = (
|
|
||||||
"hdrs",
|
|
||||||
# client
|
|
||||||
"BaseConnector",
|
|
||||||
"ClientConnectionError",
|
|
||||||
"ClientConnectorCertificateError",
|
|
||||||
"ClientConnectorError",
|
|
||||||
"ClientConnectorSSLError",
|
|
||||||
"ClientError",
|
|
||||||
"ClientHttpProxyError",
|
|
||||||
"ClientOSError",
|
|
||||||
"ClientPayloadError",
|
|
||||||
"ClientProxyConnectionError",
|
|
||||||
"ClientResponse",
|
|
||||||
"ClientRequest",
|
|
||||||
"ClientResponseError",
|
|
||||||
"ClientSSLError",
|
|
||||||
"ClientSession",
|
|
||||||
"ClientTimeout",
|
|
||||||
"ClientWebSocketResponse",
|
|
||||||
"ContentTypeError",
|
|
||||||
"Fingerprint",
|
|
||||||
"InvalidURL",
|
|
||||||
"RequestInfo",
|
|
||||||
"ServerConnectionError",
|
|
||||||
"ServerDisconnectedError",
|
|
||||||
"ServerFingerprintMismatch",
|
|
||||||
"ServerTimeoutError",
|
|
||||||
"TCPConnector",
|
|
||||||
"TooManyRedirects",
|
|
||||||
"UnixConnector",
|
|
||||||
"NamedPipeConnector",
|
|
||||||
"WSServerHandshakeError",
|
|
||||||
"request",
|
|
||||||
# cookiejar
|
|
||||||
"CookieJar",
|
|
||||||
"DummyCookieJar",
|
|
||||||
# formdata
|
|
||||||
"FormData",
|
|
||||||
# helpers
|
|
||||||
"BasicAuth",
|
|
||||||
"ChainMapProxy",
|
|
||||||
# http
|
|
||||||
"HttpVersion",
|
|
||||||
"HttpVersion10",
|
|
||||||
"HttpVersion11",
|
|
||||||
"WSMsgType",
|
|
||||||
"WSCloseCode",
|
|
||||||
"WSMessage",
|
|
||||||
"WebSocketError",
|
|
||||||
# multipart
|
|
||||||
"BadContentDispositionHeader",
|
|
||||||
"BadContentDispositionParam",
|
|
||||||
"BodyPartReader",
|
|
||||||
"MultipartReader",
|
|
||||||
"MultipartWriter",
|
|
||||||
"content_disposition_filename",
|
|
||||||
"parse_content_disposition",
|
|
||||||
# payload
|
|
||||||
"AsyncIterablePayload",
|
|
||||||
"BufferedReaderPayload",
|
|
||||||
"BytesIOPayload",
|
|
||||||
"BytesPayload",
|
|
||||||
"IOBasePayload",
|
|
||||||
"JsonPayload",
|
|
||||||
"PAYLOAD_REGISTRY",
|
|
||||||
"Payload",
|
|
||||||
"StringIOPayload",
|
|
||||||
"StringPayload",
|
|
||||||
"TextIOPayload",
|
|
||||||
"get_payload",
|
|
||||||
"payload_type",
|
|
||||||
# payload_streamer
|
|
||||||
"streamer",
|
|
||||||
# resolver
|
|
||||||
"AsyncResolver",
|
|
||||||
"DefaultResolver",
|
|
||||||
"ThreadedResolver",
|
|
||||||
# signals
|
|
||||||
"Signal",
|
|
||||||
"DataQueue",
|
|
||||||
"EMPTY_PAYLOAD",
|
|
||||||
"EofStream",
|
|
||||||
"FlowControlDataQueue",
|
|
||||||
"StreamReader",
|
|
||||||
# tracing
|
|
||||||
"TraceConfig",
|
|
||||||
"TraceConnectionCreateEndParams",
|
|
||||||
"TraceConnectionCreateStartParams",
|
|
||||||
"TraceConnectionQueuedEndParams",
|
|
||||||
"TraceConnectionQueuedStartParams",
|
|
||||||
"TraceConnectionReuseconnParams",
|
|
||||||
"TraceDnsCacheHitParams",
|
|
||||||
"TraceDnsCacheMissParams",
|
|
||||||
"TraceDnsResolveHostEndParams",
|
|
||||||
"TraceDnsResolveHostStartParams",
|
|
||||||
"TraceRequestChunkSentParams",
|
|
||||||
"TraceRequestEndParams",
|
|
||||||
"TraceRequestExceptionParams",
|
|
||||||
"TraceRequestRedirectParams",
|
|
||||||
"TraceRequestStartParams",
|
|
||||||
"TraceResponseChunkReceivedParams",
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
from .worker import GunicornUVLoopWebWorker, GunicornWebWorker
|
|
||||||
|
|
||||||
__all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
pass
|
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
140
env/Lib/site-packages/aiohttp/_cparser.pxd
vendored
140
env/Lib/site-packages/aiohttp/_cparser.pxd
vendored
|
|
@ -1,140 +0,0 @@
|
||||||
from libc.stdint cimport uint16_t, uint32_t, uint64_t
|
|
||||||
|
|
||||||
|
|
||||||
cdef extern from "../vendor/http-parser/http_parser.h":
|
|
||||||
ctypedef int (*http_data_cb) (http_parser*,
|
|
||||||
const char *at,
|
|
||||||
size_t length) except -1
|
|
||||||
|
|
||||||
ctypedef int (*http_cb) (http_parser*) except -1
|
|
||||||
|
|
||||||
struct http_parser:
|
|
||||||
unsigned int type
|
|
||||||
unsigned int flags
|
|
||||||
unsigned int state
|
|
||||||
unsigned int header_state
|
|
||||||
unsigned int index
|
|
||||||
|
|
||||||
uint32_t nread
|
|
||||||
uint64_t content_length
|
|
||||||
|
|
||||||
unsigned short http_major
|
|
||||||
unsigned short http_minor
|
|
||||||
unsigned int status_code
|
|
||||||
unsigned int method
|
|
||||||
unsigned int http_errno
|
|
||||||
|
|
||||||
unsigned int upgrade
|
|
||||||
|
|
||||||
void *data
|
|
||||||
|
|
||||||
struct http_parser_settings:
|
|
||||||
http_cb on_message_begin
|
|
||||||
http_data_cb on_url
|
|
||||||
http_data_cb on_status
|
|
||||||
http_data_cb on_header_field
|
|
||||||
http_data_cb on_header_value
|
|
||||||
http_cb on_headers_complete
|
|
||||||
http_data_cb on_body
|
|
||||||
http_cb on_message_complete
|
|
||||||
http_cb on_chunk_header
|
|
||||||
http_cb on_chunk_complete
|
|
||||||
|
|
||||||
enum http_parser_type:
|
|
||||||
HTTP_REQUEST,
|
|
||||||
HTTP_RESPONSE,
|
|
||||||
HTTP_BOTH
|
|
||||||
|
|
||||||
enum http_errno:
|
|
||||||
HPE_OK,
|
|
||||||
HPE_CB_message_begin,
|
|
||||||
HPE_CB_url,
|
|
||||||
HPE_CB_header_field,
|
|
||||||
HPE_CB_header_value,
|
|
||||||
HPE_CB_headers_complete,
|
|
||||||
HPE_CB_body,
|
|
||||||
HPE_CB_message_complete,
|
|
||||||
HPE_CB_status,
|
|
||||||
HPE_CB_chunk_header,
|
|
||||||
HPE_CB_chunk_complete,
|
|
||||||
HPE_INVALID_EOF_STATE,
|
|
||||||
HPE_HEADER_OVERFLOW,
|
|
||||||
HPE_CLOSED_CONNECTION,
|
|
||||||
HPE_INVALID_VERSION,
|
|
||||||
HPE_INVALID_STATUS,
|
|
||||||
HPE_INVALID_METHOD,
|
|
||||||
HPE_INVALID_URL,
|
|
||||||
HPE_INVALID_HOST,
|
|
||||||
HPE_INVALID_PORT,
|
|
||||||
HPE_INVALID_PATH,
|
|
||||||
HPE_INVALID_QUERY_STRING,
|
|
||||||
HPE_INVALID_FRAGMENT,
|
|
||||||
HPE_LF_EXPECTED,
|
|
||||||
HPE_INVALID_HEADER_TOKEN,
|
|
||||||
HPE_INVALID_CONTENT_LENGTH,
|
|
||||||
HPE_INVALID_CHUNK_SIZE,
|
|
||||||
HPE_INVALID_CONSTANT,
|
|
||||||
HPE_INVALID_INTERNAL_STATE,
|
|
||||||
HPE_STRICT,
|
|
||||||
HPE_PAUSED,
|
|
||||||
HPE_UNKNOWN
|
|
||||||
|
|
||||||
enum flags:
|
|
||||||
F_CHUNKED,
|
|
||||||
F_CONNECTION_KEEP_ALIVE,
|
|
||||||
F_CONNECTION_CLOSE,
|
|
||||||
F_CONNECTION_UPGRADE,
|
|
||||||
F_TRAILING,
|
|
||||||
F_UPGRADE,
|
|
||||||
F_SKIPBODY,
|
|
||||||
F_CONTENTLENGTH
|
|
||||||
|
|
||||||
enum http_method:
|
|
||||||
DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY,
|
|
||||||
LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND,
|
|
||||||
REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE,
|
|
||||||
MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR,
|
|
||||||
LINK, UNLINK
|
|
||||||
|
|
||||||
void http_parser_init(http_parser *parser, http_parser_type type)
|
|
||||||
|
|
||||||
size_t http_parser_execute(http_parser *parser,
|
|
||||||
const http_parser_settings *settings,
|
|
||||||
const char *data,
|
|
||||||
size_t len)
|
|
||||||
|
|
||||||
int http_should_keep_alive(const http_parser *parser)
|
|
||||||
|
|
||||||
void http_parser_settings_init(http_parser_settings *settings)
|
|
||||||
|
|
||||||
const char *http_errno_name(http_errno err)
|
|
||||||
const char *http_errno_description(http_errno err)
|
|
||||||
const char *http_method_str(http_method m)
|
|
||||||
|
|
||||||
# URL Parser
|
|
||||||
|
|
||||||
enum http_parser_url_fields:
|
|
||||||
UF_SCHEMA = 0,
|
|
||||||
UF_HOST = 1,
|
|
||||||
UF_PORT = 2,
|
|
||||||
UF_PATH = 3,
|
|
||||||
UF_QUERY = 4,
|
|
||||||
UF_FRAGMENT = 5,
|
|
||||||
UF_USERINFO = 6,
|
|
||||||
UF_MAX = 7
|
|
||||||
|
|
||||||
struct http_parser_url_field_data:
|
|
||||||
uint16_t off
|
|
||||||
uint16_t len
|
|
||||||
|
|
||||||
struct http_parser_url:
|
|
||||||
uint16_t field_set
|
|
||||||
uint16_t port
|
|
||||||
http_parser_url_field_data[<int>UF_MAX] field_data
|
|
||||||
|
|
||||||
void http_parser_url_init(http_parser_url *u)
|
|
||||||
|
|
||||||
int http_parser_parse_url(const char *buf,
|
|
||||||
size_t buflen,
|
|
||||||
int is_connect,
|
|
||||||
http_parser_url *u)
|
|
||||||
9870
env/Lib/site-packages/aiohttp/_find_header.c
vendored
9870
env/Lib/site-packages/aiohttp/_find_header.c
vendored
File diff suppressed because it is too large
Load diff
14
env/Lib/site-packages/aiohttp/_find_header.h
vendored
14
env/Lib/site-packages/aiohttp/_find_header.h
vendored
|
|
@ -1,14 +0,0 @@
|
||||||
#ifndef _FIND_HEADERS_H
|
|
||||||
#define _FIND_HEADERS_H
|
|
||||||
|
|
||||||
#ifdef __cplusplus
|
|
||||||
extern "C" {
|
|
||||||
#endif
|
|
||||||
|
|
||||||
int find_header(const char *str, int size);
|
|
||||||
|
|
||||||
|
|
||||||
#ifdef __cplusplus
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
#endif
|
|
||||||
|
|
@ -1,2 +0,0 @@
|
||||||
cdef extern from "_find_header.h":
|
|
||||||
int find_header(char *, int)
|
|
||||||
7512
env/Lib/site-packages/aiohttp/_frozenlist.c
vendored
7512
env/Lib/site-packages/aiohttp/_frozenlist.c
vendored
File diff suppressed because it is too large
Load diff
Binary file not shown.
108
env/Lib/site-packages/aiohttp/_frozenlist.pyx
vendored
108
env/Lib/site-packages/aiohttp/_frozenlist.pyx
vendored
|
|
@ -1,108 +0,0 @@
|
||||||
from collections.abc import MutableSequence
|
|
||||||
|
|
||||||
|
|
||||||
cdef class FrozenList:
|
|
||||||
|
|
||||||
cdef readonly bint frozen
|
|
||||||
cdef list _items
|
|
||||||
|
|
||||||
def __init__(self, items=None):
|
|
||||||
self.frozen = False
|
|
||||||
if items is not None:
|
|
||||||
items = list(items)
|
|
||||||
else:
|
|
||||||
items = []
|
|
||||||
self._items = items
|
|
||||||
|
|
||||||
cdef object _check_frozen(self):
|
|
||||||
if self.frozen:
|
|
||||||
raise RuntimeError("Cannot modify frozen list.")
|
|
||||||
|
|
||||||
cdef inline object _fast_len(self):
|
|
||||||
return len(self._items)
|
|
||||||
|
|
||||||
def freeze(self):
|
|
||||||
self.frozen = True
|
|
||||||
|
|
||||||
def __getitem__(self, index):
|
|
||||||
return self._items[index]
|
|
||||||
|
|
||||||
def __setitem__(self, index, value):
|
|
||||||
self._check_frozen()
|
|
||||||
self._items[index] = value
|
|
||||||
|
|
||||||
def __delitem__(self, index):
|
|
||||||
self._check_frozen()
|
|
||||||
del self._items[index]
|
|
||||||
|
|
||||||
def __len__(self):
|
|
||||||
return self._fast_len()
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
return self._items.__iter__()
|
|
||||||
|
|
||||||
def __reversed__(self):
|
|
||||||
return self._items.__reversed__()
|
|
||||||
|
|
||||||
def __richcmp__(self, other, op):
|
|
||||||
if op == 0: # <
|
|
||||||
return list(self) < other
|
|
||||||
if op == 1: # <=
|
|
||||||
return list(self) <= other
|
|
||||||
if op == 2: # ==
|
|
||||||
return list(self) == other
|
|
||||||
if op == 3: # !=
|
|
||||||
return list(self) != other
|
|
||||||
if op == 4: # >
|
|
||||||
return list(self) > other
|
|
||||||
if op == 5: # =>
|
|
||||||
return list(self) >= other
|
|
||||||
|
|
||||||
def insert(self, pos, item):
|
|
||||||
self._check_frozen()
|
|
||||||
self._items.insert(pos, item)
|
|
||||||
|
|
||||||
def __contains__(self, item):
|
|
||||||
return item in self._items
|
|
||||||
|
|
||||||
def __iadd__(self, items):
|
|
||||||
self._check_frozen()
|
|
||||||
self._items += list(items)
|
|
||||||
return self
|
|
||||||
|
|
||||||
def index(self, item):
|
|
||||||
return self._items.index(item)
|
|
||||||
|
|
||||||
def remove(self, item):
|
|
||||||
self._check_frozen()
|
|
||||||
self._items.remove(item)
|
|
||||||
|
|
||||||
def clear(self):
|
|
||||||
self._check_frozen()
|
|
||||||
self._items.clear()
|
|
||||||
|
|
||||||
def extend(self, items):
|
|
||||||
self._check_frozen()
|
|
||||||
self._items += list(items)
|
|
||||||
|
|
||||||
def reverse(self):
|
|
||||||
self._check_frozen()
|
|
||||||
self._items.reverse()
|
|
||||||
|
|
||||||
def pop(self, index=-1):
|
|
||||||
self._check_frozen()
|
|
||||||
return self._items.pop(index)
|
|
||||||
|
|
||||||
def append(self, item):
|
|
||||||
self._check_frozen()
|
|
||||||
return self._items.append(item)
|
|
||||||
|
|
||||||
def count(self, item):
|
|
||||||
return self._items.count(item)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return '<FrozenList(frozen={}, {!r})>'.format(self.frozen,
|
|
||||||
self._items)
|
|
||||||
|
|
||||||
|
|
||||||
MutableSequence.register(FrozenList)
|
|
||||||
83
env/Lib/site-packages/aiohttp/_headers.pxi
vendored
83
env/Lib/site-packages/aiohttp/_headers.pxi
vendored
|
|
@ -1,83 +0,0 @@
|
||||||
# The file is autogenerated from aiohttp/hdrs.py
|
|
||||||
# Run ./tools/gen.py to update it after the origin changing.
|
|
||||||
|
|
||||||
from . import hdrs
|
|
||||||
cdef tuple headers = (
|
|
||||||
hdrs.ACCEPT,
|
|
||||||
hdrs.ACCEPT_CHARSET,
|
|
||||||
hdrs.ACCEPT_ENCODING,
|
|
||||||
hdrs.ACCEPT_LANGUAGE,
|
|
||||||
hdrs.ACCEPT_RANGES,
|
|
||||||
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
|
|
||||||
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
|
|
||||||
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
|
|
||||||
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
|
|
||||||
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
|
|
||||||
hdrs.ACCESS_CONTROL_MAX_AGE,
|
|
||||||
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
|
|
||||||
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
|
|
||||||
hdrs.AGE,
|
|
||||||
hdrs.ALLOW,
|
|
||||||
hdrs.AUTHORIZATION,
|
|
||||||
hdrs.CACHE_CONTROL,
|
|
||||||
hdrs.CONNECTION,
|
|
||||||
hdrs.CONTENT_DISPOSITION,
|
|
||||||
hdrs.CONTENT_ENCODING,
|
|
||||||
hdrs.CONTENT_LANGUAGE,
|
|
||||||
hdrs.CONTENT_LENGTH,
|
|
||||||
hdrs.CONTENT_LOCATION,
|
|
||||||
hdrs.CONTENT_MD5,
|
|
||||||
hdrs.CONTENT_RANGE,
|
|
||||||
hdrs.CONTENT_TRANSFER_ENCODING,
|
|
||||||
hdrs.CONTENT_TYPE,
|
|
||||||
hdrs.COOKIE,
|
|
||||||
hdrs.DATE,
|
|
||||||
hdrs.DESTINATION,
|
|
||||||
hdrs.DIGEST,
|
|
||||||
hdrs.ETAG,
|
|
||||||
hdrs.EXPECT,
|
|
||||||
hdrs.EXPIRES,
|
|
||||||
hdrs.FORWARDED,
|
|
||||||
hdrs.FROM,
|
|
||||||
hdrs.HOST,
|
|
||||||
hdrs.IF_MATCH,
|
|
||||||
hdrs.IF_MODIFIED_SINCE,
|
|
||||||
hdrs.IF_NONE_MATCH,
|
|
||||||
hdrs.IF_RANGE,
|
|
||||||
hdrs.IF_UNMODIFIED_SINCE,
|
|
||||||
hdrs.KEEP_ALIVE,
|
|
||||||
hdrs.LAST_EVENT_ID,
|
|
||||||
hdrs.LAST_MODIFIED,
|
|
||||||
hdrs.LINK,
|
|
||||||
hdrs.LOCATION,
|
|
||||||
hdrs.MAX_FORWARDS,
|
|
||||||
hdrs.ORIGIN,
|
|
||||||
hdrs.PRAGMA,
|
|
||||||
hdrs.PROXY_AUTHENTICATE,
|
|
||||||
hdrs.PROXY_AUTHORIZATION,
|
|
||||||
hdrs.RANGE,
|
|
||||||
hdrs.REFERER,
|
|
||||||
hdrs.RETRY_AFTER,
|
|
||||||
hdrs.SEC_WEBSOCKET_ACCEPT,
|
|
||||||
hdrs.SEC_WEBSOCKET_EXTENSIONS,
|
|
||||||
hdrs.SEC_WEBSOCKET_KEY,
|
|
||||||
hdrs.SEC_WEBSOCKET_KEY1,
|
|
||||||
hdrs.SEC_WEBSOCKET_PROTOCOL,
|
|
||||||
hdrs.SEC_WEBSOCKET_VERSION,
|
|
||||||
hdrs.SERVER,
|
|
||||||
hdrs.SET_COOKIE,
|
|
||||||
hdrs.TE,
|
|
||||||
hdrs.TRAILER,
|
|
||||||
hdrs.TRANSFER_ENCODING,
|
|
||||||
hdrs.URI,
|
|
||||||
hdrs.UPGRADE,
|
|
||||||
hdrs.USER_AGENT,
|
|
||||||
hdrs.VARY,
|
|
||||||
hdrs.VIA,
|
|
||||||
hdrs.WWW_AUTHENTICATE,
|
|
||||||
hdrs.WANT_DIGEST,
|
|
||||||
hdrs.WARNING,
|
|
||||||
hdrs.X_FORWARDED_FOR,
|
|
||||||
hdrs.X_FORWARDED_HOST,
|
|
||||||
hdrs.X_FORWARDED_PROTO,
|
|
||||||
)
|
|
||||||
5433
env/Lib/site-packages/aiohttp/_helpers.c
vendored
5433
env/Lib/site-packages/aiohttp/_helpers.c
vendored
File diff suppressed because it is too large
Load diff
Binary file not shown.
6
env/Lib/site-packages/aiohttp/_helpers.pyi
vendored
6
env/Lib/site-packages/aiohttp/_helpers.pyi
vendored
|
|
@ -1,6 +0,0 @@
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
class reify:
|
|
||||||
def __init__(self, wrapped: Any) -> None: ...
|
|
||||||
def __get__(self, inst: Any, owner: Any) -> Any: ...
|
|
||||||
def __set__(self, inst: Any, value: Any) -> None: ...
|
|
||||||
35
env/Lib/site-packages/aiohttp/_helpers.pyx
vendored
35
env/Lib/site-packages/aiohttp/_helpers.pyx
vendored
|
|
@ -1,35 +0,0 @@
|
||||||
cdef class reify:
|
|
||||||
"""Use as a class method decorator. It operates almost exactly like
|
|
||||||
the Python `@property` decorator, but it puts the result of the
|
|
||||||
method it decorates into the instance dict after the first call,
|
|
||||||
effectively replacing the function it decorates with an instance
|
|
||||||
variable. It is, in Python parlance, a data descriptor.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
cdef object wrapped
|
|
||||||
cdef object name
|
|
||||||
|
|
||||||
def __init__(self, wrapped):
|
|
||||||
self.wrapped = wrapped
|
|
||||||
self.name = wrapped.__name__
|
|
||||||
|
|
||||||
@property
|
|
||||||
def __doc__(self):
|
|
||||||
return self.wrapped.__doc__
|
|
||||||
|
|
||||||
def __get__(self, inst, owner):
|
|
||||||
try:
|
|
||||||
try:
|
|
||||||
return inst._cache[self.name]
|
|
||||||
except KeyError:
|
|
||||||
val = self.wrapped(inst)
|
|
||||||
inst._cache[self.name] = val
|
|
||||||
return val
|
|
||||||
except AttributeError:
|
|
||||||
if inst is None:
|
|
||||||
return self
|
|
||||||
raise
|
|
||||||
|
|
||||||
def __set__(self, inst, value):
|
|
||||||
raise AttributeError("reified property is read-only")
|
|
||||||
24607
env/Lib/site-packages/aiohttp/_http_parser.c
vendored
24607
env/Lib/site-packages/aiohttp/_http_parser.c
vendored
File diff suppressed because it is too large
Load diff
Binary file not shown.
875
env/Lib/site-packages/aiohttp/_http_parser.pyx
vendored
875
env/Lib/site-packages/aiohttp/_http_parser.pyx
vendored
|
|
@ -1,875 +0,0 @@
|
||||||
#cython: language_level=3
|
|
||||||
#
|
|
||||||
# Based on https://github.com/MagicStack/httptools
|
|
||||||
#
|
|
||||||
from __future__ import absolute_import, print_function
|
|
||||||
|
|
||||||
from cpython cimport (
|
|
||||||
Py_buffer,
|
|
||||||
PyBUF_SIMPLE,
|
|
||||||
PyBuffer_Release,
|
|
||||||
PyBytes_AsString,
|
|
||||||
PyBytes_AsStringAndSize,
|
|
||||||
PyObject_GetBuffer,
|
|
||||||
)
|
|
||||||
from cpython.mem cimport PyMem_Free, PyMem_Malloc
|
|
||||||
from libc.limits cimport ULLONG_MAX
|
|
||||||
from libc.string cimport memcpy
|
|
||||||
|
|
||||||
from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
|
|
||||||
from yarl import URL as _URL
|
|
||||||
|
|
||||||
from aiohttp import hdrs
|
|
||||||
|
|
||||||
from .http_exceptions import (
|
|
||||||
BadHttpMessage,
|
|
||||||
BadStatusLine,
|
|
||||||
ContentLengthError,
|
|
||||||
InvalidHeader,
|
|
||||||
InvalidURLError,
|
|
||||||
LineTooLong,
|
|
||||||
PayloadEncodingError,
|
|
||||||
TransferEncodingError,
|
|
||||||
)
|
|
||||||
from .http_parser import DeflateBuffer as _DeflateBuffer
|
|
||||||
from .http_writer import (
|
|
||||||
HttpVersion as _HttpVersion,
|
|
||||||
HttpVersion10 as _HttpVersion10,
|
|
||||||
HttpVersion11 as _HttpVersion11,
|
|
||||||
)
|
|
||||||
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
|
|
||||||
|
|
||||||
cimport cython
|
|
||||||
|
|
||||||
from aiohttp cimport _cparser as cparser
|
|
||||||
|
|
||||||
include "_headers.pxi"
|
|
||||||
|
|
||||||
from aiohttp cimport _find_header
|
|
||||||
|
|
||||||
DEF DEFAULT_FREELIST_SIZE = 250
|
|
||||||
|
|
||||||
cdef extern from "Python.h":
|
|
||||||
int PyByteArray_Resize(object, Py_ssize_t) except -1
|
|
||||||
Py_ssize_t PyByteArray_Size(object) except -1
|
|
||||||
char* PyByteArray_AsString(object)
|
|
||||||
|
|
||||||
__all__ = ('HttpRequestParser', 'HttpResponseParser',
|
|
||||||
'RawRequestMessage', 'RawResponseMessage')
|
|
||||||
|
|
||||||
cdef object URL = _URL
|
|
||||||
cdef object URL_build = URL.build
|
|
||||||
cdef object CIMultiDict = _CIMultiDict
|
|
||||||
cdef object CIMultiDictProxy = _CIMultiDictProxy
|
|
||||||
cdef object HttpVersion = _HttpVersion
|
|
||||||
cdef object HttpVersion10 = _HttpVersion10
|
|
||||||
cdef object HttpVersion11 = _HttpVersion11
|
|
||||||
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
|
|
||||||
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
|
|
||||||
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
|
|
||||||
cdef object StreamReader = _StreamReader
|
|
||||||
cdef object DeflateBuffer = _DeflateBuffer
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline object extend(object buf, const char* at, size_t length):
|
|
||||||
cdef Py_ssize_t s
|
|
||||||
cdef char* ptr
|
|
||||||
s = PyByteArray_Size(buf)
|
|
||||||
PyByteArray_Resize(buf, s + length)
|
|
||||||
ptr = PyByteArray_AsString(buf)
|
|
||||||
memcpy(ptr + s, at, length)
|
|
||||||
|
|
||||||
|
|
||||||
DEF METHODS_COUNT = 34;
|
|
||||||
|
|
||||||
cdef list _http_method = []
|
|
||||||
|
|
||||||
for i in range(METHODS_COUNT):
|
|
||||||
_http_method.append(
|
|
||||||
cparser.http_method_str(<cparser.http_method> i).decode('ascii'))
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline str http_method_str(int i):
|
|
||||||
if i < METHODS_COUNT:
|
|
||||||
return <str>_http_method[i]
|
|
||||||
else:
|
|
||||||
return "<unknown>"
|
|
||||||
|
|
||||||
cdef inline object find_header(bytes raw_header):
|
|
||||||
cdef Py_ssize_t size
|
|
||||||
cdef char *buf
|
|
||||||
cdef int idx
|
|
||||||
PyBytes_AsStringAndSize(raw_header, &buf, &size)
|
|
||||||
idx = _find_header.find_header(buf, size)
|
|
||||||
if idx == -1:
|
|
||||||
return raw_header.decode('utf-8', 'surrogateescape')
|
|
||||||
return headers[idx]
|
|
||||||
|
|
||||||
|
|
||||||
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
|
||||||
cdef class RawRequestMessage:
|
|
||||||
cdef readonly str method
|
|
||||||
cdef readonly str path
|
|
||||||
cdef readonly object version # HttpVersion
|
|
||||||
cdef readonly object headers # CIMultiDict
|
|
||||||
cdef readonly object raw_headers # tuple
|
|
||||||
cdef readonly object should_close
|
|
||||||
cdef readonly object compression
|
|
||||||
cdef readonly object upgrade
|
|
||||||
cdef readonly object chunked
|
|
||||||
cdef readonly object url # yarl.URL
|
|
||||||
|
|
||||||
def __init__(self, method, path, version, headers, raw_headers,
|
|
||||||
should_close, compression, upgrade, chunked, url):
|
|
||||||
self.method = method
|
|
||||||
self.path = path
|
|
||||||
self.version = version
|
|
||||||
self.headers = headers
|
|
||||||
self.raw_headers = raw_headers
|
|
||||||
self.should_close = should_close
|
|
||||||
self.compression = compression
|
|
||||||
self.upgrade = upgrade
|
|
||||||
self.chunked = chunked
|
|
||||||
self.url = url
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
info = []
|
|
||||||
info.append(("method", self.method))
|
|
||||||
info.append(("path", self.path))
|
|
||||||
info.append(("version", self.version))
|
|
||||||
info.append(("headers", self.headers))
|
|
||||||
info.append(("raw_headers", self.raw_headers))
|
|
||||||
info.append(("should_close", self.should_close))
|
|
||||||
info.append(("compression", self.compression))
|
|
||||||
info.append(("upgrade", self.upgrade))
|
|
||||||
info.append(("chunked", self.chunked))
|
|
||||||
info.append(("url", self.url))
|
|
||||||
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
|
||||||
return '<RawRequestMessage(' + sinfo + ')>'
|
|
||||||
|
|
||||||
def _replace(self, **dct):
|
|
||||||
cdef RawRequestMessage ret
|
|
||||||
ret = _new_request_message(self.method,
|
|
||||||
self.path,
|
|
||||||
self.version,
|
|
||||||
self.headers,
|
|
||||||
self.raw_headers,
|
|
||||||
self.should_close,
|
|
||||||
self.compression,
|
|
||||||
self.upgrade,
|
|
||||||
self.chunked,
|
|
||||||
self.url)
|
|
||||||
if "method" in dct:
|
|
||||||
ret.method = dct["method"]
|
|
||||||
if "path" in dct:
|
|
||||||
ret.path = dct["path"]
|
|
||||||
if "version" in dct:
|
|
||||||
ret.version = dct["version"]
|
|
||||||
if "headers" in dct:
|
|
||||||
ret.headers = dct["headers"]
|
|
||||||
if "raw_headers" in dct:
|
|
||||||
ret.raw_headers = dct["raw_headers"]
|
|
||||||
if "should_close" in dct:
|
|
||||||
ret.should_close = dct["should_close"]
|
|
||||||
if "compression" in dct:
|
|
||||||
ret.compression = dct["compression"]
|
|
||||||
if "upgrade" in dct:
|
|
||||||
ret.upgrade = dct["upgrade"]
|
|
||||||
if "chunked" in dct:
|
|
||||||
ret.chunked = dct["chunked"]
|
|
||||||
if "url" in dct:
|
|
||||||
ret.url = dct["url"]
|
|
||||||
return ret
|
|
||||||
|
|
||||||
cdef _new_request_message(str method,
|
|
||||||
str path,
|
|
||||||
object version,
|
|
||||||
object headers,
|
|
||||||
object raw_headers,
|
|
||||||
bint should_close,
|
|
||||||
object compression,
|
|
||||||
bint upgrade,
|
|
||||||
bint chunked,
|
|
||||||
object url):
|
|
||||||
cdef RawRequestMessage ret
|
|
||||||
ret = RawRequestMessage.__new__(RawRequestMessage)
|
|
||||||
ret.method = method
|
|
||||||
ret.path = path
|
|
||||||
ret.version = version
|
|
||||||
ret.headers = headers
|
|
||||||
ret.raw_headers = raw_headers
|
|
||||||
ret.should_close = should_close
|
|
||||||
ret.compression = compression
|
|
||||||
ret.upgrade = upgrade
|
|
||||||
ret.chunked = chunked
|
|
||||||
ret.url = url
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
|
||||||
cdef class RawResponseMessage:
|
|
||||||
cdef readonly object version # HttpVersion
|
|
||||||
cdef readonly int code
|
|
||||||
cdef readonly str reason
|
|
||||||
cdef readonly object headers # CIMultiDict
|
|
||||||
cdef readonly object raw_headers # tuple
|
|
||||||
cdef readonly object should_close
|
|
||||||
cdef readonly object compression
|
|
||||||
cdef readonly object upgrade
|
|
||||||
cdef readonly object chunked
|
|
||||||
|
|
||||||
def __init__(self, version, code, reason, headers, raw_headers,
|
|
||||||
should_close, compression, upgrade, chunked):
|
|
||||||
self.version = version
|
|
||||||
self.code = code
|
|
||||||
self.reason = reason
|
|
||||||
self.headers = headers
|
|
||||||
self.raw_headers = raw_headers
|
|
||||||
self.should_close = should_close
|
|
||||||
self.compression = compression
|
|
||||||
self.upgrade = upgrade
|
|
||||||
self.chunked = chunked
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
info = []
|
|
||||||
info.append(("version", self.version))
|
|
||||||
info.append(("code", self.code))
|
|
||||||
info.append(("reason", self.reason))
|
|
||||||
info.append(("headers", self.headers))
|
|
||||||
info.append(("raw_headers", self.raw_headers))
|
|
||||||
info.append(("should_close", self.should_close))
|
|
||||||
info.append(("compression", self.compression))
|
|
||||||
info.append(("upgrade", self.upgrade))
|
|
||||||
info.append(("chunked", self.chunked))
|
|
||||||
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
|
||||||
return '<RawResponseMessage(' + sinfo + ')>'
|
|
||||||
|
|
||||||
|
|
||||||
cdef _new_response_message(object version,
|
|
||||||
int code,
|
|
||||||
str reason,
|
|
||||||
object headers,
|
|
||||||
object raw_headers,
|
|
||||||
bint should_close,
|
|
||||||
object compression,
|
|
||||||
bint upgrade,
|
|
||||||
bint chunked):
|
|
||||||
cdef RawResponseMessage ret
|
|
||||||
ret = RawResponseMessage.__new__(RawResponseMessage)
|
|
||||||
ret.version = version
|
|
||||||
ret.code = code
|
|
||||||
ret.reason = reason
|
|
||||||
ret.headers = headers
|
|
||||||
ret.raw_headers = raw_headers
|
|
||||||
ret.should_close = should_close
|
|
||||||
ret.compression = compression
|
|
||||||
ret.upgrade = upgrade
|
|
||||||
ret.chunked = chunked
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
@cython.internal
|
|
||||||
cdef class HttpParser:
|
|
||||||
|
|
||||||
cdef:
|
|
||||||
cparser.http_parser* _cparser
|
|
||||||
cparser.http_parser_settings* _csettings
|
|
||||||
|
|
||||||
bytearray _raw_name
|
|
||||||
bytearray _raw_value
|
|
||||||
bint _has_value
|
|
||||||
|
|
||||||
object _protocol
|
|
||||||
object _loop
|
|
||||||
object _timer
|
|
||||||
|
|
||||||
size_t _max_line_size
|
|
||||||
size_t _max_field_size
|
|
||||||
size_t _max_headers
|
|
||||||
bint _response_with_body
|
|
||||||
bint _read_until_eof
|
|
||||||
|
|
||||||
bint _started
|
|
||||||
object _url
|
|
||||||
bytearray _buf
|
|
||||||
str _path
|
|
||||||
str _reason
|
|
||||||
object _headers
|
|
||||||
list _raw_headers
|
|
||||||
bint _upgraded
|
|
||||||
list _messages
|
|
||||||
object _payload
|
|
||||||
bint _payload_error
|
|
||||||
object _payload_exception
|
|
||||||
object _last_error
|
|
||||||
bint _auto_decompress
|
|
||||||
int _limit
|
|
||||||
|
|
||||||
str _content_encoding
|
|
||||||
|
|
||||||
Py_buffer py_buf
|
|
||||||
|
|
||||||
def __cinit__(self):
|
|
||||||
self._cparser = <cparser.http_parser*> \
|
|
||||||
PyMem_Malloc(sizeof(cparser.http_parser))
|
|
||||||
if self._cparser is NULL:
|
|
||||||
raise MemoryError()
|
|
||||||
|
|
||||||
self._csettings = <cparser.http_parser_settings*> \
|
|
||||||
PyMem_Malloc(sizeof(cparser.http_parser_settings))
|
|
||||||
if self._csettings is NULL:
|
|
||||||
raise MemoryError()
|
|
||||||
|
|
||||||
def __dealloc__(self):
|
|
||||||
PyMem_Free(self._cparser)
|
|
||||||
PyMem_Free(self._csettings)
|
|
||||||
|
|
||||||
cdef _init(self, cparser.http_parser_type mode,
|
|
||||||
object protocol, object loop, int limit,
|
|
||||||
object timer=None,
|
|
||||||
size_t max_line_size=8190, size_t max_headers=32768,
|
|
||||||
size_t max_field_size=8190, payload_exception=None,
|
|
||||||
bint response_with_body=True, bint read_until_eof=False,
|
|
||||||
bint auto_decompress=True):
|
|
||||||
cparser.http_parser_init(self._cparser, mode)
|
|
||||||
self._cparser.data = <void*>self
|
|
||||||
self._cparser.content_length = 0
|
|
||||||
|
|
||||||
cparser.http_parser_settings_init(self._csettings)
|
|
||||||
|
|
||||||
self._protocol = protocol
|
|
||||||
self._loop = loop
|
|
||||||
self._timer = timer
|
|
||||||
|
|
||||||
self._buf = bytearray()
|
|
||||||
self._payload = None
|
|
||||||
self._payload_error = 0
|
|
||||||
self._payload_exception = payload_exception
|
|
||||||
self._messages = []
|
|
||||||
|
|
||||||
self._raw_name = bytearray()
|
|
||||||
self._raw_value = bytearray()
|
|
||||||
self._has_value = False
|
|
||||||
|
|
||||||
self._max_line_size = max_line_size
|
|
||||||
self._max_headers = max_headers
|
|
||||||
self._max_field_size = max_field_size
|
|
||||||
self._response_with_body = response_with_body
|
|
||||||
self._read_until_eof = read_until_eof
|
|
||||||
self._upgraded = False
|
|
||||||
self._auto_decompress = auto_decompress
|
|
||||||
self._content_encoding = None
|
|
||||||
|
|
||||||
self._csettings.on_url = cb_on_url
|
|
||||||
self._csettings.on_status = cb_on_status
|
|
||||||
self._csettings.on_header_field = cb_on_header_field
|
|
||||||
self._csettings.on_header_value = cb_on_header_value
|
|
||||||
self._csettings.on_headers_complete = cb_on_headers_complete
|
|
||||||
self._csettings.on_body = cb_on_body
|
|
||||||
self._csettings.on_message_begin = cb_on_message_begin
|
|
||||||
self._csettings.on_message_complete = cb_on_message_complete
|
|
||||||
self._csettings.on_chunk_header = cb_on_chunk_header
|
|
||||||
self._csettings.on_chunk_complete = cb_on_chunk_complete
|
|
||||||
|
|
||||||
self._last_error = None
|
|
||||||
self._limit = limit
|
|
||||||
|
|
||||||
cdef _process_header(self):
|
|
||||||
if self._raw_name:
|
|
||||||
raw_name = bytes(self._raw_name)
|
|
||||||
raw_value = bytes(self._raw_value)
|
|
||||||
|
|
||||||
name = find_header(raw_name)
|
|
||||||
value = raw_value.decode('utf-8', 'surrogateescape')
|
|
||||||
|
|
||||||
self._headers.add(name, value)
|
|
||||||
|
|
||||||
if name is CONTENT_ENCODING:
|
|
||||||
self._content_encoding = value
|
|
||||||
|
|
||||||
PyByteArray_Resize(self._raw_name, 0)
|
|
||||||
PyByteArray_Resize(self._raw_value, 0)
|
|
||||||
self._has_value = False
|
|
||||||
self._raw_headers.append((raw_name, raw_value))
|
|
||||||
|
|
||||||
cdef _on_header_field(self, char* at, size_t length):
|
|
||||||
cdef Py_ssize_t size
|
|
||||||
cdef char *buf
|
|
||||||
if self._has_value:
|
|
||||||
self._process_header()
|
|
||||||
|
|
||||||
size = PyByteArray_Size(self._raw_name)
|
|
||||||
PyByteArray_Resize(self._raw_name, size + length)
|
|
||||||
buf = PyByteArray_AsString(self._raw_name)
|
|
||||||
memcpy(buf + size, at, length)
|
|
||||||
|
|
||||||
cdef _on_header_value(self, char* at, size_t length):
|
|
||||||
cdef Py_ssize_t size
|
|
||||||
cdef char *buf
|
|
||||||
|
|
||||||
size = PyByteArray_Size(self._raw_value)
|
|
||||||
PyByteArray_Resize(self._raw_value, size + length)
|
|
||||||
buf = PyByteArray_AsString(self._raw_value)
|
|
||||||
memcpy(buf + size, at, length)
|
|
||||||
self._has_value = True
|
|
||||||
|
|
||||||
cdef _on_headers_complete(self):
|
|
||||||
self._process_header()
|
|
||||||
|
|
||||||
method = http_method_str(self._cparser.method)
|
|
||||||
should_close = not cparser.http_should_keep_alive(self._cparser)
|
|
||||||
upgrade = self._cparser.upgrade
|
|
||||||
chunked = self._cparser.flags & cparser.F_CHUNKED
|
|
||||||
|
|
||||||
raw_headers = tuple(self._raw_headers)
|
|
||||||
headers = CIMultiDictProxy(self._headers)
|
|
||||||
|
|
||||||
if upgrade or self._cparser.method == 5: # cparser.CONNECT:
|
|
||||||
self._upgraded = True
|
|
||||||
|
|
||||||
# do not support old websocket spec
|
|
||||||
if SEC_WEBSOCKET_KEY1 in headers:
|
|
||||||
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
|
|
||||||
|
|
||||||
encoding = None
|
|
||||||
enc = self._content_encoding
|
|
||||||
if enc is not None:
|
|
||||||
self._content_encoding = None
|
|
||||||
enc = enc.lower()
|
|
||||||
if enc in ('gzip', 'deflate', 'br'):
|
|
||||||
encoding = enc
|
|
||||||
|
|
||||||
if self._cparser.type == cparser.HTTP_REQUEST:
|
|
||||||
msg = _new_request_message(
|
|
||||||
method, self._path,
|
|
||||||
self.http_version(), headers, raw_headers,
|
|
||||||
should_close, encoding, upgrade, chunked, self._url)
|
|
||||||
else:
|
|
||||||
msg = _new_response_message(
|
|
||||||
self.http_version(), self._cparser.status_code, self._reason,
|
|
||||||
headers, raw_headers, should_close, encoding,
|
|
||||||
upgrade, chunked)
|
|
||||||
|
|
||||||
if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or
|
|
||||||
self._cparser.method == 5 or # CONNECT: 5
|
|
||||||
(self._cparser.status_code >= 199 and
|
|
||||||
self._cparser.content_length == ULLONG_MAX and
|
|
||||||
self._read_until_eof)
|
|
||||||
):
|
|
||||||
payload = StreamReader(
|
|
||||||
self._protocol, timer=self._timer, loop=self._loop,
|
|
||||||
limit=self._limit)
|
|
||||||
else:
|
|
||||||
payload = EMPTY_PAYLOAD
|
|
||||||
|
|
||||||
self._payload = payload
|
|
||||||
if encoding is not None and self._auto_decompress:
|
|
||||||
self._payload = DeflateBuffer(payload, encoding)
|
|
||||||
|
|
||||||
if not self._response_with_body:
|
|
||||||
payload = EMPTY_PAYLOAD
|
|
||||||
|
|
||||||
self._messages.append((msg, payload))
|
|
||||||
|
|
||||||
cdef _on_message_complete(self):
|
|
||||||
self._payload.feed_eof()
|
|
||||||
self._payload = None
|
|
||||||
|
|
||||||
cdef _on_chunk_header(self):
|
|
||||||
self._payload.begin_http_chunk_receiving()
|
|
||||||
|
|
||||||
cdef _on_chunk_complete(self):
|
|
||||||
self._payload.end_http_chunk_receiving()
|
|
||||||
|
|
||||||
cdef object _on_status_complete(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
cdef inline http_version(self):
|
|
||||||
cdef cparser.http_parser* parser = self._cparser
|
|
||||||
|
|
||||||
if parser.http_major == 1:
|
|
||||||
if parser.http_minor == 0:
|
|
||||||
return HttpVersion10
|
|
||||||
elif parser.http_minor == 1:
|
|
||||||
return HttpVersion11
|
|
||||||
|
|
||||||
return HttpVersion(parser.http_major, parser.http_minor)
|
|
||||||
|
|
||||||
### Public API ###
|
|
||||||
|
|
||||||
def feed_eof(self):
|
|
||||||
cdef bytes desc
|
|
||||||
|
|
||||||
if self._payload is not None:
|
|
||||||
if self._cparser.flags & cparser.F_CHUNKED:
|
|
||||||
raise TransferEncodingError(
|
|
||||||
"Not enough data for satisfy transfer length header.")
|
|
||||||
elif self._cparser.flags & cparser.F_CONTENTLENGTH:
|
|
||||||
raise ContentLengthError(
|
|
||||||
"Not enough data for satisfy content length header.")
|
|
||||||
elif self._cparser.http_errno != cparser.HPE_OK:
|
|
||||||
desc = cparser.http_errno_description(
|
|
||||||
<cparser.http_errno> self._cparser.http_errno)
|
|
||||||
raise PayloadEncodingError(desc.decode('latin-1'))
|
|
||||||
else:
|
|
||||||
self._payload.feed_eof()
|
|
||||||
elif self._started:
|
|
||||||
self._on_headers_complete()
|
|
||||||
if self._messages:
|
|
||||||
return self._messages[-1][0]
|
|
||||||
|
|
||||||
def feed_data(self, data):
|
|
||||||
cdef:
|
|
||||||
size_t data_len
|
|
||||||
size_t nb
|
|
||||||
|
|
||||||
PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
|
|
||||||
data_len = <size_t>self.py_buf.len
|
|
||||||
|
|
||||||
nb = cparser.http_parser_execute(
|
|
||||||
self._cparser,
|
|
||||||
self._csettings,
|
|
||||||
<char*>self.py_buf.buf,
|
|
||||||
data_len)
|
|
||||||
|
|
||||||
PyBuffer_Release(&self.py_buf)
|
|
||||||
|
|
||||||
if (self._cparser.http_errno != cparser.HPE_OK):
|
|
||||||
if self._payload_error == 0:
|
|
||||||
if self._last_error is not None:
|
|
||||||
ex = self._last_error
|
|
||||||
self._last_error = None
|
|
||||||
else:
|
|
||||||
ex = parser_error_from_errno(
|
|
||||||
<cparser.http_errno> self._cparser.http_errno)
|
|
||||||
self._payload = None
|
|
||||||
raise ex
|
|
||||||
|
|
||||||
if self._messages:
|
|
||||||
messages = self._messages
|
|
||||||
self._messages = []
|
|
||||||
else:
|
|
||||||
messages = ()
|
|
||||||
|
|
||||||
if self._upgraded:
|
|
||||||
return messages, True, data[nb:]
|
|
||||||
else:
|
|
||||||
return messages, False, b''
|
|
||||||
|
|
||||||
def set_upgraded(self, val):
|
|
||||||
self._upgraded = val
|
|
||||||
|
|
||||||
|
|
||||||
cdef class HttpRequestParser(HttpParser):
|
|
||||||
|
|
||||||
def __init__(self, protocol, loop, int limit, timer=None,
|
|
||||||
size_t max_line_size=8190, size_t max_headers=32768,
|
|
||||||
size_t max_field_size=8190, payload_exception=None,
|
|
||||||
bint response_with_body=True, bint read_until_eof=False,
|
|
||||||
):
|
|
||||||
self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
|
|
||||||
max_line_size, max_headers, max_field_size,
|
|
||||||
payload_exception, response_with_body, read_until_eof)
|
|
||||||
|
|
||||||
cdef object _on_status_complete(self):
|
|
||||||
cdef Py_buffer py_buf
|
|
||||||
if not self._buf:
|
|
||||||
return
|
|
||||||
self._path = self._buf.decode('utf-8', 'surrogateescape')
|
|
||||||
if self._cparser.method == 5: # CONNECT
|
|
||||||
self._url = URL(self._path)
|
|
||||||
else:
|
|
||||||
PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE)
|
|
||||||
try:
|
|
||||||
self._url = _parse_url(<char*>py_buf.buf,
|
|
||||||
py_buf.len)
|
|
||||||
finally:
|
|
||||||
PyBuffer_Release(&py_buf)
|
|
||||||
PyByteArray_Resize(self._buf, 0)
|
|
||||||
|
|
||||||
|
|
||||||
cdef class HttpResponseParser(HttpParser):
|
|
||||||
|
|
||||||
def __init__(self, protocol, loop, int limit, timer=None,
|
|
||||||
size_t max_line_size=8190, size_t max_headers=32768,
|
|
||||||
size_t max_field_size=8190, payload_exception=None,
|
|
||||||
bint response_with_body=True, bint read_until_eof=False,
|
|
||||||
bint auto_decompress=True
|
|
||||||
):
|
|
||||||
self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
|
|
||||||
max_line_size, max_headers, max_field_size,
|
|
||||||
payload_exception, response_with_body, read_until_eof,
|
|
||||||
auto_decompress)
|
|
||||||
|
|
||||||
cdef object _on_status_complete(self):
|
|
||||||
if self._buf:
|
|
||||||
self._reason = self._buf.decode('utf-8', 'surrogateescape')
|
|
||||||
PyByteArray_Resize(self._buf, 0)
|
|
||||||
else:
|
|
||||||
self._reason = self._reason or ''
|
|
||||||
|
|
||||||
cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
|
|
||||||
pyparser._started = True
|
|
||||||
pyparser._headers = CIMultiDict()
|
|
||||||
pyparser._raw_headers = []
|
|
||||||
PyByteArray_Resize(pyparser._buf, 0)
|
|
||||||
pyparser._path = None
|
|
||||||
pyparser._reason = None
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_url(cparser.http_parser* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
try:
|
|
||||||
if length > pyparser._max_line_size:
|
|
||||||
raise LineTooLong(
|
|
||||||
'Status line is too long', pyparser._max_line_size, length)
|
|
||||||
extend(pyparser._buf, at, length)
|
|
||||||
except BaseException as ex:
|
|
||||||
pyparser._last_error = ex
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_status(cparser.http_parser* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
cdef str reason
|
|
||||||
try:
|
|
||||||
if length > pyparser._max_line_size:
|
|
||||||
raise LineTooLong(
|
|
||||||
'Status line is too long', pyparser._max_line_size, length)
|
|
||||||
extend(pyparser._buf, at, length)
|
|
||||||
except BaseException as ex:
|
|
||||||
pyparser._last_error = ex
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_header_field(cparser.http_parser* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
cdef Py_ssize_t size
|
|
||||||
try:
|
|
||||||
pyparser._on_status_complete()
|
|
||||||
size = len(pyparser._raw_name) + length
|
|
||||||
if size > pyparser._max_field_size:
|
|
||||||
raise LineTooLong(
|
|
||||||
'Header name is too long', pyparser._max_field_size, size)
|
|
||||||
pyparser._on_header_field(at, length)
|
|
||||||
except BaseException as ex:
|
|
||||||
pyparser._last_error = ex
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_header_value(cparser.http_parser* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
cdef Py_ssize_t size
|
|
||||||
try:
|
|
||||||
size = len(pyparser._raw_value) + length
|
|
||||||
if size > pyparser._max_field_size:
|
|
||||||
raise LineTooLong(
|
|
||||||
'Header value is too long', pyparser._max_field_size, size)
|
|
||||||
pyparser._on_header_value(at, length)
|
|
||||||
except BaseException as ex:
|
|
||||||
pyparser._last_error = ex
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
try:
|
|
||||||
pyparser._on_status_complete()
|
|
||||||
pyparser._on_headers_complete()
|
|
||||||
except BaseException as exc:
|
|
||||||
pyparser._last_error = exc
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT
|
|
||||||
return 2
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_body(cparser.http_parser* parser,
|
|
||||||
const char *at, size_t length) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
cdef bytes body = at[:length]
|
|
||||||
try:
|
|
||||||
pyparser._payload.feed_data(body, length)
|
|
||||||
except BaseException as exc:
|
|
||||||
if pyparser._payload_exception is not None:
|
|
||||||
pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
|
|
||||||
else:
|
|
||||||
pyparser._payload.set_exception(exc)
|
|
||||||
pyparser._payload_error = 1
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_message_complete(cparser.http_parser* parser) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
try:
|
|
||||||
pyparser._started = False
|
|
||||||
pyparser._on_message_complete()
|
|
||||||
except BaseException as exc:
|
|
||||||
pyparser._last_error = exc
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
try:
|
|
||||||
pyparser._on_chunk_header()
|
|
||||||
except BaseException as exc:
|
|
||||||
pyparser._last_error = exc
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1:
|
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
|
||||||
try:
|
|
||||||
pyparser._on_chunk_complete()
|
|
||||||
except BaseException as exc:
|
|
||||||
pyparser._last_error = exc
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef parser_error_from_errno(cparser.http_errno errno):
|
|
||||||
cdef bytes desc = cparser.http_errno_description(errno)
|
|
||||||
|
|
||||||
if errno in (cparser.HPE_CB_message_begin,
|
|
||||||
cparser.HPE_CB_url,
|
|
||||||
cparser.HPE_CB_header_field,
|
|
||||||
cparser.HPE_CB_header_value,
|
|
||||||
cparser.HPE_CB_headers_complete,
|
|
||||||
cparser.HPE_CB_body,
|
|
||||||
cparser.HPE_CB_message_complete,
|
|
||||||
cparser.HPE_CB_status,
|
|
||||||
cparser.HPE_CB_chunk_header,
|
|
||||||
cparser.HPE_CB_chunk_complete):
|
|
||||||
cls = BadHttpMessage
|
|
||||||
|
|
||||||
elif errno == cparser.HPE_INVALID_STATUS:
|
|
||||||
cls = BadStatusLine
|
|
||||||
|
|
||||||
elif errno == cparser.HPE_INVALID_METHOD:
|
|
||||||
cls = BadStatusLine
|
|
||||||
|
|
||||||
elif errno == cparser.HPE_INVALID_URL:
|
|
||||||
cls = InvalidURLError
|
|
||||||
|
|
||||||
else:
|
|
||||||
cls = BadHttpMessage
|
|
||||||
|
|
||||||
return cls(desc.decode('latin-1'))
|
|
||||||
|
|
||||||
|
|
||||||
def parse_url(url):
|
|
||||||
cdef:
|
|
||||||
Py_buffer py_buf
|
|
||||||
char* buf_data
|
|
||||||
|
|
||||||
PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
|
|
||||||
try:
|
|
||||||
buf_data = <char*>py_buf.buf
|
|
||||||
return _parse_url(buf_data, py_buf.len)
|
|
||||||
finally:
|
|
||||||
PyBuffer_Release(&py_buf)
|
|
||||||
|
|
||||||
|
|
||||||
cdef _parse_url(char* buf_data, size_t length):
|
|
||||||
cdef:
|
|
||||||
cparser.http_parser_url* parsed
|
|
||||||
int res
|
|
||||||
str schema = None
|
|
||||||
str host = None
|
|
||||||
object port = None
|
|
||||||
str path = None
|
|
||||||
str query = None
|
|
||||||
str fragment = None
|
|
||||||
str user = None
|
|
||||||
str password = None
|
|
||||||
str userinfo = None
|
|
||||||
object result = None
|
|
||||||
int off
|
|
||||||
int ln
|
|
||||||
|
|
||||||
parsed = <cparser.http_parser_url*> \
|
|
||||||
PyMem_Malloc(sizeof(cparser.http_parser_url))
|
|
||||||
if parsed is NULL:
|
|
||||||
raise MemoryError()
|
|
||||||
cparser.http_parser_url_init(parsed)
|
|
||||||
try:
|
|
||||||
res = cparser.http_parser_parse_url(buf_data, length, 0, parsed)
|
|
||||||
|
|
||||||
if res == 0:
|
|
||||||
if parsed.field_set & (1 << cparser.UF_SCHEMA):
|
|
||||||
off = parsed.field_data[<int>cparser.UF_SCHEMA].off
|
|
||||||
ln = parsed.field_data[<int>cparser.UF_SCHEMA].len
|
|
||||||
schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
|
||||||
else:
|
|
||||||
schema = ''
|
|
||||||
|
|
||||||
if parsed.field_set & (1 << cparser.UF_HOST):
|
|
||||||
off = parsed.field_data[<int>cparser.UF_HOST].off
|
|
||||||
ln = parsed.field_data[<int>cparser.UF_HOST].len
|
|
||||||
host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
|
||||||
else:
|
|
||||||
host = ''
|
|
||||||
|
|
||||||
if parsed.field_set & (1 << cparser.UF_PORT):
|
|
||||||
port = parsed.port
|
|
||||||
|
|
||||||
if parsed.field_set & (1 << cparser.UF_PATH):
|
|
||||||
off = parsed.field_data[<int>cparser.UF_PATH].off
|
|
||||||
ln = parsed.field_data[<int>cparser.UF_PATH].len
|
|
||||||
path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
|
||||||
else:
|
|
||||||
path = ''
|
|
||||||
|
|
||||||
if parsed.field_set & (1 << cparser.UF_QUERY):
|
|
||||||
off = parsed.field_data[<int>cparser.UF_QUERY].off
|
|
||||||
ln = parsed.field_data[<int>cparser.UF_QUERY].len
|
|
||||||
query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
|
||||||
else:
|
|
||||||
query = ''
|
|
||||||
|
|
||||||
if parsed.field_set & (1 << cparser.UF_FRAGMENT):
|
|
||||||
off = parsed.field_data[<int>cparser.UF_FRAGMENT].off
|
|
||||||
ln = parsed.field_data[<int>cparser.UF_FRAGMENT].len
|
|
||||||
fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
|
||||||
else:
|
|
||||||
fragment = ''
|
|
||||||
|
|
||||||
if parsed.field_set & (1 << cparser.UF_USERINFO):
|
|
||||||
off = parsed.field_data[<int>cparser.UF_USERINFO].off
|
|
||||||
ln = parsed.field_data[<int>cparser.UF_USERINFO].len
|
|
||||||
userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
|
|
||||||
|
|
||||||
user, sep, password = userinfo.partition(':')
|
|
||||||
|
|
||||||
return URL_build(scheme=schema,
|
|
||||||
user=user, password=password, host=host, port=port,
|
|
||||||
path=path, query_string=query, fragment=fragment, encoded=True)
|
|
||||||
else:
|
|
||||||
raise InvalidURLError("invalid url {!r}".format(buf_data))
|
|
||||||
finally:
|
|
||||||
PyMem_Free(parsed)
|
|
||||||
5840
env/Lib/site-packages/aiohttp/_http_writer.c
vendored
5840
env/Lib/site-packages/aiohttp/_http_writer.c
vendored
File diff suppressed because it is too large
Load diff
Binary file not shown.
151
env/Lib/site-packages/aiohttp/_http_writer.pyx
vendored
151
env/Lib/site-packages/aiohttp/_http_writer.pyx
vendored
|
|
@ -1,151 +0,0 @@
|
||||||
from cpython.bytes cimport PyBytes_FromStringAndSize
|
|
||||||
from cpython.exc cimport PyErr_NoMemory
|
|
||||||
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
|
|
||||||
from cpython.object cimport PyObject_Str
|
|
||||||
from libc.stdint cimport uint8_t, uint64_t
|
|
||||||
from libc.string cimport memcpy
|
|
||||||
|
|
||||||
from multidict import istr
|
|
||||||
|
|
||||||
DEF BUF_SIZE = 16 * 1024 # 16KiB
|
|
||||||
cdef char BUFFER[BUF_SIZE]
|
|
||||||
|
|
||||||
cdef object _istr = istr
|
|
||||||
|
|
||||||
|
|
||||||
# ----------------- writer ---------------------------
|
|
||||||
|
|
||||||
cdef struct Writer:
|
|
||||||
char *buf
|
|
||||||
Py_ssize_t size
|
|
||||||
Py_ssize_t pos
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline void _init_writer(Writer* writer):
|
|
||||||
writer.buf = &BUFFER[0]
|
|
||||||
writer.size = BUF_SIZE
|
|
||||||
writer.pos = 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline void _release_writer(Writer* writer):
|
|
||||||
if writer.buf != BUFFER:
|
|
||||||
PyMem_Free(writer.buf)
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline int _write_byte(Writer* writer, uint8_t ch):
|
|
||||||
cdef char * buf
|
|
||||||
cdef Py_ssize_t size
|
|
||||||
|
|
||||||
if writer.pos == writer.size:
|
|
||||||
# reallocate
|
|
||||||
size = writer.size + BUF_SIZE
|
|
||||||
if writer.buf == BUFFER:
|
|
||||||
buf = <char*>PyMem_Malloc(size)
|
|
||||||
if buf == NULL:
|
|
||||||
PyErr_NoMemory()
|
|
||||||
return -1
|
|
||||||
memcpy(buf, writer.buf, writer.size)
|
|
||||||
else:
|
|
||||||
buf = <char*>PyMem_Realloc(writer.buf, size)
|
|
||||||
if buf == NULL:
|
|
||||||
PyErr_NoMemory()
|
|
||||||
return -1
|
|
||||||
writer.buf = buf
|
|
||||||
writer.size = size
|
|
||||||
writer.buf[writer.pos] = <char>ch
|
|
||||||
writer.pos += 1
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
|
|
||||||
cdef uint64_t utf = <uint64_t> symbol
|
|
||||||
|
|
||||||
if utf < 0x80:
|
|
||||||
return _write_byte(writer, <uint8_t>utf)
|
|
||||||
elif utf < 0x800:
|
|
||||||
if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
|
|
||||||
return -1
|
|
||||||
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
|
||||||
elif 0xD800 <= utf <= 0xDFFF:
|
|
||||||
# surogate pair, ignored
|
|
||||||
return 0
|
|
||||||
elif utf < 0x10000:
|
|
||||||
if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
|
|
||||||
return -1
|
|
||||||
if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
|
|
||||||
return -1
|
|
||||||
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
|
||||||
elif utf > 0x10FFFF:
|
|
||||||
# symbol is too large
|
|
||||||
return 0
|
|
||||||
else:
|
|
||||||
if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
|
|
||||||
return -1
|
|
||||||
if _write_byte(writer,
|
|
||||||
<uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
|
|
||||||
return -1
|
|
||||||
if _write_byte(writer,
|
|
||||||
<uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
|
|
||||||
return -1
|
|
||||||
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
|
||||||
|
|
||||||
|
|
||||||
cdef inline int _write_str(Writer* writer, str s):
|
|
||||||
cdef Py_UCS4 ch
|
|
||||||
for ch in s:
|
|
||||||
if _write_utf8(writer, ch) < 0:
|
|
||||||
return -1
|
|
||||||
|
|
||||||
|
|
||||||
# --------------- _serialize_headers ----------------------
|
|
||||||
|
|
||||||
cdef str to_str(object s):
|
|
||||||
typ = type(s)
|
|
||||||
if typ is str:
|
|
||||||
return <str>s
|
|
||||||
elif typ is _istr:
|
|
||||||
return PyObject_Str(s)
|
|
||||||
elif not isinstance(s, str):
|
|
||||||
raise TypeError("Cannot serialize non-str key {!r}".format(s))
|
|
||||||
else:
|
|
||||||
return str(s)
|
|
||||||
|
|
||||||
|
|
||||||
def _serialize_headers(str status_line, headers):
|
|
||||||
cdef Writer writer
|
|
||||||
cdef object key
|
|
||||||
cdef object val
|
|
||||||
cdef bytes ret
|
|
||||||
|
|
||||||
_init_writer(&writer)
|
|
||||||
|
|
||||||
try:
|
|
||||||
if _write_str(&writer, status_line) < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b'\r') < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b'\n') < 0:
|
|
||||||
raise
|
|
||||||
|
|
||||||
for key, val in headers.items():
|
|
||||||
if _write_str(&writer, to_str(key)) < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b':') < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b' ') < 0:
|
|
||||||
raise
|
|
||||||
if _write_str(&writer, to_str(val)) < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b'\r') < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b'\n') < 0:
|
|
||||||
raise
|
|
||||||
|
|
||||||
if _write_byte(&writer, b'\r') < 0:
|
|
||||||
raise
|
|
||||||
if _write_byte(&writer, b'\n') < 0:
|
|
||||||
raise
|
|
||||||
|
|
||||||
return PyBytes_FromStringAndSize(writer.buf, writer.pos)
|
|
||||||
finally:
|
|
||||||
_release_writer(&writer)
|
|
||||||
3588
env/Lib/site-packages/aiohttp/_websocket.c
vendored
3588
env/Lib/site-packages/aiohttp/_websocket.c
vendored
File diff suppressed because it is too large
Load diff
Binary file not shown.
56
env/Lib/site-packages/aiohttp/_websocket.pyx
vendored
56
env/Lib/site-packages/aiohttp/_websocket.pyx
vendored
|
|
@ -1,56 +0,0 @@
|
||||||
from cpython cimport PyBytes_AsString
|
|
||||||
|
|
||||||
|
|
||||||
#from cpython cimport PyByteArray_AsString # cython still not exports that
|
|
||||||
cdef extern from "Python.h":
|
|
||||||
char* PyByteArray_AsString(bytearray ba) except NULL
|
|
||||||
|
|
||||||
from libc.stdint cimport uint32_t, uint64_t, uintmax_t
|
|
||||||
|
|
||||||
|
|
||||||
def _websocket_mask_cython(object mask, object data):
|
|
||||||
"""Note, this function mutates its `data` argument
|
|
||||||
"""
|
|
||||||
cdef:
|
|
||||||
Py_ssize_t data_len, i
|
|
||||||
# bit operations on signed integers are implementation-specific
|
|
||||||
unsigned char * in_buf
|
|
||||||
const unsigned char * mask_buf
|
|
||||||
uint32_t uint32_msk
|
|
||||||
uint64_t uint64_msk
|
|
||||||
|
|
||||||
assert len(mask) == 4
|
|
||||||
|
|
||||||
if not isinstance(mask, bytes):
|
|
||||||
mask = bytes(mask)
|
|
||||||
|
|
||||||
if isinstance(data, bytearray):
|
|
||||||
data = <bytearray>data
|
|
||||||
else:
|
|
||||||
data = bytearray(data)
|
|
||||||
|
|
||||||
data_len = len(data)
|
|
||||||
in_buf = <unsigned char*>PyByteArray_AsString(data)
|
|
||||||
mask_buf = <const unsigned char*>PyBytes_AsString(mask)
|
|
||||||
uint32_msk = (<uint32_t*>mask_buf)[0]
|
|
||||||
|
|
||||||
# TODO: align in_data ptr to achieve even faster speeds
|
|
||||||
# does it need in python ?! malloc() always aligns to sizeof(long) bytes
|
|
||||||
|
|
||||||
if sizeof(size_t) >= 8:
|
|
||||||
uint64_msk = uint32_msk
|
|
||||||
uint64_msk = (uint64_msk << 32) | uint32_msk
|
|
||||||
|
|
||||||
while data_len >= 8:
|
|
||||||
(<uint64_t*>in_buf)[0] ^= uint64_msk
|
|
||||||
in_buf += 8
|
|
||||||
data_len -= 8
|
|
||||||
|
|
||||||
|
|
||||||
while data_len >= 4:
|
|
||||||
(<uint32_t*>in_buf)[0] ^= uint32_msk
|
|
||||||
in_buf += 4
|
|
||||||
data_len -= 4
|
|
||||||
|
|
||||||
for i in range(0, data_len):
|
|
||||||
in_buf[i] ^= mask_buf[i]
|
|
||||||
200
env/Lib/site-packages/aiohttp/abc.py
vendored
200
env/Lib/site-packages/aiohttp/abc.py
vendored
|
|
@ -1,200 +0,0 @@
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
from abc import ABC, abstractmethod
|
|
||||||
from collections.abc import Sized
|
|
||||||
from http.cookies import BaseCookie, Morsel
|
|
||||||
from typing import (
|
|
||||||
TYPE_CHECKING,
|
|
||||||
Any,
|
|
||||||
Awaitable,
|
|
||||||
Callable,
|
|
||||||
Dict,
|
|
||||||
Generator,
|
|
||||||
Iterable,
|
|
||||||
List,
|
|
||||||
Optional,
|
|
||||||
Tuple,
|
|
||||||
)
|
|
||||||
|
|
||||||
from multidict import CIMultiDict
|
|
||||||
from yarl import URL
|
|
||||||
|
|
||||||
from .helpers import get_running_loop
|
|
||||||
from .typedefs import LooseCookies
|
|
||||||
|
|
||||||
if TYPE_CHECKING: # pragma: no cover
|
|
||||||
from .web_app import Application
|
|
||||||
from .web_exceptions import HTTPException
|
|
||||||
from .web_request import BaseRequest, Request
|
|
||||||
from .web_response import StreamResponse
|
|
||||||
else:
|
|
||||||
BaseRequest = Request = Application = StreamResponse = None
|
|
||||||
HTTPException = None
|
|
||||||
|
|
||||||
|
|
||||||
class AbstractRouter(ABC):
|
|
||||||
def __init__(self) -> None:
|
|
||||||
self._frozen = False
|
|
||||||
|
|
||||||
def post_init(self, app: Application) -> None:
|
|
||||||
"""Post init stage.
|
|
||||||
|
|
||||||
Not an abstract method for sake of backward compatibility,
|
|
||||||
but if the router wants to be aware of the application
|
|
||||||
it can override this.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@property
|
|
||||||
def frozen(self) -> bool:
|
|
||||||
return self._frozen
|
|
||||||
|
|
||||||
def freeze(self) -> None:
|
|
||||||
"""Freeze router."""
|
|
||||||
self._frozen = True
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def resolve(self, request: Request) -> "AbstractMatchInfo":
|
|
||||||
"""Return MATCH_INFO for given request"""
|
|
||||||
|
|
||||||
|
|
||||||
class AbstractMatchInfo(ABC):
|
|
||||||
@property # pragma: no branch
|
|
||||||
@abstractmethod
|
|
||||||
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
|
|
||||||
"""Execute matched request handler"""
|
|
||||||
|
|
||||||
@property
|
|
||||||
@abstractmethod
|
|
||||||
def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
|
|
||||||
"""Expect handler for 100-continue processing"""
|
|
||||||
|
|
||||||
@property # pragma: no branch
|
|
||||||
@abstractmethod
|
|
||||||
def http_exception(self) -> Optional[HTTPException]:
|
|
||||||
"""HTTPException instance raised on router's resolving, or None"""
|
|
||||||
|
|
||||||
@abstractmethod # pragma: no branch
|
|
||||||
def get_info(self) -> Dict[str, Any]:
|
|
||||||
"""Return a dict with additional info useful for introspection"""
|
|
||||||
|
|
||||||
@property # pragma: no branch
|
|
||||||
@abstractmethod
|
|
||||||
def apps(self) -> Tuple[Application, ...]:
|
|
||||||
"""Stack of nested applications.
|
|
||||||
|
|
||||||
Top level application is left-most element.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def add_app(self, app: Application) -> None:
|
|
||||||
"""Add application to the nested apps stack."""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def freeze(self) -> None:
|
|
||||||
"""Freeze the match info.
|
|
||||||
|
|
||||||
The method is called after route resolution.
|
|
||||||
|
|
||||||
After the call .add_app() is forbidden.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class AbstractView(ABC):
|
|
||||||
"""Abstract class based view."""
|
|
||||||
|
|
||||||
def __init__(self, request: Request) -> None:
|
|
||||||
self._request = request
|
|
||||||
|
|
||||||
@property
|
|
||||||
def request(self) -> Request:
|
|
||||||
"""Request instance."""
|
|
||||||
return self._request
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def __await__(self) -> Generator[Any, None, StreamResponse]:
|
|
||||||
"""Execute the view handler."""
|
|
||||||
|
|
||||||
|
|
||||||
class AbstractResolver(ABC):
|
|
||||||
"""Abstract DNS resolver."""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
|
|
||||||
"""Return IP address for given hostname"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def close(self) -> None:
|
|
||||||
"""Release resolver"""
|
|
||||||
|
|
||||||
|
|
||||||
if TYPE_CHECKING: # pragma: no cover
|
|
||||||
IterableBase = Iterable[Morsel[str]]
|
|
||||||
else:
|
|
||||||
IterableBase = Iterable
|
|
||||||
|
|
||||||
|
|
||||||
class AbstractCookieJar(Sized, IterableBase):
|
|
||||||
"""Abstract Cookie Jar."""
|
|
||||||
|
|
||||||
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
|
|
||||||
self._loop = get_running_loop(loop)
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def clear(self) -> None:
|
|
||||||
"""Clear all cookies."""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
|
|
||||||
"""Update cookies."""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
|
|
||||||
"""Return the jar's cookies filtered by their attributes."""
|
|
||||||
|
|
||||||
|
|
||||||
class AbstractStreamWriter(ABC):
|
|
||||||
"""Abstract stream writer."""
|
|
||||||
|
|
||||||
buffer_size = 0
|
|
||||||
output_size = 0
|
|
||||||
length = 0 # type: Optional[int]
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def write(self, chunk: bytes) -> None:
|
|
||||||
"""Write chunk into stream."""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def write_eof(self, chunk: bytes = b"") -> None:
|
|
||||||
"""Write last chunk."""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def drain(self) -> None:
|
|
||||||
"""Flush the write buffer."""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def enable_compression(self, encoding: str = "deflate") -> None:
|
|
||||||
"""Enable HTTP body compression"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def enable_chunking(self) -> None:
|
|
||||||
"""Enable HTTP chunked mode"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def write_headers(
|
|
||||||
self, status_line: str, headers: "CIMultiDict[str]"
|
|
||||||
) -> None:
|
|
||||||
"""Write HTTP headers"""
|
|
||||||
|
|
||||||
|
|
||||||
class AbstractAccessLogger(ABC):
|
|
||||||
"""Abstract writer to access log."""
|
|
||||||
|
|
||||||
def __init__(self, logger: logging.Logger, log_format: str) -> None:
|
|
||||||
self.logger = logger
|
|
||||||
self.log_format = log_format
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
|
|
||||||
"""Emit log to logger."""
|
|
||||||
87
env/Lib/site-packages/aiohttp/base_protocol.py
vendored
87
env/Lib/site-packages/aiohttp/base_protocol.py
vendored
|
|
@ -1,87 +0,0 @@
|
||||||
import asyncio
|
|
||||||
from typing import Optional, cast
|
|
||||||
|
|
||||||
from .tcp_helpers import tcp_nodelay
|
|
||||||
|
|
||||||
|
|
||||||
class BaseProtocol(asyncio.Protocol):
|
|
||||||
__slots__ = (
|
|
||||||
"_loop",
|
|
||||||
"_paused",
|
|
||||||
"_drain_waiter",
|
|
||||||
"_connection_lost",
|
|
||||||
"_reading_paused",
|
|
||||||
"transport",
|
|
||||||
)
|
|
||||||
|
|
||||||
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
|
|
||||||
self._loop = loop # type: asyncio.AbstractEventLoop
|
|
||||||
self._paused = False
|
|
||||||
self._drain_waiter = None # type: Optional[asyncio.Future[None]]
|
|
||||||
self._connection_lost = False
|
|
||||||
self._reading_paused = False
|
|
||||||
|
|
||||||
self.transport = None # type: Optional[asyncio.Transport]
|
|
||||||
|
|
||||||
def pause_writing(self) -> None:
|
|
||||||
assert not self._paused
|
|
||||||
self._paused = True
|
|
||||||
|
|
||||||
def resume_writing(self) -> None:
|
|
||||||
assert self._paused
|
|
||||||
self._paused = False
|
|
||||||
|
|
||||||
waiter = self._drain_waiter
|
|
||||||
if waiter is not None:
|
|
||||||
self._drain_waiter = None
|
|
||||||
if not waiter.done():
|
|
||||||
waiter.set_result(None)
|
|
||||||
|
|
||||||
def pause_reading(self) -> None:
|
|
||||||
if not self._reading_paused and self.transport is not None:
|
|
||||||
try:
|
|
||||||
self.transport.pause_reading()
|
|
||||||
except (AttributeError, NotImplementedError, RuntimeError):
|
|
||||||
pass
|
|
||||||
self._reading_paused = True
|
|
||||||
|
|
||||||
def resume_reading(self) -> None:
|
|
||||||
if self._reading_paused and self.transport is not None:
|
|
||||||
try:
|
|
||||||
self.transport.resume_reading()
|
|
||||||
except (AttributeError, NotImplementedError, RuntimeError):
|
|
||||||
pass
|
|
||||||
self._reading_paused = False
|
|
||||||
|
|
||||||
def connection_made(self, transport: asyncio.BaseTransport) -> None:
|
|
||||||
tr = cast(asyncio.Transport, transport)
|
|
||||||
tcp_nodelay(tr, True)
|
|
||||||
self.transport = tr
|
|
||||||
|
|
||||||
def connection_lost(self, exc: Optional[BaseException]) -> None:
|
|
||||||
self._connection_lost = True
|
|
||||||
# Wake up the writer if currently paused.
|
|
||||||
self.transport = None
|
|
||||||
if not self._paused:
|
|
||||||
return
|
|
||||||
waiter = self._drain_waiter
|
|
||||||
if waiter is None:
|
|
||||||
return
|
|
||||||
self._drain_waiter = None
|
|
||||||
if waiter.done():
|
|
||||||
return
|
|
||||||
if exc is None:
|
|
||||||
waiter.set_result(None)
|
|
||||||
else:
|
|
||||||
waiter.set_exception(exc)
|
|
||||||
|
|
||||||
async def _drain_helper(self) -> None:
|
|
||||||
if self._connection_lost:
|
|
||||||
raise ConnectionResetError("Connection lost")
|
|
||||||
if not self._paused:
|
|
||||||
return
|
|
||||||
waiter = self._drain_waiter
|
|
||||||
assert waiter is None or waiter.cancelled()
|
|
||||||
waiter = self._loop.create_future()
|
|
||||||
self._drain_waiter = waiter
|
|
||||||
await waiter
|
|
||||||
1275
env/Lib/site-packages/aiohttp/client.py
vendored
1275
env/Lib/site-packages/aiohttp/client.py
vendored
File diff suppressed because it is too large
Load diff
317
env/Lib/site-packages/aiohttp/client_exceptions.py
vendored
317
env/Lib/site-packages/aiohttp/client_exceptions.py
vendored
|
|
@ -1,317 +0,0 @@
|
||||||
"""HTTP related errors."""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import warnings
|
|
||||||
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
|
|
||||||
|
|
||||||
from .typedefs import LooseHeaders
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ssl
|
|
||||||
|
|
||||||
SSLContext = ssl.SSLContext
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
ssl = SSLContext = None # type: ignore
|
|
||||||
|
|
||||||
|
|
||||||
if TYPE_CHECKING: # pragma: no cover
|
|
||||||
from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
|
|
||||||
else:
|
|
||||||
RequestInfo = ClientResponse = ConnectionKey = None
|
|
||||||
|
|
||||||
__all__ = (
|
|
||||||
"ClientError",
|
|
||||||
"ClientConnectionError",
|
|
||||||
"ClientOSError",
|
|
||||||
"ClientConnectorError",
|
|
||||||
"ClientProxyConnectionError",
|
|
||||||
"ClientSSLError",
|
|
||||||
"ClientConnectorSSLError",
|
|
||||||
"ClientConnectorCertificateError",
|
|
||||||
"ServerConnectionError",
|
|
||||||
"ServerTimeoutError",
|
|
||||||
"ServerDisconnectedError",
|
|
||||||
"ServerFingerprintMismatch",
|
|
||||||
"ClientResponseError",
|
|
||||||
"ClientHttpProxyError",
|
|
||||||
"WSServerHandshakeError",
|
|
||||||
"ContentTypeError",
|
|
||||||
"ClientPayloadError",
|
|
||||||
"InvalidURL",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ClientError(Exception):
|
|
||||||
"""Base class for client connection errors."""
|
|
||||||
|
|
||||||
|
|
||||||
class ClientResponseError(ClientError):
|
|
||||||
"""Connection error during reading response.
|
|
||||||
|
|
||||||
request_info: instance of RequestInfo
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
request_info: RequestInfo,
|
|
||||||
history: Tuple[ClientResponse, ...],
|
|
||||||
*,
|
|
||||||
code: Optional[int] = None,
|
|
||||||
status: Optional[int] = None,
|
|
||||||
message: str = "",
|
|
||||||
headers: Optional[LooseHeaders] = None,
|
|
||||||
) -> None:
|
|
||||||
self.request_info = request_info
|
|
||||||
if code is not None:
|
|
||||||
if status is not None:
|
|
||||||
raise ValueError(
|
|
||||||
"Both code and status arguments are provided; "
|
|
||||||
"code is deprecated, use status instead"
|
|
||||||
)
|
|
||||||
warnings.warn(
|
|
||||||
"code argument is deprecated, use status instead",
|
|
||||||
DeprecationWarning,
|
|
||||||
stacklevel=2,
|
|
||||||
)
|
|
||||||
if status is not None:
|
|
||||||
self.status = status
|
|
||||||
elif code is not None:
|
|
||||||
self.status = code
|
|
||||||
else:
|
|
||||||
self.status = 0
|
|
||||||
self.message = message
|
|
||||||
self.headers = headers
|
|
||||||
self.history = history
|
|
||||||
self.args = (request_info, history)
|
|
||||||
|
|
||||||
def __str__(self) -> str:
|
|
||||||
return "{}, message={!r}, url={!r}".format(
|
|
||||||
self.status,
|
|
||||||
self.message,
|
|
||||||
self.request_info.real_url,
|
|
||||||
)
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
args = f"{self.request_info!r}, {self.history!r}"
|
|
||||||
if self.status != 0:
|
|
||||||
args += f", status={self.status!r}"
|
|
||||||
if self.message != "":
|
|
||||||
args += f", message={self.message!r}"
|
|
||||||
if self.headers is not None:
|
|
||||||
args += f", headers={self.headers!r}"
|
|
||||||
return "{}({})".format(type(self).__name__, args)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def code(self) -> int:
|
|
||||||
warnings.warn(
|
|
||||||
"code property is deprecated, use status instead",
|
|
||||||
DeprecationWarning,
|
|
||||||
stacklevel=2,
|
|
||||||
)
|
|
||||||
return self.status
|
|
||||||
|
|
||||||
@code.setter
|
|
||||||
def code(self, value: int) -> None:
|
|
||||||
warnings.warn(
|
|
||||||
"code property is deprecated, use status instead",
|
|
||||||
DeprecationWarning,
|
|
||||||
stacklevel=2,
|
|
||||||
)
|
|
||||||
self.status = value
|
|
||||||
|
|
||||||
|
|
||||||
class ContentTypeError(ClientResponseError):
|
|
||||||
"""ContentType found is not valid."""
|
|
||||||
|
|
||||||
|
|
||||||
class WSServerHandshakeError(ClientResponseError):
|
|
||||||
"""websocket server handshake error."""
|
|
||||||
|
|
||||||
|
|
||||||
class ClientHttpProxyError(ClientResponseError):
|
|
||||||
"""HTTP proxy error.
|
|
||||||
|
|
||||||
Raised in :class:`aiohttp.connector.TCPConnector` if
|
|
||||||
proxy responds with status other than ``200 OK``
|
|
||||||
on ``CONNECT`` request.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class TooManyRedirects(ClientResponseError):
|
|
||||||
"""Client was redirected too many times."""
|
|
||||||
|
|
||||||
|
|
||||||
class ClientConnectionError(ClientError):
|
|
||||||
"""Base class for client socket errors."""
|
|
||||||
|
|
||||||
|
|
||||||
class ClientOSError(ClientConnectionError, OSError):
|
|
||||||
"""OSError error."""
|
|
||||||
|
|
||||||
|
|
||||||
class ClientConnectorError(ClientOSError):
|
|
||||||
"""Client connector error.
|
|
||||||
|
|
||||||
Raised in :class:`aiohttp.connector.TCPConnector` if
|
|
||||||
connection to proxy can not be established.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
|
|
||||||
self._conn_key = connection_key
|
|
||||||
self._os_error = os_error
|
|
||||||
super().__init__(os_error.errno, os_error.strerror)
|
|
||||||
self.args = (connection_key, os_error)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def os_error(self) -> OSError:
|
|
||||||
return self._os_error
|
|
||||||
|
|
||||||
@property
|
|
||||||
def host(self) -> str:
|
|
||||||
return self._conn_key.host
|
|
||||||
|
|
||||||
@property
|
|
||||||
def port(self) -> Optional[int]:
|
|
||||||
return self._conn_key.port
|
|
||||||
|
|
||||||
@property
|
|
||||||
def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
|
|
||||||
return self._conn_key.ssl
|
|
||||||
|
|
||||||
def __str__(self) -> str:
|
|
||||||
return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
|
|
||||||
self, self.ssl if self.ssl is not None else "default", self.strerror
|
|
||||||
)
|
|
||||||
|
|
||||||
# OSError.__reduce__ does too much black magick
|
|
||||||
__reduce__ = BaseException.__reduce__
|
|
||||||
|
|
||||||
|
|
||||||
class ClientProxyConnectionError(ClientConnectorError):
|
|
||||||
"""Proxy connection error.
|
|
||||||
|
|
||||||
Raised in :class:`aiohttp.connector.TCPConnector` if
|
|
||||||
connection to proxy can not be established.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class ServerConnectionError(ClientConnectionError):
|
|
||||||
"""Server connection errors."""
|
|
||||||
|
|
||||||
|
|
||||||
class ServerDisconnectedError(ServerConnectionError):
|
|
||||||
"""Server disconnected."""
|
|
||||||
|
|
||||||
def __init__(self, message: Optional[str] = None) -> None:
|
|
||||||
if message is None:
|
|
||||||
message = "Server disconnected"
|
|
||||||
|
|
||||||
self.args = (message,)
|
|
||||||
self.message = message
|
|
||||||
|
|
||||||
|
|
||||||
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
|
|
||||||
"""Server timeout error."""
|
|
||||||
|
|
||||||
|
|
||||||
class ServerFingerprintMismatch(ServerConnectionError):
|
|
||||||
"""SSL certificate does not match expected fingerprint."""
|
|
||||||
|
|
||||||
def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
|
|
||||||
self.expected = expected
|
|
||||||
self.got = got
|
|
||||||
self.host = host
|
|
||||||
self.port = port
|
|
||||||
self.args = (expected, got, host, port)
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
|
|
||||||
self.__class__.__name__, self.expected, self.got, self.host, self.port
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ClientPayloadError(ClientError):
|
|
||||||
"""Response payload error."""
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidURL(ClientError, ValueError):
|
|
||||||
"""Invalid URL.
|
|
||||||
|
|
||||||
URL used for fetching is malformed, e.g. it doesn't contains host
|
|
||||||
part."""
|
|
||||||
|
|
||||||
# Derive from ValueError for backward compatibility
|
|
||||||
|
|
||||||
def __init__(self, url: Any) -> None:
|
|
||||||
# The type of url is not yarl.URL because the exception can be raised
|
|
||||||
# on URL(url) call
|
|
||||||
super().__init__(url)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def url(self) -> Any:
|
|
||||||
return self.args[0]
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<{self.__class__.__name__} {self.url}>"
|
|
||||||
|
|
||||||
|
|
||||||
class ClientSSLError(ClientConnectorError):
|
|
||||||
"""Base error for ssl.*Errors."""
|
|
||||||
|
|
||||||
|
|
||||||
if ssl is not None:
|
|
||||||
cert_errors = (ssl.CertificateError,)
|
|
||||||
cert_errors_bases = (
|
|
||||||
ClientSSLError,
|
|
||||||
ssl.CertificateError,
|
|
||||||
)
|
|
||||||
|
|
||||||
ssl_errors = (ssl.SSLError,)
|
|
||||||
ssl_error_bases = (ClientSSLError, ssl.SSLError)
|
|
||||||
else: # pragma: no cover
|
|
||||||
cert_errors = tuple()
|
|
||||||
cert_errors_bases = (
|
|
||||||
ClientSSLError,
|
|
||||||
ValueError,
|
|
||||||
)
|
|
||||||
|
|
||||||
ssl_errors = tuple()
|
|
||||||
ssl_error_bases = (ClientSSLError,)
|
|
||||||
|
|
||||||
|
|
||||||
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore
|
|
||||||
"""Response ssl error."""
|
|
||||||
|
|
||||||
|
|
||||||
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
|
|
||||||
"""Response certificate error."""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, connection_key: ConnectionKey, certificate_error: Exception
|
|
||||||
) -> None:
|
|
||||||
self._conn_key = connection_key
|
|
||||||
self._certificate_error = certificate_error
|
|
||||||
self.args = (connection_key, certificate_error)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def certificate_error(self) -> Exception:
|
|
||||||
return self._certificate_error
|
|
||||||
|
|
||||||
@property
|
|
||||||
def host(self) -> str:
|
|
||||||
return self._conn_key.host
|
|
||||||
|
|
||||||
@property
|
|
||||||
def port(self) -> Optional[int]:
|
|
||||||
return self._conn_key.port
|
|
||||||
|
|
||||||
@property
|
|
||||||
def ssl(self) -> bool:
|
|
||||||
return self._conn_key.is_ssl
|
|
||||||
|
|
||||||
def __str__(self) -> str:
|
|
||||||
return (
|
|
||||||
"Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
|
|
||||||
"[{0.certificate_error.__class__.__name__}: "
|
|
||||||
"{0.certificate_error.args}]".format(self)
|
|
||||||
)
|
|
||||||
251
env/Lib/site-packages/aiohttp/client_proto.py
vendored
251
env/Lib/site-packages/aiohttp/client_proto.py
vendored
|
|
@ -1,251 +0,0 @@
|
||||||
import asyncio
|
|
||||||
from contextlib import suppress
|
|
||||||
from typing import Any, Optional, Tuple
|
|
||||||
|
|
||||||
from .base_protocol import BaseProtocol
|
|
||||||
from .client_exceptions import (
|
|
||||||
ClientOSError,
|
|
||||||
ClientPayloadError,
|
|
||||||
ServerDisconnectedError,
|
|
||||||
ServerTimeoutError,
|
|
||||||
)
|
|
||||||
from .helpers import BaseTimerContext
|
|
||||||
from .http import HttpResponseParser, RawResponseMessage
|
|
||||||
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
|
|
||||||
|
|
||||||
|
|
||||||
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
|
|
||||||
"""Helper class to adapt between Protocol and StreamReader."""
|
|
||||||
|
|
||||||
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
|
|
||||||
BaseProtocol.__init__(self, loop=loop)
|
|
||||||
DataQueue.__init__(self, loop)
|
|
||||||
|
|
||||||
self._should_close = False
|
|
||||||
|
|
||||||
self._payload = None
|
|
||||||
self._skip_payload = False
|
|
||||||
self._payload_parser = None
|
|
||||||
|
|
||||||
self._timer = None
|
|
||||||
|
|
||||||
self._tail = b""
|
|
||||||
self._upgraded = False
|
|
||||||
self._parser = None # type: Optional[HttpResponseParser]
|
|
||||||
|
|
||||||
self._read_timeout = None # type: Optional[float]
|
|
||||||
self._read_timeout_handle = None # type: Optional[asyncio.TimerHandle]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def upgraded(self) -> bool:
|
|
||||||
return self._upgraded
|
|
||||||
|
|
||||||
@property
|
|
||||||
def should_close(self) -> bool:
|
|
||||||
if self._payload is not None and not self._payload.is_eof() or self._upgraded:
|
|
||||||
return True
|
|
||||||
|
|
||||||
return (
|
|
||||||
self._should_close
|
|
||||||
or self._upgraded
|
|
||||||
or self.exception() is not None
|
|
||||||
or self._payload_parser is not None
|
|
||||||
or len(self) > 0
|
|
||||||
or bool(self._tail)
|
|
||||||
)
|
|
||||||
|
|
||||||
def force_close(self) -> None:
|
|
||||||
self._should_close = True
|
|
||||||
|
|
||||||
def close(self) -> None:
|
|
||||||
transport = self.transport
|
|
||||||
if transport is not None:
|
|
||||||
transport.close()
|
|
||||||
self.transport = None
|
|
||||||
self._payload = None
|
|
||||||
self._drop_timeout()
|
|
||||||
|
|
||||||
def is_connected(self) -> bool:
|
|
||||||
return self.transport is not None and not self.transport.is_closing()
|
|
||||||
|
|
||||||
def connection_lost(self, exc: Optional[BaseException]) -> None:
|
|
||||||
self._drop_timeout()
|
|
||||||
|
|
||||||
if self._payload_parser is not None:
|
|
||||||
with suppress(Exception):
|
|
||||||
self._payload_parser.feed_eof()
|
|
||||||
|
|
||||||
uncompleted = None
|
|
||||||
if self._parser is not None:
|
|
||||||
try:
|
|
||||||
uncompleted = self._parser.feed_eof()
|
|
||||||
except Exception:
|
|
||||||
if self._payload is not None:
|
|
||||||
self._payload.set_exception(
|
|
||||||
ClientPayloadError("Response payload is not completed")
|
|
||||||
)
|
|
||||||
|
|
||||||
if not self.is_eof():
|
|
||||||
if isinstance(exc, OSError):
|
|
||||||
exc = ClientOSError(*exc.args)
|
|
||||||
if exc is None:
|
|
||||||
exc = ServerDisconnectedError(uncompleted)
|
|
||||||
# assigns self._should_close to True as side effect,
|
|
||||||
# we do it anyway below
|
|
||||||
self.set_exception(exc)
|
|
||||||
|
|
||||||
self._should_close = True
|
|
||||||
self._parser = None
|
|
||||||
self._payload = None
|
|
||||||
self._payload_parser = None
|
|
||||||
self._reading_paused = False
|
|
||||||
|
|
||||||
super().connection_lost(exc)
|
|
||||||
|
|
||||||
def eof_received(self) -> None:
|
|
||||||
# should call parser.feed_eof() most likely
|
|
||||||
self._drop_timeout()
|
|
||||||
|
|
||||||
def pause_reading(self) -> None:
|
|
||||||
super().pause_reading()
|
|
||||||
self._drop_timeout()
|
|
||||||
|
|
||||||
def resume_reading(self) -> None:
|
|
||||||
super().resume_reading()
|
|
||||||
self._reschedule_timeout()
|
|
||||||
|
|
||||||
def set_exception(self, exc: BaseException) -> None:
|
|
||||||
self._should_close = True
|
|
||||||
self._drop_timeout()
|
|
||||||
super().set_exception(exc)
|
|
||||||
|
|
||||||
def set_parser(self, parser: Any, payload: Any) -> None:
|
|
||||||
# TODO: actual types are:
|
|
||||||
# parser: WebSocketReader
|
|
||||||
# payload: FlowControlDataQueue
|
|
||||||
# but they are not generi enough
|
|
||||||
# Need an ABC for both types
|
|
||||||
self._payload = payload
|
|
||||||
self._payload_parser = parser
|
|
||||||
|
|
||||||
self._drop_timeout()
|
|
||||||
|
|
||||||
if self._tail:
|
|
||||||
data, self._tail = self._tail, b""
|
|
||||||
self.data_received(data)
|
|
||||||
|
|
||||||
def set_response_params(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
timer: Optional[BaseTimerContext] = None,
|
|
||||||
skip_payload: bool = False,
|
|
||||||
read_until_eof: bool = False,
|
|
||||||
auto_decompress: bool = True,
|
|
||||||
read_timeout: Optional[float] = None,
|
|
||||||
read_bufsize: int = 2 ** 16
|
|
||||||
) -> None:
|
|
||||||
self._skip_payload = skip_payload
|
|
||||||
|
|
||||||
self._read_timeout = read_timeout
|
|
||||||
self._reschedule_timeout()
|
|
||||||
|
|
||||||
self._parser = HttpResponseParser(
|
|
||||||
self,
|
|
||||||
self._loop,
|
|
||||||
read_bufsize,
|
|
||||||
timer=timer,
|
|
||||||
payload_exception=ClientPayloadError,
|
|
||||||
response_with_body=not skip_payload,
|
|
||||||
read_until_eof=read_until_eof,
|
|
||||||
auto_decompress=auto_decompress,
|
|
||||||
)
|
|
||||||
|
|
||||||
if self._tail:
|
|
||||||
data, self._tail = self._tail, b""
|
|
||||||
self.data_received(data)
|
|
||||||
|
|
||||||
def _drop_timeout(self) -> None:
|
|
||||||
if self._read_timeout_handle is not None:
|
|
||||||
self._read_timeout_handle.cancel()
|
|
||||||
self._read_timeout_handle = None
|
|
||||||
|
|
||||||
def _reschedule_timeout(self) -> None:
|
|
||||||
timeout = self._read_timeout
|
|
||||||
if self._read_timeout_handle is not None:
|
|
||||||
self._read_timeout_handle.cancel()
|
|
||||||
|
|
||||||
if timeout:
|
|
||||||
self._read_timeout_handle = self._loop.call_later(
|
|
||||||
timeout, self._on_read_timeout
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
self._read_timeout_handle = None
|
|
||||||
|
|
||||||
def _on_read_timeout(self) -> None:
|
|
||||||
exc = ServerTimeoutError("Timeout on reading data from socket")
|
|
||||||
self.set_exception(exc)
|
|
||||||
if self._payload is not None:
|
|
||||||
self._payload.set_exception(exc)
|
|
||||||
|
|
||||||
def data_received(self, data: bytes) -> None:
|
|
||||||
self._reschedule_timeout()
|
|
||||||
|
|
||||||
if not data:
|
|
||||||
return
|
|
||||||
|
|
||||||
# custom payload parser
|
|
||||||
if self._payload_parser is not None:
|
|
||||||
eof, tail = self._payload_parser.feed_data(data)
|
|
||||||
if eof:
|
|
||||||
self._payload = None
|
|
||||||
self._payload_parser = None
|
|
||||||
|
|
||||||
if tail:
|
|
||||||
self.data_received(tail)
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
if self._upgraded or self._parser is None:
|
|
||||||
# i.e. websocket connection, websocket parser is not set yet
|
|
||||||
self._tail += data
|
|
||||||
else:
|
|
||||||
# parse http messages
|
|
||||||
try:
|
|
||||||
messages, upgraded, tail = self._parser.feed_data(data)
|
|
||||||
except BaseException as exc:
|
|
||||||
if self.transport is not None:
|
|
||||||
# connection.release() could be called BEFORE
|
|
||||||
# data_received(), the transport is already
|
|
||||||
# closed in this case
|
|
||||||
self.transport.close()
|
|
||||||
# should_close is True after the call
|
|
||||||
self.set_exception(exc)
|
|
||||||
return
|
|
||||||
|
|
||||||
self._upgraded = upgraded
|
|
||||||
|
|
||||||
payload = None
|
|
||||||
for message, payload in messages:
|
|
||||||
if message.should_close:
|
|
||||||
self._should_close = True
|
|
||||||
|
|
||||||
self._payload = payload
|
|
||||||
|
|
||||||
if self._skip_payload or message.code in (204, 304):
|
|
||||||
self.feed_data((message, EMPTY_PAYLOAD), 0) # type: ignore
|
|
||||||
else:
|
|
||||||
self.feed_data((message, payload), 0)
|
|
||||||
if payload is not None:
|
|
||||||
# new message(s) was processed
|
|
||||||
# register timeout handler unsubscribing
|
|
||||||
# either on end-of-stream or immediately for
|
|
||||||
# EMPTY_PAYLOAD
|
|
||||||
if payload is not EMPTY_PAYLOAD:
|
|
||||||
payload.on_eof(self._drop_timeout)
|
|
||||||
else:
|
|
||||||
self._drop_timeout()
|
|
||||||
|
|
||||||
if tail:
|
|
||||||
if upgraded:
|
|
||||||
self.data_received(tail)
|
|
||||||
else:
|
|
||||||
self._tail = tail
|
|
||||||
1127
env/Lib/site-packages/aiohttp/client_reqrep.py
vendored
1127
env/Lib/site-packages/aiohttp/client_reqrep.py
vendored
File diff suppressed because it is too large
Load diff
301
env/Lib/site-packages/aiohttp/client_ws.py
vendored
301
env/Lib/site-packages/aiohttp/client_ws.py
vendored
|
|
@ -1,301 +0,0 @@
|
||||||
"""WebSocket client for asyncio."""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
from typing import Any, Optional
|
|
||||||
|
|
||||||
import async_timeout
|
|
||||||
|
|
||||||
from .client_exceptions import ClientError
|
|
||||||
from .client_reqrep import ClientResponse
|
|
||||||
from .helpers import call_later, set_result
|
|
||||||
from .http import (
|
|
||||||
WS_CLOSED_MESSAGE,
|
|
||||||
WS_CLOSING_MESSAGE,
|
|
||||||
WebSocketError,
|
|
||||||
WSMessage,
|
|
||||||
WSMsgType,
|
|
||||||
)
|
|
||||||
from .http_websocket import WebSocketWriter # WSMessage
|
|
||||||
from .streams import EofStream, FlowControlDataQueue
|
|
||||||
from .typedefs import (
|
|
||||||
DEFAULT_JSON_DECODER,
|
|
||||||
DEFAULT_JSON_ENCODER,
|
|
||||||
JSONDecoder,
|
|
||||||
JSONEncoder,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ClientWebSocketResponse:
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
reader: "FlowControlDataQueue[WSMessage]",
|
|
||||||
writer: WebSocketWriter,
|
|
||||||
protocol: Optional[str],
|
|
||||||
response: ClientResponse,
|
|
||||||
timeout: float,
|
|
||||||
autoclose: bool,
|
|
||||||
autoping: bool,
|
|
||||||
loop: asyncio.AbstractEventLoop,
|
|
||||||
*,
|
|
||||||
receive_timeout: Optional[float] = None,
|
|
||||||
heartbeat: Optional[float] = None,
|
|
||||||
compress: int = 0,
|
|
||||||
client_notakeover: bool = False,
|
|
||||||
) -> None:
|
|
||||||
self._response = response
|
|
||||||
self._conn = response.connection
|
|
||||||
|
|
||||||
self._writer = writer
|
|
||||||
self._reader = reader
|
|
||||||
self._protocol = protocol
|
|
||||||
self._closed = False
|
|
||||||
self._closing = False
|
|
||||||
self._close_code = None # type: Optional[int]
|
|
||||||
self._timeout = timeout
|
|
||||||
self._receive_timeout = receive_timeout
|
|
||||||
self._autoclose = autoclose
|
|
||||||
self._autoping = autoping
|
|
||||||
self._heartbeat = heartbeat
|
|
||||||
self._heartbeat_cb = None
|
|
||||||
if heartbeat is not None:
|
|
||||||
self._pong_heartbeat = heartbeat / 2.0
|
|
||||||
self._pong_response_cb = None
|
|
||||||
self._loop = loop
|
|
||||||
self._waiting = None # type: Optional[asyncio.Future[bool]]
|
|
||||||
self._exception = None # type: Optional[BaseException]
|
|
||||||
self._compress = compress
|
|
||||||
self._client_notakeover = client_notakeover
|
|
||||||
|
|
||||||
self._reset_heartbeat()
|
|
||||||
|
|
||||||
def _cancel_heartbeat(self) -> None:
|
|
||||||
if self._pong_response_cb is not None:
|
|
||||||
self._pong_response_cb.cancel()
|
|
||||||
self._pong_response_cb = None
|
|
||||||
|
|
||||||
if self._heartbeat_cb is not None:
|
|
||||||
self._heartbeat_cb.cancel()
|
|
||||||
self._heartbeat_cb = None
|
|
||||||
|
|
||||||
def _reset_heartbeat(self) -> None:
|
|
||||||
self._cancel_heartbeat()
|
|
||||||
|
|
||||||
if self._heartbeat is not None:
|
|
||||||
self._heartbeat_cb = call_later(
|
|
||||||
self._send_heartbeat, self._heartbeat, self._loop
|
|
||||||
)
|
|
||||||
|
|
||||||
def _send_heartbeat(self) -> None:
|
|
||||||
if self._heartbeat is not None and not self._closed:
|
|
||||||
# fire-and-forget a task is not perfect but maybe ok for
|
|
||||||
# sending ping. Otherwise we need a long-living heartbeat
|
|
||||||
# task in the class.
|
|
||||||
self._loop.create_task(self._writer.ping())
|
|
||||||
|
|
||||||
if self._pong_response_cb is not None:
|
|
||||||
self._pong_response_cb.cancel()
|
|
||||||
self._pong_response_cb = call_later(
|
|
||||||
self._pong_not_received, self._pong_heartbeat, self._loop
|
|
||||||
)
|
|
||||||
|
|
||||||
def _pong_not_received(self) -> None:
|
|
||||||
if not self._closed:
|
|
||||||
self._closed = True
|
|
||||||
self._close_code = 1006
|
|
||||||
self._exception = asyncio.TimeoutError()
|
|
||||||
self._response.close()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def closed(self) -> bool:
|
|
||||||
return self._closed
|
|
||||||
|
|
||||||
@property
|
|
||||||
def close_code(self) -> Optional[int]:
|
|
||||||
return self._close_code
|
|
||||||
|
|
||||||
@property
|
|
||||||
def protocol(self) -> Optional[str]:
|
|
||||||
return self._protocol
|
|
||||||
|
|
||||||
@property
|
|
||||||
def compress(self) -> int:
|
|
||||||
return self._compress
|
|
||||||
|
|
||||||
@property
|
|
||||||
def client_notakeover(self) -> bool:
|
|
||||||
return self._client_notakeover
|
|
||||||
|
|
||||||
def get_extra_info(self, name: str, default: Any = None) -> Any:
|
|
||||||
"""extra info from connection transport"""
|
|
||||||
conn = self._response.connection
|
|
||||||
if conn is None:
|
|
||||||
return default
|
|
||||||
transport = conn.transport
|
|
||||||
if transport is None:
|
|
||||||
return default
|
|
||||||
return transport.get_extra_info(name, default)
|
|
||||||
|
|
||||||
def exception(self) -> Optional[BaseException]:
|
|
||||||
return self._exception
|
|
||||||
|
|
||||||
async def ping(self, message: bytes = b"") -> None:
|
|
||||||
await self._writer.ping(message)
|
|
||||||
|
|
||||||
async def pong(self, message: bytes = b"") -> None:
|
|
||||||
await self._writer.pong(message)
|
|
||||||
|
|
||||||
async def send_str(self, data: str, compress: Optional[int] = None) -> None:
|
|
||||||
if not isinstance(data, str):
|
|
||||||
raise TypeError("data argument must be str (%r)" % type(data))
|
|
||||||
await self._writer.send(data, binary=False, compress=compress)
|
|
||||||
|
|
||||||
async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
|
|
||||||
if not isinstance(data, (bytes, bytearray, memoryview)):
|
|
||||||
raise TypeError("data argument must be byte-ish (%r)" % type(data))
|
|
||||||
await self._writer.send(data, binary=True, compress=compress)
|
|
||||||
|
|
||||||
async def send_json(
|
|
||||||
self,
|
|
||||||
data: Any,
|
|
||||||
compress: Optional[int] = None,
|
|
||||||
*,
|
|
||||||
dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
|
|
||||||
) -> None:
|
|
||||||
await self.send_str(dumps(data), compress=compress)
|
|
||||||
|
|
||||||
async def close(self, *, code: int = 1000, message: bytes = b"") -> bool:
|
|
||||||
# we need to break `receive()` cycle first,
|
|
||||||
# `close()` may be called from different task
|
|
||||||
if self._waiting is not None and not self._closed:
|
|
||||||
self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
|
|
||||||
await self._waiting
|
|
||||||
|
|
||||||
if not self._closed:
|
|
||||||
self._cancel_heartbeat()
|
|
||||||
self._closed = True
|
|
||||||
try:
|
|
||||||
await self._writer.close(code, message)
|
|
||||||
except asyncio.CancelledError:
|
|
||||||
self._close_code = 1006
|
|
||||||
self._response.close()
|
|
||||||
raise
|
|
||||||
except Exception as exc:
|
|
||||||
self._close_code = 1006
|
|
||||||
self._exception = exc
|
|
||||||
self._response.close()
|
|
||||||
return True
|
|
||||||
|
|
||||||
if self._closing:
|
|
||||||
self._response.close()
|
|
||||||
return True
|
|
||||||
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
with async_timeout.timeout(self._timeout, loop=self._loop):
|
|
||||||
msg = await self._reader.read()
|
|
||||||
except asyncio.CancelledError:
|
|
||||||
self._close_code = 1006
|
|
||||||
self._response.close()
|
|
||||||
raise
|
|
||||||
except Exception as exc:
|
|
||||||
self._close_code = 1006
|
|
||||||
self._exception = exc
|
|
||||||
self._response.close()
|
|
||||||
return True
|
|
||||||
|
|
||||||
if msg.type == WSMsgType.CLOSE:
|
|
||||||
self._close_code = msg.data
|
|
||||||
self._response.close()
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def receive(self, timeout: Optional[float] = None) -> WSMessage:
|
|
||||||
while True:
|
|
||||||
if self._waiting is not None:
|
|
||||||
raise RuntimeError("Concurrent call to receive() is not allowed")
|
|
||||||
|
|
||||||
if self._closed:
|
|
||||||
return WS_CLOSED_MESSAGE
|
|
||||||
elif self._closing:
|
|
||||||
await self.close()
|
|
||||||
return WS_CLOSED_MESSAGE
|
|
||||||
|
|
||||||
try:
|
|
||||||
self._waiting = self._loop.create_future()
|
|
||||||
try:
|
|
||||||
with async_timeout.timeout(
|
|
||||||
timeout or self._receive_timeout, loop=self._loop
|
|
||||||
):
|
|
||||||
msg = await self._reader.read()
|
|
||||||
self._reset_heartbeat()
|
|
||||||
finally:
|
|
||||||
waiter = self._waiting
|
|
||||||
self._waiting = None
|
|
||||||
set_result(waiter, True)
|
|
||||||
except (asyncio.CancelledError, asyncio.TimeoutError):
|
|
||||||
self._close_code = 1006
|
|
||||||
raise
|
|
||||||
except EofStream:
|
|
||||||
self._close_code = 1000
|
|
||||||
await self.close()
|
|
||||||
return WSMessage(WSMsgType.CLOSED, None, None)
|
|
||||||
except ClientError:
|
|
||||||
self._closed = True
|
|
||||||
self._close_code = 1006
|
|
||||||
return WS_CLOSED_MESSAGE
|
|
||||||
except WebSocketError as exc:
|
|
||||||
self._close_code = exc.code
|
|
||||||
await self.close(code=exc.code)
|
|
||||||
return WSMessage(WSMsgType.ERROR, exc, None)
|
|
||||||
except Exception as exc:
|
|
||||||
self._exception = exc
|
|
||||||
self._closing = True
|
|
||||||
self._close_code = 1006
|
|
||||||
await self.close()
|
|
||||||
return WSMessage(WSMsgType.ERROR, exc, None)
|
|
||||||
|
|
||||||
if msg.type == WSMsgType.CLOSE:
|
|
||||||
self._closing = True
|
|
||||||
self._close_code = msg.data
|
|
||||||
if not self._closed and self._autoclose:
|
|
||||||
await self.close()
|
|
||||||
elif msg.type == WSMsgType.CLOSING:
|
|
||||||
self._closing = True
|
|
||||||
elif msg.type == WSMsgType.PING and self._autoping:
|
|
||||||
await self.pong(msg.data)
|
|
||||||
continue
|
|
||||||
elif msg.type == WSMsgType.PONG and self._autoping:
|
|
||||||
continue
|
|
||||||
|
|
||||||
return msg
|
|
||||||
|
|
||||||
async def receive_str(self, *, timeout: Optional[float] = None) -> str:
|
|
||||||
msg = await self.receive(timeout)
|
|
||||||
if msg.type != WSMsgType.TEXT:
|
|
||||||
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
|
|
||||||
return msg.data
|
|
||||||
|
|
||||||
async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
|
|
||||||
msg = await self.receive(timeout)
|
|
||||||
if msg.type != WSMsgType.BINARY:
|
|
||||||
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
|
|
||||||
return msg.data
|
|
||||||
|
|
||||||
async def receive_json(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
loads: JSONDecoder = DEFAULT_JSON_DECODER,
|
|
||||||
timeout: Optional[float] = None,
|
|
||||||
) -> Any:
|
|
||||||
data = await self.receive_str(timeout=timeout)
|
|
||||||
return loads(data)
|
|
||||||
|
|
||||||
def __aiter__(self) -> "ClientWebSocketResponse":
|
|
||||||
return self
|
|
||||||
|
|
||||||
async def __anext__(self) -> WSMessage:
|
|
||||||
msg = await self.receive()
|
|
||||||
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
|
|
||||||
raise StopAsyncIteration
|
|
||||||
return msg
|
|
||||||
1262
env/Lib/site-packages/aiohttp/connector.py
vendored
1262
env/Lib/site-packages/aiohttp/connector.py
vendored
File diff suppressed because it is too large
Load diff
382
env/Lib/site-packages/aiohttp/cookiejar.py
vendored
382
env/Lib/site-packages/aiohttp/cookiejar.py
vendored
|
|
@ -1,382 +0,0 @@
|
||||||
import asyncio
|
|
||||||
import datetime
|
|
||||||
import os # noqa
|
|
||||||
import pathlib
|
|
||||||
import pickle
|
|
||||||
import re
|
|
||||||
from collections import defaultdict
|
|
||||||
from http.cookies import BaseCookie, Morsel, SimpleCookie
|
|
||||||
from typing import ( # noqa
|
|
||||||
DefaultDict,
|
|
||||||
Dict,
|
|
||||||
Iterable,
|
|
||||||
Iterator,
|
|
||||||
Mapping,
|
|
||||||
Optional,
|
|
||||||
Set,
|
|
||||||
Tuple,
|
|
||||||
Union,
|
|
||||||
cast,
|
|
||||||
)
|
|
||||||
|
|
||||||
from yarl import URL
|
|
||||||
|
|
||||||
from .abc import AbstractCookieJar
|
|
||||||
from .helpers import is_ip_address, next_whole_second
|
|
||||||
from .typedefs import LooseCookies, PathLike
|
|
||||||
|
|
||||||
__all__ = ("CookieJar", "DummyCookieJar")
|
|
||||||
|
|
||||||
|
|
||||||
CookieItem = Union[str, "Morsel[str]"]
|
|
||||||
|
|
||||||
|
|
||||||
class CookieJar(AbstractCookieJar):
|
|
||||||
"""Implements cookie storage adhering to RFC 6265."""
|
|
||||||
|
|
||||||
DATE_TOKENS_RE = re.compile(
|
|
||||||
r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
|
|
||||||
r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
|
|
||||||
)
|
|
||||||
|
|
||||||
DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
|
|
||||||
|
|
||||||
DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
|
|
||||||
|
|
||||||
DATE_MONTH_RE = re.compile(
|
|
||||||
"(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
|
|
||||||
re.I,
|
|
||||||
)
|
|
||||||
|
|
||||||
DATE_YEAR_RE = re.compile(r"(\d{2,4})")
|
|
||||||
|
|
||||||
MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)
|
|
||||||
|
|
||||||
MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2 ** 31 - 1)
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
unsafe: bool = False,
|
|
||||||
quote_cookie: bool = True,
|
|
||||||
loop: Optional[asyncio.AbstractEventLoop] = None
|
|
||||||
) -> None:
|
|
||||||
super().__init__(loop=loop)
|
|
||||||
self._cookies = defaultdict(
|
|
||||||
SimpleCookie
|
|
||||||
) # type: DefaultDict[str, SimpleCookie[str]]
|
|
||||||
self._host_only_cookies = set() # type: Set[Tuple[str, str]]
|
|
||||||
self._unsafe = unsafe
|
|
||||||
self._quote_cookie = quote_cookie
|
|
||||||
self._next_expiration = next_whole_second()
|
|
||||||
self._expirations = {} # type: Dict[Tuple[str, str], datetime.datetime]
|
|
||||||
# #4515: datetime.max may not be representable on 32-bit platforms
|
|
||||||
self._max_time = self.MAX_TIME
|
|
||||||
try:
|
|
||||||
self._max_time.timestamp()
|
|
||||||
except OverflowError:
|
|
||||||
self._max_time = self.MAX_32BIT_TIME
|
|
||||||
|
|
||||||
def save(self, file_path: PathLike) -> None:
|
|
||||||
file_path = pathlib.Path(file_path)
|
|
||||||
with file_path.open(mode="wb") as f:
|
|
||||||
pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
|
|
||||||
|
|
||||||
def load(self, file_path: PathLike) -> None:
|
|
||||||
file_path = pathlib.Path(file_path)
|
|
||||||
with file_path.open(mode="rb") as f:
|
|
||||||
self._cookies = pickle.load(f)
|
|
||||||
|
|
||||||
def clear(self) -> None:
|
|
||||||
self._cookies.clear()
|
|
||||||
self._host_only_cookies.clear()
|
|
||||||
self._next_expiration = next_whole_second()
|
|
||||||
self._expirations.clear()
|
|
||||||
|
|
||||||
def __iter__(self) -> "Iterator[Morsel[str]]":
|
|
||||||
self._do_expiration()
|
|
||||||
for val in self._cookies.values():
|
|
||||||
yield from val.values()
|
|
||||||
|
|
||||||
def __len__(self) -> int:
|
|
||||||
return sum(1 for i in self)
|
|
||||||
|
|
||||||
def _do_expiration(self) -> None:
|
|
||||||
now = datetime.datetime.now(datetime.timezone.utc)
|
|
||||||
if self._next_expiration > now:
|
|
||||||
return
|
|
||||||
if not self._expirations:
|
|
||||||
return
|
|
||||||
next_expiration = self._max_time
|
|
||||||
to_del = []
|
|
||||||
cookies = self._cookies
|
|
||||||
expirations = self._expirations
|
|
||||||
for (domain, name), when in expirations.items():
|
|
||||||
if when <= now:
|
|
||||||
cookies[domain].pop(name, None)
|
|
||||||
to_del.append((domain, name))
|
|
||||||
self._host_only_cookies.discard((domain, name))
|
|
||||||
else:
|
|
||||||
next_expiration = min(next_expiration, when)
|
|
||||||
for key in to_del:
|
|
||||||
del expirations[key]
|
|
||||||
|
|
||||||
try:
|
|
||||||
self._next_expiration = next_expiration.replace(
|
|
||||||
microsecond=0
|
|
||||||
) + datetime.timedelta(seconds=1)
|
|
||||||
except OverflowError:
|
|
||||||
self._next_expiration = self._max_time
|
|
||||||
|
|
||||||
def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
|
|
||||||
self._next_expiration = min(self._next_expiration, when)
|
|
||||||
self._expirations[(domain, name)] = when
|
|
||||||
|
|
||||||
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
|
|
||||||
"""Update cookies."""
|
|
||||||
hostname = response_url.raw_host
|
|
||||||
|
|
||||||
if not self._unsafe and is_ip_address(hostname):
|
|
||||||
# Don't accept cookies from IPs
|
|
||||||
return
|
|
||||||
|
|
||||||
if isinstance(cookies, Mapping):
|
|
||||||
cookies = cookies.items()
|
|
||||||
|
|
||||||
for name, cookie in cookies:
|
|
||||||
if not isinstance(cookie, Morsel):
|
|
||||||
tmp = SimpleCookie() # type: SimpleCookie[str]
|
|
||||||
tmp[name] = cookie # type: ignore
|
|
||||||
cookie = tmp[name]
|
|
||||||
|
|
||||||
domain = cookie["domain"]
|
|
||||||
|
|
||||||
# ignore domains with trailing dots
|
|
||||||
if domain.endswith("."):
|
|
||||||
domain = ""
|
|
||||||
del cookie["domain"]
|
|
||||||
|
|
||||||
if not domain and hostname is not None:
|
|
||||||
# Set the cookie's domain to the response hostname
|
|
||||||
# and set its host-only-flag
|
|
||||||
self._host_only_cookies.add((hostname, name))
|
|
||||||
domain = cookie["domain"] = hostname
|
|
||||||
|
|
||||||
if domain.startswith("."):
|
|
||||||
# Remove leading dot
|
|
||||||
domain = domain[1:]
|
|
||||||
cookie["domain"] = domain
|
|
||||||
|
|
||||||
if hostname and not self._is_domain_match(domain, hostname):
|
|
||||||
# Setting cookies for different domains is not allowed
|
|
||||||
continue
|
|
||||||
|
|
||||||
path = cookie["path"]
|
|
||||||
if not path or not path.startswith("/"):
|
|
||||||
# Set the cookie's path to the response path
|
|
||||||
path = response_url.path
|
|
||||||
if not path.startswith("/"):
|
|
||||||
path = "/"
|
|
||||||
else:
|
|
||||||
# Cut everything from the last slash to the end
|
|
||||||
path = "/" + path[1 : path.rfind("/")]
|
|
||||||
cookie["path"] = path
|
|
||||||
|
|
||||||
max_age = cookie["max-age"]
|
|
||||||
if max_age:
|
|
||||||
try:
|
|
||||||
delta_seconds = int(max_age)
|
|
||||||
try:
|
|
||||||
max_age_expiration = datetime.datetime.now(
|
|
||||||
datetime.timezone.utc
|
|
||||||
) + datetime.timedelta(seconds=delta_seconds)
|
|
||||||
except OverflowError:
|
|
||||||
max_age_expiration = self._max_time
|
|
||||||
self._expire_cookie(max_age_expiration, domain, name)
|
|
||||||
except ValueError:
|
|
||||||
cookie["max-age"] = ""
|
|
||||||
|
|
||||||
else:
|
|
||||||
expires = cookie["expires"]
|
|
||||||
if expires:
|
|
||||||
expire_time = self._parse_date(expires)
|
|
||||||
if expire_time:
|
|
||||||
self._expire_cookie(expire_time, domain, name)
|
|
||||||
else:
|
|
||||||
cookie["expires"] = ""
|
|
||||||
|
|
||||||
self._cookies[domain][name] = cookie
|
|
||||||
|
|
||||||
self._do_expiration()
|
|
||||||
|
|
||||||
def filter_cookies(
|
|
||||||
self, request_url: URL = URL()
|
|
||||||
) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
|
|
||||||
"""Returns this jar's cookies filtered by their attributes."""
|
|
||||||
self._do_expiration()
|
|
||||||
request_url = URL(request_url)
|
|
||||||
filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
|
|
||||||
SimpleCookie() if self._quote_cookie else BaseCookie()
|
|
||||||
)
|
|
||||||
hostname = request_url.raw_host or ""
|
|
||||||
is_not_secure = request_url.scheme not in ("https", "wss")
|
|
||||||
|
|
||||||
for cookie in self:
|
|
||||||
name = cookie.key
|
|
||||||
domain = cookie["domain"]
|
|
||||||
|
|
||||||
# Send shared cookies
|
|
||||||
if not domain:
|
|
||||||
filtered[name] = cookie.value
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not self._unsafe and is_ip_address(hostname):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if (domain, name) in self._host_only_cookies:
|
|
||||||
if domain != hostname:
|
|
||||||
continue
|
|
||||||
elif not self._is_domain_match(domain, hostname):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not self._is_path_match(request_url.path, cookie["path"]):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if is_not_secure and cookie["secure"]:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# It's critical we use the Morsel so the coded_value
|
|
||||||
# (based on cookie version) is preserved
|
|
||||||
mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
|
|
||||||
mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
|
|
||||||
filtered[name] = mrsl_val
|
|
||||||
|
|
||||||
return filtered
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _is_domain_match(domain: str, hostname: str) -> bool:
|
|
||||||
"""Implements domain matching adhering to RFC 6265."""
|
|
||||||
if hostname == domain:
|
|
||||||
return True
|
|
||||||
|
|
||||||
if not hostname.endswith(domain):
|
|
||||||
return False
|
|
||||||
|
|
||||||
non_matching = hostname[: -len(domain)]
|
|
||||||
|
|
||||||
if not non_matching.endswith("."):
|
|
||||||
return False
|
|
||||||
|
|
||||||
return not is_ip_address(hostname)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _is_path_match(req_path: str, cookie_path: str) -> bool:
|
|
||||||
"""Implements path matching adhering to RFC 6265."""
|
|
||||||
if not req_path.startswith("/"):
|
|
||||||
req_path = "/"
|
|
||||||
|
|
||||||
if req_path == cookie_path:
|
|
||||||
return True
|
|
||||||
|
|
||||||
if not req_path.startswith(cookie_path):
|
|
||||||
return False
|
|
||||||
|
|
||||||
if cookie_path.endswith("/"):
|
|
||||||
return True
|
|
||||||
|
|
||||||
non_matching = req_path[len(cookie_path) :]
|
|
||||||
|
|
||||||
return non_matching.startswith("/")
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
|
|
||||||
"""Implements date string parsing adhering to RFC 6265."""
|
|
||||||
if not date_str:
|
|
||||||
return None
|
|
||||||
|
|
||||||
found_time = False
|
|
||||||
found_day = False
|
|
||||||
found_month = False
|
|
||||||
found_year = False
|
|
||||||
|
|
||||||
hour = minute = second = 0
|
|
||||||
day = 0
|
|
||||||
month = 0
|
|
||||||
year = 0
|
|
||||||
|
|
||||||
for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
|
|
||||||
|
|
||||||
token = token_match.group("token")
|
|
||||||
|
|
||||||
if not found_time:
|
|
||||||
time_match = cls.DATE_HMS_TIME_RE.match(token)
|
|
||||||
if time_match:
|
|
||||||
found_time = True
|
|
||||||
hour, minute, second = [int(s) for s in time_match.groups()]
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not found_day:
|
|
||||||
day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
|
|
||||||
if day_match:
|
|
||||||
found_day = True
|
|
||||||
day = int(day_match.group())
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not found_month:
|
|
||||||
month_match = cls.DATE_MONTH_RE.match(token)
|
|
||||||
if month_match:
|
|
||||||
found_month = True
|
|
||||||
assert month_match.lastindex is not None
|
|
||||||
month = month_match.lastindex
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not found_year:
|
|
||||||
year_match = cls.DATE_YEAR_RE.match(token)
|
|
||||||
if year_match:
|
|
||||||
found_year = True
|
|
||||||
year = int(year_match.group())
|
|
||||||
|
|
||||||
if 70 <= year <= 99:
|
|
||||||
year += 1900
|
|
||||||
elif 0 <= year <= 69:
|
|
||||||
year += 2000
|
|
||||||
|
|
||||||
if False in (found_day, found_month, found_year, found_time):
|
|
||||||
return None
|
|
||||||
|
|
||||||
if not 1 <= day <= 31:
|
|
||||||
return None
|
|
||||||
|
|
||||||
if year < 1601 or hour > 23 or minute > 59 or second > 59:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return datetime.datetime(
|
|
||||||
year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class DummyCookieJar(AbstractCookieJar):
    """A no-op cookie jar.

    Stores nothing and always yields an empty cookie set; useful with the
    ClientSession when no cookie processing is needed.
    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        # An always-empty generator: iterating the jar yields no morsels.
        yield from ()

    def __len__(self) -> int:
        return 0

    def clear(self) -> None:
        # Nothing is ever stored, so there is nothing to clear.
        pass

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        # Incoming cookies are deliberately discarded.
        pass

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        # Every request gets an empty cookie set.
        return SimpleCookie()
|
|
||||||
170
env/Lib/site-packages/aiohttp/formdata.py
vendored
170
env/Lib/site-packages/aiohttp/formdata.py
vendored
|
|
@ -1,170 +0,0 @@
|
||||||
import io
|
|
||||||
from typing import Any, Iterable, List, Optional
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
|
|
||||||
from multidict import MultiDict, MultiDictProxy
|
|
||||||
|
|
||||||
from . import hdrs, multipart, payload
|
|
||||||
from .helpers import guess_filename
|
|
||||||
from .payload import Payload
|
|
||||||
|
|
||||||
__all__ = ("FormData",)
|
|
||||||
|
|
||||||
|
|
||||||
class FormData:
    """Helper class for multipart/form-data and
    application/x-www-form-urlencoded body generation."""

    def __init__(
        self,
        fields: Iterable[Any] = (),
        quote_fields: bool = True,
        charset: Optional[str] = None,
    ) -> None:
        # Writer used only when the form must be sent as multipart.
        self._writer = multipart.MultipartWriter("form-data")
        # Each entry is a (disposition-params, headers, value) triple.
        self._fields = []  # type: List[Any]
        # Flips to True as soon as any field requires multipart encoding.
        self._is_multipart = False
        # Guards against serializing the multipart body twice.
        self._is_processed = False
        self._quote_fields = quote_fields
        self._charset = charset

        # Normalize the accepted input shapes (dict, single record, or
        # iterable of records) into a flat sequence for add_fields().
        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self) -> bool:
        # True when at least one field forced multipart/form-data encoding.
        return self._is_multipart

    def add_field(
        self,
        name: str,
        value: Any,
        *,
        content_type: Optional[str] = None,
        filename: Optional[str] = None,
        content_transfer_encoding: Optional[str] = None
    ) -> None:
        """Add a single form field.

        File-like values, or any explicit filename / content_type /
        content_transfer_encoding, switch the whole form to multipart.

        Raises TypeError if filename, content_type or
        content_transfer_encoding is given but is not a str.
        """
        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            # Raw bytes default to the field name as filename so they are
            # transmitted as a file part.
            if filename is None and content_transfer_encoding is None:
                filename = name

        # Content-Disposition parameters for this part.
        type_options = MultiDict({"name": name})  # type: MultiDict[str]
        if filename is not None and not isinstance(filename, str):
            raise TypeError(
                "filename must be an instance of str. " "Got: %s" % filename
            )
        if filename is None and isinstance(value, io.IOBase):
            # Derive the filename from the file object if possible.
            filename = guess_filename(value, name)
        if filename is not None:
            type_options["filename"] = filename
            self._is_multipart = True

        headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError(
                    "content_type must be an instance of str. " "Got: %s" % content_type
                )
            headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError(
                    "content_transfer_encoding must be an instance"
                    " of str. Got: %s" % content_transfer_encoding
                )
            headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
            self._is_multipart = True

        self._fields.append((type_options, headers, value))

    def add_fields(self, *fields: Any) -> None:
        """Add multiple fields: file objects, multidicts or (name, value) pairs.

        Raises TypeError for any record that is none of these shapes.
        """
        to_add = list(fields)

        while to_add:
            rec = to_add.pop(0)

            if isinstance(rec, io.IOBase):
                # Bare file object: derive the field name from the file name.
                k = guess_filename(rec, "unknown")
                self.add_field(k, rec)  # type: ignore

            elif isinstance(rec, (MultiDictProxy, MultiDict)):
                # Flatten multidicts into individual (name, value) pairs.
                to_add.extend(rec.items())

            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                k, fp = rec
                self.add_field(k, fp)  # type: ignore

            else:
                raise TypeError(
                    "Only io.IOBase, multidict and (name, file) "
                    "pairs allowed, use .add_field() for passing "
                    "more complex parameters, got {!r}".format(rec)
                )

    def _gen_form_urlencoded(self) -> payload.BytesPayload:
        """Serialize the fields as an x-www-form-urlencoded bytes payload."""
        # form data (x-www-form-urlencoded)
        data = []
        for type_options, _, value in self._fields:
            data.append((type_options["name"], value))

        charset = self._charset if self._charset is not None else "utf-8"

        # utf-8 is the implied default, so only advertise other charsets.
        if charset == "utf-8":
            content_type = "application/x-www-form-urlencoded"
        else:
            content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset

        return payload.BytesPayload(
            urlencode(data, doseq=True, encoding=charset).encode(),
            content_type=content_type,
        )

    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format"""
        if self._is_processed:
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                # Let the payload registry pick a payload class for the value.
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't likes body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        # Mark as consumed: the parts above were handed to the writer and
        # cannot be appended a second time.
        self._is_processed = True
        return self._writer

    def __call__(self) -> Payload:
        """Build and return the payload for this form's required encoding."""
        if self._is_multipart:
            return self._gen_form_data()
        else:
            return self._gen_form_urlencoded()
|
|
||||||
72
env/Lib/site-packages/aiohttp/frozenlist.py
vendored
72
env/Lib/site-packages/aiohttp/frozenlist.py
vendored
|
|
@ -1,72 +0,0 @@
|
||||||
from collections.abc import MutableSequence
|
|
||||||
from functools import total_ordering
|
|
||||||
|
|
||||||
from .helpers import NO_EXTENSIONS
|
|
||||||
|
|
||||||
|
|
||||||
@total_ordering
class FrozenList(MutableSequence):
    """A mutable sequence that can be permanently locked via freeze().

    Before freeze() is called it behaves like a plain list; afterwards
    every mutating operation raises RuntimeError.
    """

    __slots__ = ("_frozen", "_items")

    def __init__(self, items=None):
        self._frozen = False
        self._items = [] if items is None else list(items)

    @property
    def frozen(self):
        """Whether freeze() has been called on this list."""
        return self._frozen

    def freeze(self):
        """Permanently disable mutation of this list."""
        self._frozen = True

    def _guard(self):
        # Shared mutation check used by every mutating method.
        if self._frozen:
            raise RuntimeError("Cannot modify frozen list.")

    def __getitem__(self, index):
        return self._items[index]

    def __setitem__(self, index, value):
        self._guard()
        self._items[index] = value

    def __delitem__(self, index):
        self._guard()
        del self._items[index]

    def __len__(self):
        return len(self._items)

    def __iter__(self):
        return iter(self._items)

    def __reversed__(self):
        return reversed(self._items)

    def __eq__(self, other):
        return list(self) == other

    def __le__(self, other):
        # total_ordering derives __lt__/__ge__/__gt__ from this and __eq__.
        return list(self) <= other

    def insert(self, pos, item):
        self._guard()
        self._items.insert(pos, item)

    def __repr__(self):
        return f"<FrozenList(frozen={self._frozen}, {self._items!r})>"
|
|
||||||
|
|
||||||
|
|
||||||
# Keep the pure-Python implementation importable under this name even when
# the C accelerator below replaces the public FrozenList.
PyFrozenList = FrozenList

try:
    from aiohttp._frozenlist import FrozenList as CFrozenList  # type: ignore

    if not NO_EXTENSIONS:
        # Prefer the C-accelerated implementation unless extensions are
        # explicitly disabled via NO_EXTENSIONS.
        FrozenList = CFrozenList  # type: ignore
except ImportError:  # pragma: no cover
    # Extension module not built/installed; keep the pure-Python class.
    pass
|
|
||||||
46
env/Lib/site-packages/aiohttp/frozenlist.pyi
vendored
46
env/Lib/site-packages/aiohttp/frozenlist.pyi
vendored
|
|
@ -1,46 +0,0 @@
|
||||||
from typing import (
|
|
||||||
Generic,
|
|
||||||
Iterable,
|
|
||||||
Iterator,
|
|
||||||
List,
|
|
||||||
MutableSequence,
|
|
||||||
Optional,
|
|
||||||
TypeVar,
|
|
||||||
Union,
|
|
||||||
overload,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Element type held by the list.
_T = TypeVar("_T")
# Anything accepted by the FrozenList constructor.
_Arg = Union[List[_T], Iterable[_T]]
|
|
||||||
|
|
||||||
class FrozenList(MutableSequence[_T], Generic[_T]):
    # Type stub for the runtime FrozenList (frozenlist.py): a mutable
    # sequence that rejects all mutation after freeze() is called.
    def __init__(self, items: Optional[_Arg[_T]] = ...) -> None: ...
    @property
    def frozen(self) -> bool: ...
    def freeze(self) -> None: ...
    # Indexing follows list semantics: int -> element, slice -> new FrozenList.
    @overload
    def __getitem__(self, i: int) -> _T: ...
    @overload
    def __getitem__(self, s: slice) -> FrozenList[_T]: ...
    @overload
    def __setitem__(self, i: int, o: _T) -> None: ...
    @overload
    def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
    @overload
    def __delitem__(self, i: int) -> None: ...
    @overload
    def __delitem__(self, i: slice) -> None: ...
    def __len__(self) -> int: ...
    def __iter__(self) -> Iterator[_T]: ...
    def __reversed__(self) -> Iterator[_T]: ...
    # Equality accepts any object; ordering is defined against FrozenList.
    def __eq__(self, other: object) -> bool: ...
    def __le__(self, other: FrozenList[_T]) -> bool: ...
    def __ne__(self, other: object) -> bool: ...
    def __lt__(self, other: FrozenList[_T]) -> bool: ...
    def __ge__(self, other: FrozenList[_T]) -> bool: ...
    def __gt__(self, other: FrozenList[_T]) -> bool: ...
    def insert(self, pos: int, item: _T) -> None: ...
    def __repr__(self) -> str: ...
|
|
||||||
|
|
||||||
# types for C accelerators are the same
# (the C and pure-Python implementations share one interface, so all
# three public names resolve to the same stub)
CFrozenList = PyFrozenList = FrozenList
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue