Compare commits

...

47 commits

Author SHA1 Message Date
Sakimori 16654ed3ff fixed team delete confirm 2023-04-01 00:13:00 -04:00
Sakimori b598e9aa5e fixed inning number not working 2023-03-31 14:46:37 -04:00
Sakimori deb10fb3b0 Merge branch 'master' of https://github.com/Sakimori/matteo-the-prestige 2023-03-31 14:33:17 -04:00
Sakimori a22d70cb0c Update .gitignore 2023-03-31 14:32:05 -04:00
Sakimori 68a8588ecd Merge pull request #278 from Sakimori/full_slash_consequences (Full slash consequences) 2023-03-31 14:30:17 -04:00
Sakimori 2ca1a44455 fixed some lastminute things 2023-03-31 14:29:38 -04:00
Sakimori 7fbd793cf5 done except draft 2023-03-31 14:28:04 -04:00
Sakimori 99fd5b00a3 finished league management commands, started OBL 2023-03-31 14:07:01 -04:00
Sakimori a7a9e37842 continued work on slashes 2023-03-31 13:22:10 -04:00
Sakimori 3d5aba94a9 added a few more slashes 2023-03-31 03:02:37 -04:00
Sakimori d7eb232af4 fixed environment, continued work on commands 2023-03-31 02:18:28 -04:00
Sakimori 03ab7c24bd showteam 2022-09-18 12:37:55 -04:00
Sakimori 793c67da11 view archetypes 2022-09-18 12:23:38 -04:00
Sakimori 85cbc0ef8a archetypehelp 2022-09-18 12:10:11 -04:00
Sakimori 4685f4aea8 added weather selection box 2022-09-18 12:02:34 -04:00
Sakimori 393094a63c archetypes, with dropdown 2022-09-18 11:49:50 -04:00
Sakimori 2b56ea6955 re-added conventional command handling for bot DMs 2022-09-18 11:35:03 -04:00
Sakimori 6d7cfe05c2 removed the one-command text and setupgame 2022-09-18 11:34:04 -04:00
Sakimori d6011a69d7 random games (also bugfix for same) 2022-09-18 11:27:36 -04:00
Sakimori dcbc934f86 removed custom voices for now 2022-09-18 11:14:20 -04:00
Sakimori 74aef9d163 startgame and showplayer 2022-09-18 11:08:24 -04:00
Sakimori 1f37131a76 moved showplayer 2022-09-18 10:00:28 -04:00
Sakimori 95af631746 updated requirements to match version of discord.py 2022-09-18 09:19:36 -04:00
Sakimori 0887cedc86 updated discord.py 2022-09-18 09:17:20 -04:00
Sakimori f05f644064 Update games.py (removed the OBL easter egg due to abuse) 2022-07-07 11:58:00 -04:00
Sakimori 93f3d99eae fixed double updoot 2022-07-05 00:37:35 -04:00
Sakimori 19caf51c9a telemetry for double updates 2022-07-05 00:34:52 -04:00
Sakimori 231d545739 Update weather.py 2022-07-05 00:29:25 -04:00
Sakimori 6936b767ff Merge branch 'master' of https://github.com/Sakimori/matteo-the-prestige 2022-07-05 00:28:12 -04:00
Sakimori 39aa47336e implemented Runoff 2022-07-05 00:19:46 -04:00
Sakimori 8f92fb8ed0 implemented Runoff 2022-07-05 00:13:54 -04:00
Sakimori 02ee1332f7 disable real player names, return TBD 2022-06-30 00:42:56 -04:00
Sakimori a5a1f4ce04 added -s flag to league leaders 2022-06-28 11:40:10 -04:00
Sakimori 548f37fa9e Merge branch 'master' of https://github.com/Sakimori/matteo-the-prestige 2022-06-15 22:23:43 -04:00
Sakimori 936305c88d updated to work with apache proxying 2022-06-15 21:59:51 -04:00
Sakimori 2c4b340d91 really allowed 0 inter-div and inter-conf games. also allowed odd conferences, i think 2021-09-10 09:38:43 -07:00
Sakimori 28eb6e207f fixed indent error, updated draftflags 2021-09-08 20:19:18 -07:00
Sakimori 1b7a32e3ec added more error catching 2021-08-24 13:42:00 -04:00
Sakimori 41b1765d77 fixed a typo 2021-08-15 19:54:37 -04:00
Sakimori e3594ef9fb attempt to fix typeerror in day_watcher 2021-08-15 19:52:32 -04:00
Sakimori 9bfb72e494 added blacklist function to config 2021-08-13 15:00:31 -04:00
Sakimori ffde5adf42 Update README.md (added patron) 2021-07-28 13:35:30 -04:00
Sakimori 10bef0550c added extra tracking for league errors 2021-07-26 19:07:35 -04:00
Sakimori fc45e05ad2 Merge branch 'master' of https://github.com/Sakimori/matteo-the-prestige 2021-07-26 19:02:54 -04:00
Sakimori 0dd906aac5 re-enabled error catching for setplayermods 2021-07-23 18:34:32 -04:00
Sakimori c3c700d242 added "ea" array to config file, restricted a command to users in that list or bot owners 2021-07-23 18:32:36 -04:00
Sakimori 4d8e61d47f removed leaf eddies from aftermath chain group 2021-07-19 21:50:07 -04:00
164 changed files with 38153 additions and 30519 deletions

.gitignore

@@ -359,3 +359,4 @@ env
 /simmadome/build
 /simmadome/.eslintcache
 /matteo_env/Lib/site-packages
+/new_matteo_env

README.md

@@ -282,6 +282,7 @@ these folks are helping me a *ton* via patreon, and i cannot possibly thank them
 - yooori
 - Bend
 - ALC
+- Luna

 ## Attribution

archetypes.py

@@ -1,5 +1,6 @@
 import random
 from gametext import appearance_outcomes
+from discord.app_commands import Choice

 class Archetype:
     name = "basic"
@@ -126,6 +127,12 @@ def all_archetypes():
         Control
     ]

+def archetype_choices():
+    lst = []
+    for arch in all_archetypes():
+        lst.append(Choice(name=arch.display_name, value=arch.name))
+    return lst
+
 def search_archetypes(text):
     for archetype in all_archetypes():
         if archetype.name == text or archetype.display_name.lower() == text.lower():
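
The new archetype_choices() helper builds discord.py Choice objects, presumably to feed the slash-command dropdowns the commit messages mention ("archetypes, with dropdown"). A minimal sketch of how such a helper plugs in, assuming discord.py 2.x app commands; the command body here is illustrative, not code from this diff:

import discord
from discord import app_commands

@app_commands.command(name="archetypehelp", description="describe an archetype")
@app_commands.choices(archetype=archetype_choices())  # dropdown options built from all_archetypes()
async def archetypehelp(interaction: discord.Interaction, archetype: app_commands.Choice[str]):
    # Choice.value carries arch.name, which search_archetypes() matches on
    arch = search_archetypes(archetype.value)
    await interaction.response.send_message(arch.display_name)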

games.py

@@ -235,7 +235,7 @@ class game(object):
         self.over = False
         self.random_weather_flag = False
         self.teams = {"away" : team1, "home" : team2}
-        self.archetypes = {team1.name : team1.archetypes, team2.name : team2.archetypes}
+        self.offense_archetypes = {}
         self.defense_archetypes = {}
         self.inning = 1
@@ -254,10 +254,14 @@ class game(object):
         self.voice = None
         self.current_batter = None

-        for this_team in [team1, team2]:
-            for this_player in this_team.lineup + this_team.rotation:
-                if this_player.name in this_team.archetypes.keys():
-                    this_team.archetypes[this_player.name].modify_player_stats(this_player)
+        try:
+            self.archetypes = {team1.name : team1.archetypes, team2.name : team2.archetypes}
+            for this_team in [team1, team2]:
+                for this_player in this_team.lineup + this_team.rotation:
+                    if this_player.name in this_team.archetypes.keys():
+                        this_team.archetypes[this_player.name].modify_player_stats(this_player)
+        except:
+            pass

     def occupied_bases(self):
         occ_dic = {}
@@ -351,7 +355,7 @@ class game(object):
             outcome["batter_archetype"].modify_out_type(outcome)
             outcome["pitcher_archetype"].modify_out_type(outcome)

-            if self.bases[1] is not None and roll["hitnum"] < -2 and self.outs != 2:
+            if self.bases[1] is not None and roll["hitnum"] < -2 and (self.outs != 2 or self.weather.out_extension):
                 outcome["outcome"] = appearance_outcomes.doubleplay
                 outcome["defender"] = ""
@@ -806,12 +810,7 @@ class game(object):
             self.over = True
             try: #if something goes wrong with OBL don't erase game
                 if self.max_innings >= 9 or self.weather.name in ["Leaf Eddies", "Torrential Downpour"]:
-                    if self.teams["home"].score == 16:
-                        this_xvi_team = self.teams["home"]
-                    elif self.teams["away"].score == 16:
-                        this_xvi_team = self.teams["away"]
-                    else:
-                        this_xvi_team = None
+                    this_xvi_team = None
                     db.save_obl_results(self.teams["home"] if self.teams["home"].score > self.teams["away"].score else self.teams["away"], self.teams["home"] if self.teams["home"].score < self.teams["away"].score else self.teams["away"], xvi_team=this_xvi_team)
             except:
                 pass
@@ -955,7 +954,7 @@ def get_team_and_owner(name):
             update_team(team_json)
         return (team_json, owner_id)
     except:
-        return None
+        return (None, None)

 def save_team(this_team, user_id):
     try:
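
The get_team_and_owner fix is about the unpacking contract: returning a bare None made `team, owner = get_team_and_owner(name)` raise "cannot unpack non-iterable NoneType" on a miss. A standalone sketch of the corrected pattern (names and data are illustrative, not from this repo):

def lookup_team_and_owner(name, teams):
    try:
        team = teams[name]
        return (team, team["owner_id"])
    except KeyError:
        return (None, None)  # a bare None here would make tuple unpacking blow up

teams = {"Example Team": {"owner_id": 1234}}
team, owner_id = lookup_team_and_owner("nonexistent", teams)  # unpacks cleanly
assert team is None and owner_id is None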

gametext.py

@@ -390,13 +390,10 @@ class TrespassOracle(game_strings_base):

 def all_voices():
-    return {"default": game_strings_base,
-            "The Goddesses": TheGoddesses,
-            "The New Guy": TheNewGuy,
-            "Trespass Oracle": TrespassOracle}
+    return {"default": game_strings_base}

 def weighted_voices(): #these are the ones accessible to random games
-    return [game_strings_base, TheGoddesses, TheNewGuy], [6, 2, 2]
+    return [game_strings_base], [6]

 def base_string(base):
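
weighted_voices() returns parallel lists (candidates, weights), which is exactly the shape random.choices expects; presumably the random-game path consumes it roughly like this sketch:

import random

voices, weights = weighted_voices()
voice_class = random.choices(voices, weights=weights, k=1)[0]  # game_strings_base is now the only option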


@@ -79,10 +79,12 @@ def create_season_connection(league_name, season_num):
     #create connection, create db if doesn't exist
     conn = None
     try:
         if not os.path.exists(os.path.join(data_dir, league_dir, league_name)):
             os.makedirs(os.path.join(data_dir, league_dir, league_name))
-        conn = sql.connect(os.path.join(data_dir, league_dir, league_name, season_num, f"{league_name}.db"))
+        try:
+            conn = sql.connect(os.path.join(data_dir, league_dir, league_name, season_num, f"{league_name}.db"))
+        except:
+            raise ValueError("Season not played")

         # enable write-ahead log for performance and resilience
         conn.execute('pragma journal_mode=wal')
@@ -197,8 +199,11 @@ def add_stats(league_name, player_game_stats_list):
     conn.commit()
     conn.close()

-def get_stats(league_name, stat, is_batter=True, day = 10):
-    conn = create_connection(league_name)
+def get_stats(league_name, stat, is_batter=True, day = 10, season = None):
+    if season is None:
+        conn = create_connection(league_name)
+    else:
+        conn = create_season_connection(league_name, season)
     stats = None
     if conn is not None:
         conn.row_factory = sql.Row
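
The new season parameter routes reads between the live league database and an archived per-season snapshot. A simplified standalone sketch of that routing (the paths are stand-ins for the data_dir/league_dir joins above):

def season_db_path(league_name, season=None):
    # None -> the live database for the season in progress;
    # a number -> that season's archived snapshot
    if season is None:
        return f"{league_name}.db"
    return f"{league_name}/{season}/{league_name}.db"

assert season_db_path("mybl") == "mybl.db"
assert season_db_path("mybl", 2) == "mybl/2/mybl.db"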


@@ -455,9 +455,15 @@ class league_structure(object):
         return tournaments

-    def stat_embed(self, stat_name):
-        this_embed = Embed(color=Color.purple(), title=f"{self.name} Season {self.season} {stat_name} Leaders")
-        stats = league_db.get_stats(self.name, stat_name.lower(), day = self.day)
+    def stat_embed(self, stat_name, season_num):
+        if season_num is None:
+            season_string = str(self.season)
+            day = self.day
+        else:
+            season_string = str(season_num)
+            day = len(self.schedule)
+        this_embed = Embed(color=Color.purple(), title=f"{self.name} Season {season_string} {stat_name} Leaders")
+        stats = league_db.get_stats(self.name, stat_name.lower(), day = day, season = season_num)
         if stats is None:
             return None
         else:
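
The day cutoff is the subtle part of this change: a season in progress is cut off at self.day, while an archived season ran its whole schedule, so len(self.schedule) stands in for the final day. As a worked one-liner (attribute names as in the diff, data illustrative):

def stats_cutoff(current_day, schedule, season_num=None):
    return current_day if season_num is None else len(schedule)

assert stats_cutoff(40, ["day"] * 100) == 40                  # live season
assert stats_cutoff(40, ["day"] * 100, season_num=1) == 100   # archived season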


@@ -118,7 +118,7 @@ def create_league():

 ### SOCKETS
-thread2 = threading.Thread(target=socketio.run,args=(app,"0.0.0.0", "80"))
+thread2 = threading.Thread(target=socketio.run,args=(app,"127.0.0.1", "8080"))
 thread2.start()

 master_games_dic = {} #key timestamp : (game game, {} state)
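
Rebinding from 0.0.0.0:80 to 127.0.0.1:8080 pairs with the "updated to work with apache proxying" commit: the socket server now listens only locally and expects a reverse proxy in front. A minimal Apache sketch, assuming mod_proxy, mod_proxy_http, mod_proxy_wstunnel, and mod_rewrite are enabled; the hostname is illustrative:

<VirtualHost *:80>
    ServerName sim.example.org
    RewriteEngine On
    # pass websocket upgrades (socket.io) through as ws://
    RewriteCond %{HTTP:Upgrade} =websocket [NC]
    RewriteRule /(.*) ws://127.0.0.1:8080/$1 [P,L]
    # everything else as plain HTTP
    ProxyPass / http://127.0.0.1:8080/
    ProxyPassReverse / http://127.0.0.1:8080/
</VirtualHost>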

aiohttp-3.6.3.dist-info/LICENSE.txt

@@ -1,201 +0,0 @@
(file deleted: the vendored copy of the Apache License 2.0, copyright 2013-2019 Nikolay Kim and Andrew Svetlov, removed along with the rest of the checked-in aiohttp 3.6.3 distribution)

aiohttp-3.6.3.dist-info/METADATA

@@ -1,661 +0,0 @@
(file deleted: the wheel METADATA for aiohttp 3.6.3, i.e. PyPI metadata plus the project README and changelog, removed along with the vendored distribution)

aiohttp-3.6.3.dist-info/RECORD

@@ -1,124 +0,0 @@
(file deleted: the wheel's installed-files manifest, 124 lines of paths with sha256 hashes and sizes)

aiohttp-3.6.3.dist-info/WHEEL

@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.36.2)
-Root-Is-Purelib: false
-Tag: cp38-cp38-win32

aiohttp/__init__.py

@@ -1,226 +1,216 @@
-__version__ = '3.6.3'
+__version__ = "3.8.1"
(the rest of the hunk, a reorganization of the module's long list of re-export imports for the vendored aiohttp bump from 3.6.3 to 3.8.1, is truncated)
TraceRequestRedirectParams as TraceRequestRedirectParams,
TraceRequestStartParams as TraceRequestStartParams,
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
)
__all__ = (
'hdrs',
__all__: Tuple[str, ...] = (
"hdrs",
# client
'BaseConnector',
'ClientConnectionError',
'ClientConnectorCertificateError',
'ClientConnectorError',
'ClientConnectorSSLError',
'ClientError',
'ClientHttpProxyError',
'ClientOSError',
'ClientPayloadError',
'ClientProxyConnectionError',
'ClientResponse',
'ClientRequest',
'ClientResponseError',
'ClientSSLError',
'ClientSession',
'ClientTimeout',
'ClientWebSocketResponse',
'ContentTypeError',
'Fingerprint',
'InvalidURL',
'RequestInfo',
'ServerConnectionError',
'ServerDisconnectedError',
'ServerFingerprintMismatch',
'ServerTimeoutError',
'TCPConnector',
'TooManyRedirects',
'UnixConnector',
'NamedPipeConnector',
'WSServerHandshakeError',
'request',
"BaseConnector",
"ClientConnectionError",
"ClientConnectorCertificateError",
"ClientConnectorError",
"ClientConnectorSSLError",
"ClientError",
"ClientHttpProxyError",
"ClientOSError",
"ClientPayloadError",
"ClientProxyConnectionError",
"ClientResponse",
"ClientRequest",
"ClientResponseError",
"ClientSSLError",
"ClientSession",
"ClientTimeout",
"ClientWebSocketResponse",
"ContentTypeError",
"Fingerprint",
"InvalidURL",
"RequestInfo",
"ServerConnectionError",
"ServerDisconnectedError",
"ServerFingerprintMismatch",
"ServerTimeoutError",
"TCPConnector",
"TooManyRedirects",
"UnixConnector",
"NamedPipeConnector",
"WSServerHandshakeError",
"request",
# cookiejar
'CookieJar',
'DummyCookieJar',
"CookieJar",
"DummyCookieJar",
# formdata
'FormData',
"FormData",
# helpers
'BasicAuth',
'ChainMapProxy',
"BasicAuth",
"ChainMapProxy",
"ETag",
# http
'HttpVersion',
'HttpVersion10',
'HttpVersion11',
'WSMsgType',
'WSCloseCode',
'WSMessage',
'WebSocketError',
"HttpVersion",
"HttpVersion10",
"HttpVersion11",
"WSMsgType",
"WSCloseCode",
"WSMessage",
"WebSocketError",
# multipart
'BadContentDispositionHeader',
'BadContentDispositionParam',
'BodyPartReader',
'MultipartReader',
'MultipartWriter',
'content_disposition_filename',
'parse_content_disposition',
"BadContentDispositionHeader",
"BadContentDispositionParam",
"BodyPartReader",
"MultipartReader",
"MultipartWriter",
"content_disposition_filename",
"parse_content_disposition",
# payload
'AsyncIterablePayload',
'BufferedReaderPayload',
'BytesIOPayload',
'BytesPayload',
'IOBasePayload',
'JsonPayload',
'PAYLOAD_REGISTRY',
'Payload',
'StringIOPayload',
'StringPayload',
'TextIOPayload',
'get_payload',
'payload_type',
"AsyncIterablePayload",
"BufferedReaderPayload",
"BytesIOPayload",
"BytesPayload",
"IOBasePayload",
"JsonPayload",
"PAYLOAD_REGISTRY",
"Payload",
"StringIOPayload",
"StringPayload",
"TextIOPayload",
"get_payload",
"payload_type",
# payload_streamer
'streamer',
"streamer",
# resolver
'AsyncResolver',
'DefaultResolver',
'ThreadedResolver',
# signals
'Signal',
'DataQueue',
'EMPTY_PAYLOAD',
'EofStream',
'FlowControlDataQueue',
'StreamReader',
"AsyncResolver",
"DefaultResolver",
"ThreadedResolver",
# streams
"DataQueue",
"EMPTY_PAYLOAD",
"EofStream",
"FlowControlDataQueue",
"StreamReader",
# tracing
'TraceConfig',
'TraceConnectionCreateEndParams',
'TraceConnectionCreateStartParams',
'TraceConnectionQueuedEndParams',
'TraceConnectionQueuedStartParams',
'TraceConnectionReuseconnParams',
'TraceDnsCacheHitParams',
'TraceDnsCacheMissParams',
'TraceDnsResolveHostEndParams',
'TraceDnsResolveHostStartParams',
'TraceRequestChunkSentParams',
'TraceRequestEndParams',
'TraceRequestExceptionParams',
'TraceRequestRedirectParams',
'TraceRequestStartParams',
'TraceResponseChunkReceivedParams',
) # type: Tuple[str, ...]
"TraceConfig",
"TraceConnectionCreateEndParams",
"TraceConnectionCreateStartParams",
"TraceConnectionQueuedEndParams",
"TraceConnectionQueuedStartParams",
"TraceConnectionReuseconnParams",
"TraceDnsCacheHitParams",
"TraceDnsCacheMissParams",
"TraceDnsResolveHostEndParams",
"TraceDnsResolveHostStartParams",
"TraceRequestChunkSentParams",
"TraceRequestEndParams",
"TraceRequestExceptionParams",
"TraceRequestRedirectParams",
"TraceRequestStartParams",
"TraceResponseChunkReceivedParams",
)
try:
from .worker import GunicornWebWorker, GunicornUVLoopWebWorker # noqa
__all__ += ('GunicornWebWorker', 'GunicornUVLoopWebWorker')
from .worker import GunicornUVLoopWebWorker, GunicornWebWorker
__all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
except ImportError: # pragma: no cover
pass
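
Every name above is re-exported with the explicit "X as X" alias form, which tools such as mypy treat as an intentional re-export; consumers keep importing everything from the package root. A short usage sketch of the re-exported client API (standard aiohttp calls; the URL is illustrative):

import asyncio
import aiohttp  # ClientSession, ClientTimeout, ClientError all resolve from the root package

async def fetch(url: str) -> str:
    # ClientTimeout and ClientSession are among the names re-exported above.
    timeout = aiohttp.ClientTimeout(total=10)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(url) as resp:
            resp.raise_for_status()
            return await resp.text()

print(asyncio.run(fetch("https://example.com")))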

aiohttp/_cparser.pxd

@@ -1,140 +1,190 @@
from libc.stdint cimport uint16_t, uint32_t, uint64_t
from libc.stdint cimport (
int8_t,
int16_t,
int32_t,
int64_t,
uint8_t,
uint16_t,
uint32_t,
uint64_t,
)
cdef extern from "../vendor/http-parser/http_parser.h":
ctypedef int (*http_data_cb) (http_parser*,
const char *at,
size_t length) except -1
cdef extern from "../vendor/llhttp/build/llhttp.h":
ctypedef int (*http_cb) (http_parser*) except -1
struct http_parser:
unsigned int type
unsigned int flags
unsigned int state
unsigned int header_state
unsigned int index
uint32_t nread
struct llhttp__internal_s:
int32_t _index
void* _span_pos0
void* _span_cb0
int32_t error
const char* reason
const char* error_pos
void* data
void* _current
uint64_t content_length
uint8_t type
uint8_t method
uint8_t http_major
uint8_t http_minor
uint8_t header_state
uint8_t lenient_flags
uint8_t upgrade
uint8_t finish
uint16_t flags
uint16_t status_code
void* settings
unsigned short http_major
unsigned short http_minor
unsigned int status_code
unsigned int method
unsigned int http_errno
ctypedef llhttp__internal_s llhttp__internal_t
ctypedef llhttp__internal_t llhttp_t
unsigned int upgrade
ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
ctypedef int (*llhttp_cb)(llhttp_t*) except -1
void *data
struct llhttp_settings_s:
llhttp_cb on_message_begin
llhttp_data_cb on_url
llhttp_data_cb on_status
llhttp_data_cb on_header_field
llhttp_data_cb on_header_value
llhttp_cb on_headers_complete
llhttp_data_cb on_body
llhttp_cb on_message_complete
llhttp_cb on_chunk_header
llhttp_cb on_chunk_complete
struct http_parser_settings:
http_cb on_message_begin
http_data_cb on_url
http_data_cb on_status
http_data_cb on_header_field
http_data_cb on_header_value
http_cb on_headers_complete
http_data_cb on_body
http_cb on_message_complete
http_cb on_chunk_header
http_cb on_chunk_complete
llhttp_cb on_url_complete
llhttp_cb on_status_complete
llhttp_cb on_header_field_complete
llhttp_cb on_header_value_complete
enum http_parser_type:
ctypedef llhttp_settings_s llhttp_settings_t
enum llhttp_errno:
HPE_OK,
HPE_INTERNAL,
HPE_STRICT,
HPE_LF_EXPECTED,
HPE_UNEXPECTED_CONTENT_LENGTH,
HPE_CLOSED_CONNECTION,
HPE_INVALID_METHOD,
HPE_INVALID_URL,
HPE_INVALID_CONSTANT,
HPE_INVALID_VERSION,
HPE_INVALID_HEADER_TOKEN,
HPE_INVALID_CONTENT_LENGTH,
HPE_INVALID_CHUNK_SIZE,
HPE_INVALID_STATUS,
HPE_INVALID_EOF_STATE,
HPE_INVALID_TRANSFER_ENCODING,
HPE_CB_MESSAGE_BEGIN,
HPE_CB_HEADERS_COMPLETE,
HPE_CB_MESSAGE_COMPLETE,
HPE_CB_CHUNK_HEADER,
HPE_CB_CHUNK_COMPLETE,
HPE_PAUSED,
HPE_PAUSED_UPGRADE,
HPE_USER
ctypedef llhttp_errno llhttp_errno_t
enum llhttp_flags:
F_CONNECTION_KEEP_ALIVE,
F_CONNECTION_CLOSE,
F_CONNECTION_UPGRADE,
F_CHUNKED,
F_UPGRADE,
F_CONTENT_LENGTH,
F_SKIPBODY,
F_TRAILING,
F_TRANSFER_ENCODING
enum llhttp_lenient_flags:
LENIENT_HEADERS,
LENIENT_CHUNKED_LENGTH
enum llhttp_type:
HTTP_REQUEST,
HTTP_RESPONSE,
HTTP_BOTH
enum http_errno:
HPE_OK,
HPE_CB_message_begin,
HPE_CB_url,
HPE_CB_header_field,
HPE_CB_header_value,
HPE_CB_headers_complete,
HPE_CB_body,
HPE_CB_message_complete,
HPE_CB_status,
HPE_CB_chunk_header,
HPE_CB_chunk_complete,
HPE_INVALID_EOF_STATE,
HPE_HEADER_OVERFLOW,
HPE_CLOSED_CONNECTION,
HPE_INVALID_VERSION,
HPE_INVALID_STATUS,
HPE_INVALID_METHOD,
HPE_INVALID_URL,
HPE_INVALID_HOST,
HPE_INVALID_PORT,
HPE_INVALID_PATH,
HPE_INVALID_QUERY_STRING,
HPE_INVALID_FRAGMENT,
HPE_LF_EXPECTED,
HPE_INVALID_HEADER_TOKEN,
HPE_INVALID_CONTENT_LENGTH,
HPE_INVALID_CHUNK_SIZE,
HPE_INVALID_CONSTANT,
HPE_INVALID_INTERNAL_STATE,
HPE_STRICT,
HPE_PAUSED,
HPE_UNKNOWN
enum llhttp_finish_t:
HTTP_FINISH_SAFE,
HTTP_FINISH_SAFE_WITH_CB,
HTTP_FINISH_UNSAFE
enum flags:
F_CHUNKED,
F_CONNECTION_KEEP_ALIVE,
F_CONNECTION_CLOSE,
F_CONNECTION_UPGRADE,
F_TRAILING,
F_UPGRADE,
F_SKIPBODY,
F_CONTENTLENGTH
enum llhttp_method:
HTTP_DELETE,
HTTP_GET,
HTTP_HEAD,
HTTP_POST,
HTTP_PUT,
HTTP_CONNECT,
HTTP_OPTIONS,
HTTP_TRACE,
HTTP_COPY,
HTTP_LOCK,
HTTP_MKCOL,
HTTP_MOVE,
HTTP_PROPFIND,
HTTP_PROPPATCH,
HTTP_SEARCH,
HTTP_UNLOCK,
HTTP_BIND,
HTTP_REBIND,
HTTP_UNBIND,
HTTP_ACL,
HTTP_REPORT,
HTTP_MKACTIVITY,
HTTP_CHECKOUT,
HTTP_MERGE,
HTTP_MSEARCH,
HTTP_NOTIFY,
HTTP_SUBSCRIBE,
HTTP_UNSUBSCRIBE,
HTTP_PATCH,
HTTP_PURGE,
HTTP_MKCALENDAR,
HTTP_LINK,
HTTP_UNLINK,
HTTP_SOURCE,
HTTP_PRI,
HTTP_DESCRIBE,
HTTP_ANNOUNCE,
HTTP_SETUP,
HTTP_PLAY,
HTTP_PAUSE,
HTTP_TEARDOWN,
HTTP_GET_PARAMETER,
HTTP_SET_PARAMETER,
HTTP_REDIRECT,
HTTP_RECORD,
HTTP_FLUSH
enum http_method:
DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY,
LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND,
REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE,
MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR,
LINK, UNLINK
ctypedef llhttp_method llhttp_method_t;
void http_parser_init(http_parser *parser, http_parser_type type)
void llhttp_settings_init(llhttp_settings_t* settings)
void llhttp_init(llhttp_t* parser, llhttp_type type,
const llhttp_settings_t* settings)
size_t http_parser_execute(http_parser *parser,
const http_parser_settings *settings,
const char *data,
size_t len)
llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
llhttp_errno_t llhttp_finish(llhttp_t* parser)
int http_should_keep_alive(const http_parser *parser)
int llhttp_message_needs_eof(const llhttp_t* parser)
void http_parser_settings_init(http_parser_settings *settings)
int llhttp_should_keep_alive(const llhttp_t* parser)
const char *http_errno_name(http_errno err)
const char *http_errno_description(http_errno err)
const char *http_method_str(http_method m)
void llhttp_pause(llhttp_t* parser)
void llhttp_resume(llhttp_t* parser)
# URL Parser
void llhttp_resume_after_upgrade(llhttp_t* parser)
enum http_parser_url_fields:
UF_SCHEMA = 0,
UF_HOST = 1,
UF_PORT = 2,
UF_PATH = 3,
UF_QUERY = 4,
UF_FRAGMENT = 5,
UF_USERINFO = 6,
UF_MAX = 7
llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
const char* llhttp_get_error_reason(const llhttp_t* parser)
void llhttp_set_error_reason(llhttp_t* parser, const char* reason)
const char* llhttp_get_error_pos(const llhttp_t* parser)
const char* llhttp_errno_name(llhttp_errno_t err)
struct http_parser_url_field_data:
uint16_t off
uint16_t len
const char* llhttp_method_name(llhttp_method_t method)
struct http_parser_url:
uint16_t field_set
uint16_t port
http_parser_url_field_data[<int>UF_MAX] field_data
void http_parser_url_init(http_parser_url *u)
int http_parser_parse_url(const char *buf,
size_t buflen,
int is_connect,
http_parser_url *u)
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled)
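
These declarations swap the old http-parser bindings for llhttp: the parser struct becomes llhttp_t, llhttp_execute returns an llhttp_errno_t instead of a byte count, and the method table adds HTTP_PRI and the RTSP verbs (HTTP_DESCRIBE through HTTP_FLUSH), which is why METHODS_COUNT rises from 34 to 46 in _http_parser.pyx further down. At the Python level, the parser built on top of them is driven roughly like this (a hedged sketch using the post-upgrade constructor, whose new third argument, limit, appears in the .pyx diff below; a Mock stands in for the protocol object):

import asyncio
from unittest import mock

try:
    from aiohttp._http_parser import HttpRequestParser  # C accelerator, if built
except ImportError:
    from aiohttp.http_parser import HttpRequestParser   # pure-Python fallback

loop = asyncio.new_event_loop()
protocol = mock.Mock()  # the parser only notifies this object; a Mock suffices here
parser = HttpRequestParser(protocol, loop, 2 ** 16)  # limit is the new third argument

messages, upgraded, tail = parser.feed_data(
    b"GET /path?q=1 HTTP/1.1\r\nHost: example.com\r\n\r\n"
)
msg, payload = messages[0]
print(msg.method, msg.path, msg.version)  # expected: GET /path?q=1 HttpVersion(major=1, minor=1)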

aiohttp/_find_header.c

@@ -24,7 +24,7 @@ find_header(const char *str, int size)
int count = -1;
pchar--;
INITIAL:
NEXT_CHAR();
switch (ch) {
case 'A':
@@ -8591,16 +8591,6 @@ TRANSFER_ENCODIN:
U:
NEXT_CHAR();
switch (ch) {
case 'P':
if (last) {
return -1;
}
goto UP;
case 'p':
if (last) {
return -1;
}
goto UP;
case 'R':
if (last) {
return -1;
@@ -8611,6 +8601,16 @@ U:
return -1;
}
goto UR;
case 'P':
if (last) {
return -1;
}
goto UP;
case 'p':
if (last) {
return -1;
}
goto UP;
case 'S':
if (last) {
return -1;
@@ -8625,6 +8625,23 @@ U:
return -1;
}
UR:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return 66;
}
goto URI;
case 'i':
if (last) {
return 66;
}
goto URI;
default:
return -1;
}
UP:
NEXT_CHAR();
switch (ch) {
@@ -8698,35 +8715,18 @@ UPGRAD:
switch (ch) {
case 'E':
if (last) {
return 66;
return 67;
}
goto UPGRADE;
case 'e':
if (last) {
return 66;
return 67;
}
goto UPGRADE;
default:
return -1;
}
UR:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return 67;
}
goto URI;
case 'i':
if (last) {
return 67;
}
goto URI;
default:
return -1;
}
US:
NEXT_CHAR();
switch (ch) {
@@ -8939,26 +8939,6 @@ VI:
W:
NEXT_CHAR();
switch (ch) {
case 'A':
if (last) {
return -1;
}
goto WA;
case 'a':
if (last) {
return -1;
}
goto WA;
case 'E':
if (last) {
return -1;
}
goto WE;
case 'e':
if (last) {
return -1;
}
goto WE;
case 'W':
if (last) {
return -1;
@@ -8969,351 +8949,16 @@ W:
return -1;
}
goto WW;
default:
return -1;
}
WA:
NEXT_CHAR();
switch (ch) {
case 'N':
case 'A':
if (last) {
return -1;
}
goto WAN;
case 'n':
goto WA;
case 'a':
if (last) {
return -1;
}
goto WAN;
case 'R':
if (last) {
return -1;
}
goto WAR;
case 'r':
if (last) {
return -1;
}
goto WAR;
default:
return -1;
}
WAN:
NEXT_CHAR();
switch (ch) {
case 'T':
if (last) {
return -1;
}
goto WANT;
case 't':
if (last) {
return -1;
}
goto WANT;
default:
return -1;
}
WANT:
NEXT_CHAR();
switch (ch) {
case '-':
if (last) {
return -1;
}
goto WANT_;
default:
return -1;
}
WANT_:
NEXT_CHAR();
switch (ch) {
case 'D':
if (last) {
return -1;
}
goto WANT_D;
case 'd':
if (last) {
return -1;
}
goto WANT_D;
default:
return -1;
}
WANT_D:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return -1;
}
goto WANT_DI;
case 'i':
if (last) {
return -1;
}
goto WANT_DI;
default:
return -1;
}
WANT_DI:
NEXT_CHAR();
switch (ch) {
case 'G':
if (last) {
return -1;
}
goto WANT_DIG;
case 'g':
if (last) {
return -1;
}
goto WANT_DIG;
default:
return -1;
}
WANT_DIG:
NEXT_CHAR();
switch (ch) {
case 'E':
if (last) {
return -1;
}
goto WANT_DIGE;
case 'e':
if (last) {
return -1;
}
goto WANT_DIGE;
default:
return -1;
}
WANT_DIGE:
NEXT_CHAR();
switch (ch) {
case 'S':
if (last) {
return -1;
}
goto WANT_DIGES;
case 's':
if (last) {
return -1;
}
goto WANT_DIGES;
default:
return -1;
}
WANT_DIGES:
NEXT_CHAR();
switch (ch) {
case 'T':
if (last) {
return 71;
}
goto WANT_DIGEST;
case 't':
if (last) {
return 71;
}
goto WANT_DIGEST;
default:
return -1;
}
WAR:
NEXT_CHAR();
switch (ch) {
case 'N':
if (last) {
return -1;
}
goto WARN;
case 'n':
if (last) {
return -1;
}
goto WARN;
default:
return -1;
}
WARN:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return -1;
}
goto WARNI;
case 'i':
if (last) {
return -1;
}
goto WARNI;
default:
return -1;
}
WARNI:
NEXT_CHAR();
switch (ch) {
case 'N':
if (last) {
return -1;
}
goto WARNIN;
case 'n':
if (last) {
return -1;
}
goto WARNIN;
default:
return -1;
}
WARNIN:
NEXT_CHAR();
switch (ch) {
case 'G':
if (last) {
return 72;
}
goto WARNING;
case 'g':
if (last) {
return 72;
}
goto WARNING;
default:
return -1;
}
WE:
NEXT_CHAR();
switch (ch) {
case 'B':
if (last) {
return -1;
}
goto WEB;
case 'b':
if (last) {
return -1;
}
goto WEB;
default:
return -1;
}
WEB:
NEXT_CHAR();
switch (ch) {
case 'S':
if (last) {
return -1;
}
goto WEBS;
case 's':
if (last) {
return -1;
}
goto WEBS;
default:
return -1;
}
WEBS:
NEXT_CHAR();
switch (ch) {
case 'O':
if (last) {
return -1;
}
goto WEBSO;
case 'o':
if (last) {
return -1;
}
goto WEBSO;
default:
return -1;
}
WEBSO:
NEXT_CHAR();
switch (ch) {
case 'C':
if (last) {
return -1;
}
goto WEBSOC;
case 'c':
if (last) {
return -1;
}
goto WEBSOC;
default:
return -1;
}
WEBSOC:
NEXT_CHAR();
switch (ch) {
case 'K':
if (last) {
return -1;
}
goto WEBSOCK;
case 'k':
if (last) {
return -1;
}
goto WEBSOCK;
default:
return -1;
}
WEBSOCK:
NEXT_CHAR();
switch (ch) {
case 'E':
if (last) {
return -1;
}
goto WEBSOCKE;
case 'e':
if (last) {
return -1;
}
goto WEBSOCKE;
default:
return -1;
}
WEBSOCKE:
NEXT_CHAR();
switch (ch) {
case 'T':
if (last) {
return 73;
}
goto WEBSOCKET;
case 't':
if (last) {
return 73;
}
goto WEBSOCKET;
goto WA;
default:
return -1;
}
@@ -9539,18 +9184,244 @@ WWW_AUTHENTICAT:
switch (ch) {
case 'E':
if (last) {
return 74;
return 71;
}
goto WWW_AUTHENTICATE;
case 'e':
if (last) {
return 74;
return 71;
}
goto WWW_AUTHENTICATE;
default:
return -1;
}
WA:
NEXT_CHAR();
switch (ch) {
case 'N':
if (last) {
return -1;
}
goto WAN;
case 'n':
if (last) {
return -1;
}
goto WAN;
case 'R':
if (last) {
return -1;
}
goto WAR;
case 'r':
if (last) {
return -1;
}
goto WAR;
default:
return -1;
}
WAN:
NEXT_CHAR();
switch (ch) {
case 'T':
if (last) {
return -1;
}
goto WANT;
case 't':
if (last) {
return -1;
}
goto WANT;
default:
return -1;
}
WANT:
NEXT_CHAR();
switch (ch) {
case '-':
if (last) {
return -1;
}
goto WANT_;
default:
return -1;
}
WANT_:
NEXT_CHAR();
switch (ch) {
case 'D':
if (last) {
return -1;
}
goto WANT_D;
case 'd':
if (last) {
return -1;
}
goto WANT_D;
default:
return -1;
}
WANT_D:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return -1;
}
goto WANT_DI;
case 'i':
if (last) {
return -1;
}
goto WANT_DI;
default:
return -1;
}
WANT_DI:
NEXT_CHAR();
switch (ch) {
case 'G':
if (last) {
return -1;
}
goto WANT_DIG;
case 'g':
if (last) {
return -1;
}
goto WANT_DIG;
default:
return -1;
}
WANT_DIG:
NEXT_CHAR();
switch (ch) {
case 'E':
if (last) {
return -1;
}
goto WANT_DIGE;
case 'e':
if (last) {
return -1;
}
goto WANT_DIGE;
default:
return -1;
}
WANT_DIGE:
NEXT_CHAR();
switch (ch) {
case 'S':
if (last) {
return -1;
}
goto WANT_DIGES;
case 's':
if (last) {
return -1;
}
goto WANT_DIGES;
default:
return -1;
}
WANT_DIGES:
NEXT_CHAR();
switch (ch) {
case 'T':
if (last) {
return 72;
}
goto WANT_DIGEST;
case 't':
if (last) {
return 72;
}
goto WANT_DIGEST;
default:
return -1;
}
WAR:
NEXT_CHAR();
switch (ch) {
case 'N':
if (last) {
return -1;
}
goto WARN;
case 'n':
if (last) {
return -1;
}
goto WARN;
default:
return -1;
}
WARN:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return -1;
}
goto WARNI;
case 'i':
if (last) {
return -1;
}
goto WARNI;
default:
return -1;
}
WARNI:
NEXT_CHAR();
switch (ch) {
case 'N':
if (last) {
return -1;
}
goto WARNIN;
case 'n':
if (last) {
return -1;
}
goto WARNIN;
default:
return -1;
}
WARNIN:
NEXT_CHAR();
switch (ch) {
case 'G':
if (last) {
return 73;
}
goto WARNING;
case 'g':
if (last) {
return 73;
}
goto WARNING;
default:
return -1;
}
X:
NEXT_CHAR();
switch (ch) {
@@ -9787,12 +9658,12 @@ X_FORWARDED_FO:
switch (ch) {
case 'R':
if (last) {
return 75;
return 74;
}
goto X_FORWARDED_FOR;
case 'r':
if (last) {
return 75;
return 74;
}
goto X_FORWARDED_FOR;
default:
@@ -9838,12 +9709,12 @@ X_FORWARDED_HOS:
switch (ch) {
case 'T':
if (last) {
return 76;
return 75;
}
goto X_FORWARDED_HOST;
case 't':
if (last) {
return 76;
return 75;
}
goto X_FORWARDED_HOST;
default:
@@ -9906,12 +9777,12 @@ X_FORWARDED_PROT:
switch (ch) {
case 'O':
if (last) {
return 77;
return 76;
}
goto X_FORWARDED_PROTO;
case 'o':
if (last) {
return 77;
return 76;
}
goto X_FORWARDED_PROTO;
default:
@@ -9989,7 +9860,6 @@ VARY:
VIA:
WANT_DIGEST:
WARNING:
WEBSOCKET:
WWW_AUTHENTICATE:
X_FORWARDED_FOR:
X_FORWARDED_HOST:
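
_find_header.c is generated code: a character-by-character switch that maps a header name to its index in the known-headers table, or -1 for an unknown name. Because the return value is just a table index, reshuffling URI ahead of UPGRADE and moving WWW-AUTHENTICATE while dropping WEBSOCKET renumbers the return constants throughout (UPGRADE 66 to 67, WWW-AUTHENTICATE 74 to 71, X-Forwarded-For 75 to 74, and so on). Conceptually it computes the following (a hedged plain-Python sketch with a hypothetical shortened table, not the generated matcher):

# Hypothetical illustration: return the table index of a known header under
# case-insensitive ASCII matching, or -1 when the name is unknown.
KNOWN_HEADERS = ["UPGRADE", "URI", "WANT-DIGEST", "WARNING", "WWW-AUTHENTICATE"]

def find_header(name: bytes) -> int:
    needle = name.decode("latin-1").upper()
    for index, header in enumerate(KNOWN_HEADERS):
        if header == needle:
            return index
    return -1

print(find_header(b"Upgrade"))    # 0
print(find_header(b"X-Unknown"))  # -1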

File diff suppressed because it is too large

aiohttp/_frozenlist.pyx

@@ -1,108 +0,0 @@
from collections.abc import MutableSequence
cdef class FrozenList:
cdef readonly bint frozen
cdef list _items
def __init__(self, items=None):
self.frozen = False
if items is not None:
items = list(items)
else:
items = []
self._items = items
cdef object _check_frozen(self):
if self.frozen:
raise RuntimeError("Cannot modify frozen list.")
cdef inline object _fast_len(self):
return len(self._items)
def freeze(self):
self.frozen = True
def __getitem__(self, index):
return self._items[index]
def __setitem__(self, index, value):
self._check_frozen()
self._items[index] = value
def __delitem__(self, index):
self._check_frozen()
del self._items[index]
def __len__(self):
return self._fast_len()
def __iter__(self):
return self._items.__iter__()
def __reversed__(self):
return self._items.__reversed__()
def __richcmp__(self, other, op):
if op == 0: # <
return list(self) < other
if op == 1: # <=
return list(self) <= other
if op == 2: # ==
return list(self) == other
if op == 3: # !=
return list(self) != other
if op == 4: # >
return list(self) > other
if op == 5: # >=
return list(self) >= other
def insert(self, pos, item):
self._check_frozen()
self._items.insert(pos, item)
def __contains__(self, item):
return item in self._items
def __iadd__(self, items):
self._check_frozen()
self._items += list(items)
return self
def index(self, item):
return self._items.index(item)
def remove(self, item):
self._check_frozen()
self._items.remove(item)
def clear(self):
self._check_frozen()
self._items.clear()
def extend(self, items):
self._check_frozen()
self._items += list(items)
def reverse(self):
self._check_frozen()
self._items.reverse()
def pop(self, index=-1):
self._check_frozen()
return self._items.pop(index)
def append(self, item):
self._check_frozen()
return self._items.append(item)
def count(self, item):
return self._items.count(item)
def __repr__(self):
return '<FrozenList(frozen={}, {!r})>'.format(self.frozen,
self._items)
MutableSequence.register(FrozenList)
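
The deleted _frozenlist.pyx implemented a list that can be mutated until freeze() is called and rejects modification afterwards; removing the vendored copy is consistent with the move to the standalone frozenlist distribution that aiohttp now depends on. A usage sketch of the semantics shown above (assuming the external package, which exposes a compatible class):

from frozenlist import FrozenList

fl = FrozenList([1, 2, 3])
fl.append(4)       # fine while the list is still mutable
fl.freeze()
try:
    fl.append(5)   # any mutator now raises RuntimeError
except RuntimeError as exc:
    print(exc)     # "Cannot modify frozen list."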

aiohttp/_headers.pxi

@@ -69,15 +69,14 @@ cdef tuple headers = (
hdrs.TE,
hdrs.TRAILER,
hdrs.TRANSFER_ENCODING,
hdrs.UPGRADE,
hdrs.URI,
hdrs.UPGRADE,
hdrs.USER_AGENT,
hdrs.VARY,
hdrs.VIA,
hdrs.WWW_AUTHENTICATE,
hdrs.WANT_DIGEST,
hdrs.WARNING,
hdrs.WEBSOCKET,
hdrs.WWW_AUTHENTICATE,
hdrs.X_FORWARDED_FOR,
hdrs.X_FORWARDED_HOST,
hdrs.X_FORWARDED_PROTO,
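
The tuple above is the compile-time header table baked into the C parser; each entry is an aiohttp.hdrs constant, and its position must match the index returned by find_header in the generated matcher. A quick check against the re-exported constants (assuming a current aiohttp):

from aiohttp import hdrs

print(hdrs.UPGRADE, hdrs.WANT_DIGEST, hdrs.WWW_AUTHENTICATE)
# expected: Upgrade Want-Digest WWW-Authenticate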

aiohttp/_helpers.c

@@ -1,14 +1,16 @@
/* Generated by Cython 0.29.13 */
/* Generated by Cython 0.29.24 */
#ifndef PY_SSIZE_T_CLEAN
#define PY_SSIZE_T_CLEAN
#endif /* PY_SSIZE_T_CLEAN */
#include "Python.h"
#ifndef Py_PYTHON_H
#error Python headers needed to compile C extensions, please install development version of Python.
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
#error Cython requires Python 2.6+ or Python 3.3+.
#else
#define CYTHON_ABI "0_29_13"
#define CYTHON_HEX_VERSION 0x001D0DF0
#define CYTHON_ABI "0_29_24"
#define CYTHON_HEX_VERSION 0x001D18F0
#define CYTHON_FUTURE_DIVISION 1
#include <stddef.h>
#ifndef offsetof
@@ -426,8 +428,12 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#endif
#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
#define CYTHON_PEP393_ENABLED 1
#if defined(PyUnicode_IS_READY)
#define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
0 : _PyUnicode_Ready((PyObject *)(op)))
#else
#define __Pyx_PyUnicode_READY(op) (0)
#endif
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
#define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
@@ -435,7 +441,15 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
#define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
#if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length))
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
#endif
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
#endif
#else
#define CYTHON_PEP393_ENABLED 0
#define PyUnicode_1BYTE_KIND 1
@@ -484,8 +498,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define PyString_Type PyUnicode_Type
#define PyString_Check PyUnicode_Check
#define PyString_CheckExact PyUnicode_CheckExact
#ifndef PyObject_Unicode
#define PyObject_Unicode PyObject_Str
#endif
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
#define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
@@ -496,6 +512,13 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#ifndef PySet_CheckExact
#define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
#endif
#if PY_VERSION_HEX >= 0x030900A4
#define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size)
#else
#define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size)
#endif
#if CYTHON_ASSUME_SAFE_MACROS
#define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
#else
@@ -535,7 +558,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func))
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
#else
#define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
#endif
@@ -576,11 +599,10 @@ static CYTHON_INLINE float __PYX_NAN() {
#define __Pyx_truncl truncl
#endif
#define __PYX_MARK_ERR_POS(f_index, lineno) \
{ __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; }
#define __PYX_ERR(f_index, lineno, Ln_error) \
{ \
__pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \
}
{ __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }
#ifndef __PYX_EXTERN_C
#ifdef __cplusplus
@@ -801,7 +823,7 @@ static const char *__pyx_filename;
static const char *__pyx_f[] = {
"aiohttp/_helpers.pyx",
"aiohttp\\_helpers.pyx",
"stringsource",
};
@@ -1130,6 +1152,9 @@ static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_nam
#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr
#endif
/* PyObjectGetAttrStrNoError.proto */
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name);
/* SetupReduce.proto */
static int __Pyx_setup_reduce(PyObject* type_obj);
@@ -1159,12 +1184,17 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
static void __Pyx_AddTraceback(const char *funcname, int c_line,
int py_line, const char *filename);
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
/* GCCDiagnostics.proto */
#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
#define __Pyx_HAS_GCC_DIAGNOSTIC
#endif
/* CIntFromPy.proto */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
/* CIntFromPy.proto */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
@@ -1288,6 +1318,9 @@ static PyObject *__pyx_codeobj__3;
static int __pyx_pw_7aiohttp_8_helpers_5reify_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static int __pyx_pw_7aiohttp_8_helpers_5reify_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_wrapped = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
@@ -1338,6 +1371,9 @@ static int __pyx_pf_7aiohttp_8_helpers_5reify___init__(struct __pyx_obj_7aiohttp
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__init__", 0);
/* "aiohttp/_helpers.pyx":14
@@ -1413,6 +1449,9 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_7__doc_____get__(struct __py
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__get__", 0);
/* "aiohttp/_helpers.pyx":19
@@ -1488,6 +1527,9 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_2__get__(struct __pyx_obj_7a
PyObject *__pyx_t_13 = NULL;
int __pyx_t_14;
int __pyx_t_15;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__get__", 0);
/* "aiohttp/_helpers.pyx":22
@@ -1806,6 +1848,9 @@ static int __pyx_pf_7aiohttp_8_helpers_5reify_4__set__(CYTHON_UNUSED struct __py
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__set__", 0);
/* "aiohttp/_helpers.pyx":35
@ -1866,6 +1911,9 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_6__reduce_cython__(struct __
PyObject *__pyx_t_4 = NULL;
int __pyx_t_5;
PyObject *__pyx_t_6 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__reduce_cython__", 0);
/* "(tree fragment)":5
@ -2104,6 +2152,9 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_8__setstate_cython__(struct
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__setstate_cython__", 0);
/* "(tree fragment)":17
@ -2149,6 +2200,9 @@ static PyObject *__pyx_pw_7aiohttp_8_helpers_1__pyx_unpickle_reify(PyObject *__p
PyObject *__pyx_v___pyx_type = 0;
long __pyx_v___pyx_checksum;
PyObject *__pyx_v___pyx_state = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__pyx_unpickle_reify (wrapper)", 0);
@@ -2226,6 +2280,9 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
int __pyx_t_6;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__pyx_unpickle_reify", 0);
/* "(tree fragment)":4
@@ -2414,6 +2471,9 @@ static PyObject *__pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(stru
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
PyObject *__pyx_t_8 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__pyx_unpickle_reify__set_state", 0);
/* "(tree fragment)":12
@@ -2626,7 +2686,12 @@ static PyTypeObject __pyx_type_7aiohttp_8_helpers_reify = {
sizeof(struct __pyx_obj_7aiohttp_8_helpers_reify), /*tp_basicsize*/
0, /*tp_itemsize*/
__pyx_tp_dealloc_7aiohttp_8_helpers_reify, /*tp_dealloc*/
#if PY_VERSION_HEX < 0x030800b4
0, /*tp_print*/
#endif
#if PY_VERSION_HEX >= 0x030800b4
0, /*tp_vectorcall_offset*/
#endif
0, /*tp_getattr*/
0, /*tp_setattr*/
#if PY_MAJOR_VERSION < 3
@@ -2679,6 +2744,9 @@ static PyTypeObject __pyx_type_7aiohttp_8_helpers_reify = {
#if PY_VERSION_HEX >= 0x030800b1
0, /*tp_vectorcall*/
#endif
#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
0, /*tp_print*/
#endif
};
static PyMethodDef __pyx_methods[] = {
@@ -2840,6 +2908,9 @@ static int __Pyx_modinit_function_export_code(void) {
static int __Pyx_modinit_type_init_code(void) {
__Pyx_RefNannyDeclarations
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
/*--- Type init code ---*/
if (PyType_Ready(&__pyx_type_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
@@ -2884,17 +2955,19 @@ static int __Pyx_modinit_function_import_code(void) {
}
#if PY_MAJOR_VERSION < 3
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC void
#else
#ifndef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#elif PY_MAJOR_VERSION < 3
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" void
#else
#define __Pyx_PyMODINIT_FUNC void
#endif
#else
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyObject *
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
#else
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#define __Pyx_PyMODINIT_FUNC PyObject *
#endif
#endif
@@ -2976,6 +3049,9 @@ static CYTHON_SMALL_CODE int __pyx_pymod_exec__helpers(PyObject *__pyx_pyinit_mo
#endif
{
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannyDeclarations
#if CYTHON_PEP489_MULTI_PHASE_INIT
if (__pyx_m) {
@@ -3023,11 +3099,9 @@ if (!__Pyx_RefNanny) {
#endif
/*--- Library function declarations ---*/
/*--- Threads initialization code ---*/
#if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
#ifdef WITH_THREAD /* Python build with threading support? */
#if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
PyEval_InitThreads();
#endif
#endif
/*--- Module creation code ---*/
#if CYTHON_PEP489_MULTI_PHASE_INIT
__pyx_m = __pyx_pyinit_module;
@@ -3064,14 +3138,14 @@ if (!__Pyx_RefNanny) {
}
#endif
/*--- Builtin init code ---*/
if (__Pyx_InitCachedBuiltins() < 0) goto __pyx_L1_error;
if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Constants init code ---*/
if (__Pyx_InitCachedConstants() < 0) goto __pyx_L1_error;
if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Global type/function init code ---*/
(void)__Pyx_modinit_global_init_code();
(void)__Pyx_modinit_variable_export_code();
(void)__Pyx_modinit_function_export_code();
if (unlikely(__Pyx_modinit_type_init_code() != 0)) goto __pyx_L1_error;
if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
(void)__Pyx_modinit_type_import_code();
(void)__Pyx_modinit_variable_import_code();
(void)__Pyx_modinit_function_import_code();
@@ -3206,7 +3280,7 @@ static int __Pyx_ParseOptionalKeywords(
}
name = first_kw_arg;
#if PY_MAJOR_VERSION < 3
if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) {
if (likely(PyString_Check(key))) {
while (*name) {
if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
&& _PyString_Eq(**name, key)) {
@@ -3233,7 +3307,7 @@ static int __Pyx_ParseOptionalKeywords(
while (*name) {
int cmp = (**name == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
(__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**name, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
@@ -3249,7 +3323,7 @@ static int __Pyx_ParseOptionalKeywords(
while (argname != first_kw_arg) {
int cmp = (**argname == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
(__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**argname, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
@@ -3729,7 +3803,7 @@ done:
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {
PyObject *result;
ternaryfunc call = func->ob_type->tp_call;
ternaryfunc call = Py_TYPE(func)->tp_call;
if (unlikely(!call))
return PyObject_Call(func, arg, kw);
if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
@@ -3816,7 +3890,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObjec
if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) {
return __Pyx_PyObject_CallMethO(func, arg);
#if CYTHON_FAST_PYCCALL
} else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) {
} else if (__Pyx_PyFastCFunction_Check(func)) {
return __Pyx_PyCFunction_FastCall(func, &arg, 1);
#endif
}
@@ -4136,7 +4210,7 @@ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {
{
#if PY_MAJOR_VERSION >= 3
if (level == -1) {
if (strchr(__Pyx_MODULE_NAME, '.')) {
if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) {
module = PyImport_ImportModuleLevelObject(
name, global_dict, empty_dict, list, 1);
if (!module) {
@@ -4253,6 +4327,28 @@ static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_nam
}
#endif
/* PyObjectGetAttrStrNoError */
static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) {
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
__Pyx_PyErr_Clear();
}
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) {
PyObject *result;
#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1
PyTypeObject* tp = Py_TYPE(obj);
if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) {
return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1);
}
#endif
result = __Pyx_PyObject_GetAttrStr(obj, attr_name);
if (unlikely(!result)) {
__Pyx_PyObject_GetAttrStr_ClearAttributeError();
}
return result;
}
/* SetupReduce */
static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) {
int ret;
@@ -4280,43 +4376,51 @@ static int __Pyx_setup_reduce(PyObject* type_obj) {
PyObject *setstate = NULL;
PyObject *setstate_cython = NULL;
#if CYTHON_USE_PYTYPE_LOOKUP
if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto GOOD;
if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#else
if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto GOOD;
if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#endif
#if CYTHON_USE_PYTYPE_LOOKUP
object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD;
object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#else
object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD;
object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#endif
reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto BAD;
reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD;
if (reduce_ex == object_reduce_ex) {
#if CYTHON_USE_PYTYPE_LOOKUP
object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD;
object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#else
object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD;
object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#endif
reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto BAD;
reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD;
if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) {
reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto BAD;
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto BAD;
reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython);
if (likely(reduce_cython)) {
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
} else if (reduce == object_reduce || PyErr_Occurred()) {
goto __PYX_BAD;
}
setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate);
if (!setstate) PyErr_Clear();
if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) {
setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto BAD;
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto BAD;
setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython);
if (likely(setstate_cython)) {
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
} else if (!setstate || PyErr_Occurred()) {
goto __PYX_BAD;
}
}
PyType_Modified((PyTypeObject*)type_obj);
}
}
goto GOOD;
BAD:
goto __PYX_GOOD;
__PYX_BAD:
if (!PyErr_Occurred())
PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name);
ret = -1;
GOOD:
__PYX_GOOD:
#if !CYTHON_USE_PYTYPE_LOOKUP
Py_XDECREF(object_reduce);
Py_XDECREF(object_reduce_ex);
@@ -4331,7 +4435,7 @@ GOOD:
/* CLineInTraceback */
#ifndef CYTHON_CLINE_IN_TRACEBACK
static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) {
static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
PyObject *use_cline;
PyObject *ptype, *pvalue, *ptraceback;
#if CYTHON_COMPILING_IN_CPYTHON
@@ -4435,7 +4539,7 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
int new_max = __pyx_code_cache.max_count + 64;
entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
__pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry));
__pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
if (unlikely(!entries)) {
return;
}
@@ -4558,40 +4662,16 @@ bad:
return (target_type) value;\
}
/* CIntToPy */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
const int is_unsigned = neg_one > const_zero;
if (is_unsigned) {
if (sizeof(long) < sizeof(long)) {
return PyInt_FromLong((long) value);
} else if (sizeof(long) <= sizeof(unsigned long)) {
return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
}
} else {
if (sizeof(long) <= sizeof(long)) {
return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
}
}
{
int one = 1; int little = (int)*(unsigned char *)&one;
unsigned char *bytes = (unsigned char *)&value;
return _PyLong_FromByteArray(bytes, sizeof(long),
little, !is_unsigned);
}
}
/* CIntFromPy */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {
@@ -4778,9 +4858,54 @@ raise_neg_overflow:
return (long) -1;
}
/* CIntToPy */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
if (is_unsigned) {
if (sizeof(long) < sizeof(long)) {
return PyInt_FromLong((long) value);
} else if (sizeof(long) <= sizeof(unsigned long)) {
return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
}
} else {
if (sizeof(long) <= sizeof(long)) {
return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
}
}
{
int one = 1; int little = (int)*(unsigned char *)&one;
unsigned char *bytes = (unsigned char *)&value;
return _PyLong_FromByteArray(bytes, sizeof(long),
little, !is_unsigned);
}
}
/* CIntFromPy */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const int neg_one = (int) -1, const_zero = (int) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {

aiohttp/_helpers.pyi

@@ -2,7 +2,5 @@ from typing import Any
class reify:
def __init__(self, wrapped: Any) -> None: ...
def __get__(self, inst: Any, owner: Any) -> Any: ...
def __set__(self, inst: Any, value: Any) -> None: ...
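
reify is a cached-property descriptor: the Cython class in _helpers.c above (and its pure-Python twin in aiohttp.helpers) runs the wrapped method once per instance, serves later reads from the instance's _cache dict, and rejects assignment via __set__. A usage sketch (assuming the pure-Python implementation; the _cache attribute is where aiohttp's variant stores results):

from aiohttp.helpers import reify

class Request:
    def __init__(self) -> None:
        self._cache = {}  # reify stores computed values here

    @reify
    def headers(self):
        print("computed once")
        return {"Host": "example.com"}

r = Request()
r.headers  # prints "computed once" and caches the dict
r.headers  # served from r._cache; the method does not run again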

File diff suppressed because it is too large

aiohttp/_http_parser.pyx

@@ -3,27 +3,44 @@
# Based on https://github.com/MagicStack/httptools
#
from __future__ import absolute_import, print_function
from cpython.mem cimport PyMem_Malloc, PyMem_Free
from libc.string cimport memcpy
from cpython cimport (PyObject_GetBuffer, PyBuffer_Release, PyBUF_SIMPLE,
Py_buffer, PyBytes_AsString, PyBytes_AsStringAndSize)
from multidict import (CIMultiDict as _CIMultiDict,
CIMultiDictProxy as _CIMultiDictProxy)
from cpython cimport (
Py_buffer,
PyBUF_SIMPLE,
PyBuffer_Release,
PyBytes_AsString,
PyBytes_AsStringAndSize,
PyObject_GetBuffer,
)
from cpython.mem cimport PyMem_Free, PyMem_Malloc
from libc.limits cimport ULLONG_MAX
from libc.string cimport memcpy
from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
from yarl import URL as _URL
from aiohttp import hdrs
from .http_exceptions import (
BadHttpMessage, BadStatusLine, InvalidHeader, LineTooLong, InvalidURLError,
PayloadEncodingError, ContentLengthError, TransferEncodingError)
from .http_writer import (HttpVersion as _HttpVersion,
HttpVersion10 as _HttpVersion10,
HttpVersion11 as _HttpVersion11)
BadHttpMessage,
BadStatusLine,
ContentLengthError,
InvalidHeader,
InvalidURLError,
LineTooLong,
PayloadEncodingError,
TransferEncodingError,
)
from .http_parser import DeflateBuffer as _DeflateBuffer
from .streams import (EMPTY_PAYLOAD as _EMPTY_PAYLOAD,
StreamReader as _StreamReader)
from .http_writer import (
HttpVersion as _HttpVersion,
HttpVersion10 as _HttpVersion10,
HttpVersion11 as _HttpVersion11,
)
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
cimport cython
from aiohttp cimport _cparser as cparser
include "_headers.pxi"
@@ -63,13 +80,13 @@ cdef inline object extend(object buf, const char* at, size_t length):
memcpy(ptr + s, at, length)
DEF METHODS_COUNT = 34;
DEF METHODS_COUNT = 46;
cdef list _http_method = []
for i in range(METHODS_COUNT):
_http_method.append(
cparser.http_method_str(<cparser.http_method> i).decode('ascii'))
cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
cdef inline str http_method_str(int i):
@@ -255,8 +272,8 @@ cdef _new_response_message(object version,
cdef class HttpParser:
cdef:
cparser.http_parser* _cparser
cparser.http_parser_settings* _csettings
cparser.llhttp_t* _cparser
cparser.llhttp_settings_t* _csettings
bytearray _raw_name
bytearray _raw_value
@@ -270,6 +287,7 @@ cdef class HttpParser:
size_t _max_field_size
size_t _max_headers
bint _response_with_body
bint _read_until_eof
bint _started
object _url
@@ -285,19 +303,20 @@ cdef class HttpParser:
object _payload_exception
object _last_error
bint _auto_decompress
int _limit
str _content_encoding
Py_buffer py_buf
def __cinit__(self):
self._cparser = <cparser.http_parser*> \
PyMem_Malloc(sizeof(cparser.http_parser))
self._cparser = <cparser.llhttp_t*> \
PyMem_Malloc(sizeof(cparser.llhttp_t))
if self._cparser is NULL:
raise MemoryError()
self._csettings = <cparser.http_parser_settings*> \
PyMem_Malloc(sizeof(cparser.http_parser_settings))
self._csettings = <cparser.llhttp_settings_t*> \
PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
if self._csettings is NULL:
raise MemoryError()
@@ -305,17 +324,20 @@ cdef class HttpParser:
PyMem_Free(self._cparser)
PyMem_Free(self._csettings)
cdef _init(self, cparser.http_parser_type mode,
object protocol, object loop, object timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint auto_decompress=True):
cparser.http_parser_init(self._cparser, mode)
cdef _init(
self, cparser.llhttp_type mode,
object protocol, object loop, int limit,
object timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True,
):
cparser.llhttp_settings_init(self._csettings)
cparser.llhttp_init(self._cparser, mode, self._csettings)
self._cparser.data = <void*>self
self._cparser.content_length = 0
cparser.http_parser_settings_init(self._csettings)
self._protocol = protocol
self._loop = loop
self._timer = timer
@@ -334,6 +356,7 @@ cdef class HttpParser:
self._max_headers = max_headers
self._max_field_size = max_field_size
self._response_with_body = response_with_body
self._read_until_eof = read_until_eof
self._upgraded = False
self._auto_decompress = auto_decompress
self._content_encoding = None
@@ -350,6 +373,7 @@ cdef class HttpParser:
self._csettings.on_chunk_complete = cb_on_chunk_complete
self._last_error = None
self._limit = limit
cdef _process_header(self):
if self._raw_name:
@@ -394,7 +418,7 @@ cdef class HttpParser:
self._process_header()
method = http_method_str(self._cparser.method)
should_close = not cparser.http_should_keep_alive(self._cparser)
should_close = not cparser.llhttp_should_keep_alive(self._cparser)
upgrade = self._cparser.upgrade
chunked = self._cparser.flags & cparser.F_CHUNKED
@@ -427,10 +451,16 @@ cdef class HttpParser:
headers, raw_headers, should_close, encoding,
upgrade, chunked)
if (self._cparser.content_length > 0 or chunked or
self._cparser.method == 5): # CONNECT: 5
if (
ULLONG_MAX > self._cparser.content_length > 0 or chunked or
self._cparser.method == 5 or # CONNECT: 5
(self._cparser.status_code >= 199 and
self._cparser.content_length == 0 and
self._read_until_eof)
):
payload = StreamReader(
self._protocol, timer=self._timer, loop=self._loop)
self._protocol, timer=self._timer, loop=self._loop,
limit=self._limit)
else:
payload = EMPTY_PAYLOAD
@@ -457,7 +487,7 @@ cdef class HttpParser:
pass
cdef inline http_version(self):
cdef cparser.http_parser* parser = self._cparser
cdef cparser.llhttp_t* parser = self._cparser
if parser.http_major == 1:
if parser.http_minor == 0:
@@ -476,12 +506,11 @@ cdef class HttpParser:
if self._cparser.flags & cparser.F_CHUNKED:
raise TransferEncodingError(
"Not enough data for satisfy transfer length header.")
elif self._cparser.flags & cparser.F_CONTENTLENGTH:
elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
raise ContentLengthError(
"Not enough data for satisfy content length header.")
elif self._cparser.http_errno != cparser.HPE_OK:
desc = cparser.http_errno_description(
<cparser.http_errno> self._cparser.http_errno)
elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
desc = cparser.llhttp_get_error_reason(self._cparser)
raise PayloadEncodingError(desc.decode('latin-1'))
else:
self._payload.feed_eof()
@@ -494,31 +523,30 @@ cdef class HttpParser:
cdef:
size_t data_len
size_t nb
cdef cparser.llhttp_errno_t errno
PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
data_len = <size_t>self.py_buf.len
nb = cparser.http_parser_execute(
errno = cparser.llhttp_execute(
self._cparser,
self._csettings,
<char*>self.py_buf.buf,
data_len)
if errno is cparser.HPE_PAUSED_UPGRADE:
cparser.llhttp_resume_after_upgrade(self._cparser)
nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf
PyBuffer_Release(&self.py_buf)
# not sure about cparser.HPE_INVALID_METHOD:
# it seems to raise an error for a valid request
# test_client_functional.py::test_post_data_with_bytesio_file
if (self._cparser.http_errno != cparser.HPE_OK and
(self._cparser.http_errno != cparser.HPE_INVALID_METHOD or
self._cparser.method == 0)):
if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
if self._payload_error == 0:
if self._last_error is not None:
ex = self._last_error
self._last_error = None
else:
ex = parser_error_from_errno(
<cparser.http_errno> self._cparser.http_errno)
ex = parser_error_from_errno(self._cparser)
self._payload = None
raise ex
@@ -533,44 +561,76 @@ cdef class HttpParser:
else:
return messages, False, b''
def set_upgraded(self, val):
self._upgraded = val
cdef class HttpRequestParser(HttpParser):
def __init__(self, protocol, loop, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False):
self._init(cparser.HTTP_REQUEST, protocol, loop, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body)
def __init__(
self, protocol, loop, int limit, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True,
):
self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body, read_until_eof,
auto_decompress)
cdef object _on_status_complete(self):
cdef Py_buffer py_buf
if not self._buf:
return
self._path = self._buf.decode('utf-8', 'surrogateescape')
if self._cparser.method == 5: # CONNECT
self._url = URL(self._path)
else:
PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE)
try:
self._url = _parse_url(<char*>py_buf.buf,
py_buf.len)
finally:
PyBuffer_Release(&py_buf)
PyByteArray_Resize(self._buf, 0)
cdef int idx1, idx2
if not self._buf:
return
self._path = self._buf.decode('utf-8', 'surrogateescape')
try:
idx3 = len(self._path)
idx1 = self._path.find("?")
if idx1 == -1:
query = ""
idx2 = self._path.find("#")
if idx2 == -1:
path = self._path
fragment = ""
else:
path = self._path[0: idx2]
fragment = self._path[idx2+1:]
else:
path = self._path[0:idx1]
idx1 += 1
idx2 = self._path.find("#", idx1+1)
if idx2 == -1:
query = self._path[idx1:]
fragment = ""
else:
query = self._path[idx1: idx2]
fragment = self._path[idx2+1:]
self._url = URL.build(
path=path,
query_string=query,
fragment=fragment,
encoded=True,
)
finally:
PyByteArray_Resize(self._buf, 0)
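
The new _on_status_complete() splits the raw request target into path, query, and fragment by hand and passes the pieces to yarl's URL.build() with encoded=True, instead of round-tripping through the C URL parser that llhttp no longer provides. The same split logic in plain Python, assuming yarl is installed:

from yarl import URL

def split_target(target: str) -> URL:
    idx1 = target.find("?")
    if idx1 == -1:
        query = ""
        idx2 = target.find("#")
        if idx2 == -1:
            path, fragment = target, ""
        else:
            path, fragment = target[:idx2], target[idx2 + 1:]
    else:
        path = target[:idx1]
        idx1 += 1
        idx2 = target.find("#", idx1 + 1)
        if idx2 == -1:
            query, fragment = target[idx1:], ""
        else:
            query, fragment = target[idx1:idx2], target[idx2 + 1:]
    return URL.build(path=path, query_string=query, fragment=fragment, encoded=True)

print(split_target("/search?q=llhttp#top"))  # /search?q=llhttp#top
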
cdef class HttpResponseParser(HttpParser):
def __init__(self, protocol, loop, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True):
self._init(cparser.HTTP_RESPONSE, protocol, loop, timer,
def __init__(
self, protocol, loop, int limit, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True
):
self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body, auto_decompress)
payload_exception, response_with_body, read_until_eof,
auto_decompress)
cdef object _on_status_complete(self):
if self._buf:
@ -579,7 +639,7 @@ cdef class HttpResponseParser(HttpParser):
else:
self._reason = self._reason or ''
cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
pyparser._started = True
@ -591,7 +651,7 @@ cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
return 0
cdef int cb_on_url(cparser.http_parser* parser,
cdef int cb_on_url(cparser.llhttp_t* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
@ -606,7 +666,7 @@ cdef int cb_on_url(cparser.http_parser* parser,
return 0
cdef int cb_on_status(cparser.http_parser* parser,
cdef int cb_on_status(cparser.llhttp_t* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef str reason
@ -622,7 +682,7 @@ cdef int cb_on_status(cparser.http_parser* parser,
return 0
cdef int cb_on_header_field(cparser.http_parser* parser,
cdef int cb_on_header_field(cparser.llhttp_t* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef Py_ssize_t size
@ -640,7 +700,7 @@ cdef int cb_on_header_field(cparser.http_parser* parser,
return 0
cdef int cb_on_header_value(cparser.http_parser* parser,
cdef int cb_on_header_value(cparser.llhttp_t* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef Py_ssize_t size
@ -657,7 +717,7 @@ cdef int cb_on_header_value(cparser.http_parser* parser,
return 0
cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1:
cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_status_complete()
@ -672,7 +732,7 @@ cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1:
return 0
cdef int cb_on_body(cparser.http_parser* parser,
cdef int cb_on_body(cparser.llhttp_t* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef bytes body = at[:length]
@ -689,7 +749,7 @@ cdef int cb_on_body(cparser.http_parser* parser,
return 0
cdef int cb_on_message_complete(cparser.http_parser* parser) except -1:
cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._started = False
@ -701,7 +761,7 @@ cdef int cb_on_message_complete(cparser.http_parser* parser) except -1:
return 0
cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1:
cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_chunk_header()
@ -712,7 +772,7 @@ cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1:
return 0
cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1:
cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_chunk_complete()
@ -723,19 +783,21 @@ cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1:
return 0
cdef parser_error_from_errno(cparser.http_errno errno):
cdef bytes desc = cparser.http_errno_description(errno)
cdef parser_error_from_errno(cparser.llhttp_t* parser):
cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
cdef bytes desc = cparser.llhttp_get_error_reason(parser)
if errno in (cparser.HPE_CB_message_begin,
cparser.HPE_CB_url,
cparser.HPE_CB_header_field,
cparser.HPE_CB_header_value,
cparser.HPE_CB_headers_complete,
cparser.HPE_CB_body,
cparser.HPE_CB_message_complete,
cparser.HPE_CB_status,
cparser.HPE_CB_chunk_header,
cparser.HPE_CB_chunk_complete):
if errno in (cparser.HPE_CB_MESSAGE_BEGIN,
cparser.HPE_CB_HEADERS_COMPLETE,
cparser.HPE_CB_MESSAGE_COMPLETE,
cparser.HPE_CB_CHUNK_HEADER,
cparser.HPE_CB_CHUNK_COMPLETE,
cparser.HPE_INVALID_CONSTANT,
cparser.HPE_INVALID_HEADER_TOKEN,
cparser.HPE_INVALID_CONTENT_LENGTH,
cparser.HPE_INVALID_CHUNK_SIZE,
cparser.HPE_INVALID_EOF_STATE,
cparser.HPE_INVALID_TRANSFER_ENCODING):
cls = BadHttpMessage
elif errno == cparser.HPE_INVALID_STATUS:
@ -744,6 +806,9 @@ cdef parser_error_from_errno(cparser.http_errno errno):
elif errno == cparser.HPE_INVALID_METHOD:
cls = BadStatusLine
elif errno == cparser.HPE_INVALID_VERSION:
cls = BadStatusLine
elif errno == cparser.HPE_INVALID_URL:
cls = InvalidURLError
@ -751,96 +816,3 @@ cdef parser_error_from_errno(cparser.http_errno errno):
cls = BadHttpMessage
return cls(desc.decode('latin-1'))
def parse_url(url):
cdef:
Py_buffer py_buf
char* buf_data
PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
try:
buf_data = <char*>py_buf.buf
return _parse_url(buf_data, py_buf.len)
finally:
PyBuffer_Release(&py_buf)
cdef _parse_url(char* buf_data, size_t length):
cdef:
cparser.http_parser_url* parsed
int res
str schema = None
str host = None
object port = None
str path = None
str query = None
str fragment = None
str user = None
str password = None
str userinfo = None
object result = None
int off
int ln
parsed = <cparser.http_parser_url*> \
PyMem_Malloc(sizeof(cparser.http_parser_url))
if parsed is NULL:
raise MemoryError()
cparser.http_parser_url_init(parsed)
try:
res = cparser.http_parser_parse_url(buf_data, length, 0, parsed)
if res == 0:
if parsed.field_set & (1 << cparser.UF_SCHEMA):
off = parsed.field_data[<int>cparser.UF_SCHEMA].off
ln = parsed.field_data[<int>cparser.UF_SCHEMA].len
schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
schema = ''
if parsed.field_set & (1 << cparser.UF_HOST):
off = parsed.field_data[<int>cparser.UF_HOST].off
ln = parsed.field_data[<int>cparser.UF_HOST].len
host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
host = ''
if parsed.field_set & (1 << cparser.UF_PORT):
port = parsed.port
if parsed.field_set & (1 << cparser.UF_PATH):
off = parsed.field_data[<int>cparser.UF_PATH].off
ln = parsed.field_data[<int>cparser.UF_PATH].len
path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
path = ''
if parsed.field_set & (1 << cparser.UF_QUERY):
off = parsed.field_data[<int>cparser.UF_QUERY].off
ln = parsed.field_data[<int>cparser.UF_QUERY].len
query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
query = ''
if parsed.field_set & (1 << cparser.UF_FRAGMENT):
off = parsed.field_data[<int>cparser.UF_FRAGMENT].off
ln = parsed.field_data[<int>cparser.UF_FRAGMENT].len
fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
fragment = ''
if parsed.field_set & (1 << cparser.UF_USERINFO):
off = parsed.field_data[<int>cparser.UF_USERINFO].off
ln = parsed.field_data[<int>cparser.UF_USERINFO].len
userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
user, sep, password = userinfo.partition(':')
return URL_build(scheme=schema,
user=user, password=password, host=host, port=port,
path=path, query=query, fragment=fragment)
else:
raise InvalidURLError("invalid url {!r}".format(buf_data))
finally:
PyMem_Free(parsed)
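
The deleted parse_url()/_parse_url() helpers wrapped http_parser_parse_url() and copied each URL field out of the C struct by hand; llhttp ships no URL parser, so that work now falls to yarl. What the helpers produced can be had from yarl directly (a sketch, assuming yarl is installed):

from yarl import URL

url = URL("https://user:pass@example.com:8443/a/b?x=1#frag")
print(url.scheme, url.host, url.port)            # https example.com 8443
print(url.user, url.password)                    # user pass
print(url.path, url.query_string, url.fragment)  # /a/b x=1 frag
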

File diff suppressed because it is too large.

aiohttp/_http_writer.pyx

@ -1,10 +1,9 @@
from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
from cpython.object cimport PyObject_Str
from libc.stdint cimport uint8_t, uint64_t
from libc.string cimport memcpy
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Malloc, PyMem_Realloc, PyMem_Free
from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.object cimport PyObject_Str
from multidict import istr
@ -112,6 +111,14 @@ cdef str to_str(object s):
return str(s)
cdef void _safe_header(str string) except *:
if "\r" in string or "\n" in string:
raise ValueError(
"Newline or carriage return character detected in HTTP status message or "
"header. This is a potential security issue."
)
def _serialize_headers(str status_line, headers):
cdef Writer writer
cdef object key
@ -120,6 +127,10 @@ def _serialize_headers(str status_line, headers):
_init_writer(&writer)
for key, val in headers.items():
_safe_header(to_str(key))
_safe_header(to_str(val))
try:
if _write_str(&writer, status_line) < 0:
raise
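
The new _safe_header() rejects any header name or value containing CR or LF before serialization, closing off header-injection (response-splitting) attempts. The same guard in plain Python:

def safe_header(value: str) -> str:
    # reject values that could smuggle an extra line into the wire format
    if "\r" in value or "\n" in value:
        raise ValueError(
            "Newline or carriage return character detected in HTTP status "
            "message or header. This is a potential security issue."
        )
    return value

safe_header("text/html")                     # fine
# safe_header("x\r\nSet-Cookie: pwned=1")    # raises ValueError
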

aiohttp/_websocket.c

@ -1,14 +1,16 @@
/* Generated by Cython 0.29.13 */
/* Generated by Cython 0.29.24 */
#ifndef PY_SSIZE_T_CLEAN
#define PY_SSIZE_T_CLEAN
#endif /* PY_SSIZE_T_CLEAN */
#include "Python.h"
#ifndef Py_PYTHON_H
#error Python headers needed to compile C extensions, please install development version of Python.
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
#error Cython requires Python 2.6+ or Python 3.3+.
#else
#define CYTHON_ABI "0_29_13"
#define CYTHON_HEX_VERSION 0x001D0DF0
#define CYTHON_ABI "0_29_24"
#define CYTHON_HEX_VERSION 0x001D18F0
#define CYTHON_FUTURE_DIVISION 1
#include <stddef.h>
#ifndef offsetof
@ -426,8 +428,12 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#endif
#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
#define CYTHON_PEP393_ENABLED 1
#if defined(PyUnicode_IS_READY)
#define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
0 : _PyUnicode_Ready((PyObject *)(op)))
#else
#define __Pyx_PyUnicode_READY(op) (0)
#endif
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
#define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
@ -435,7 +441,15 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
#define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
#if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length))
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
#endif
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
#endif
#else
#define CYTHON_PEP393_ENABLED 0
#define PyUnicode_1BYTE_KIND 1
@ -484,8 +498,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define PyString_Type PyUnicode_Type
#define PyString_Check PyUnicode_Check
#define PyString_CheckExact PyUnicode_CheckExact
#ifndef PyObject_Unicode
#define PyObject_Unicode PyObject_Str
#endif
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
#define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
@ -496,6 +512,13 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#ifndef PySet_CheckExact
#define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
#endif
#if PY_VERSION_HEX >= 0x030900A4
#define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size)
#else
#define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size)
#endif
#if CYTHON_ASSUME_SAFE_MACROS
#define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
#else
@ -535,7 +558,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func))
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
#else
#define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
#endif
@ -576,11 +599,10 @@ static CYTHON_INLINE float __PYX_NAN() {
#define __Pyx_truncl truncl
#endif
#define __PYX_MARK_ERR_POS(f_index, lineno) \
{ __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; }
#define __PYX_ERR(f_index, lineno, Ln_error) \
{ \
__pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \
}
{ __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }
#ifndef __PYX_EXTERN_C
#ifdef __cplusplus
@ -805,7 +827,7 @@ static const char *__pyx_filename;
static const char *__pyx_f[] = {
"aiohttp/_websocket.pyx",
"aiohttp\\_websocket.pyx",
"type.pxd",
"bool.pxd",
"complex.pxd",
@ -1043,6 +1065,11 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
static void __Pyx_AddTraceback(const char *funcname, int c_line,
int py_line, const char *filename);
/* GCCDiagnostics.proto */
#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
#define __Pyx_HAS_GCC_DIAGNOSTIC
#endif
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
@ -1179,7 +1206,7 @@ static const char __pyx_k_uint64_msk[] = "uint64_msk";
static const char __pyx_k_aiohttp__websocket[] = "aiohttp._websocket";
static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback";
static const char __pyx_k_websocket_mask_cython[] = "_websocket_mask_cython";
static const char __pyx_k_aiohttp__websocket_pyx[] = "aiohttp/_websocket.pyx";
static const char __pyx_k_aiohttp__websocket_pyx[] = "aiohttp\\_websocket.pyx";
static PyObject *__pyx_n_s_aiohttp__websocket;
static PyObject *__pyx_kp_s_aiohttp__websocket_pyx;
static PyObject *__pyx_n_s_cline_in_traceback;
@ -1201,8 +1228,8 @@ static PyObject *__pyx_tuple_;
static PyObject *__pyx_codeobj__2;
/* Late includes */
/* "aiohttp/_websocket.pyx":9
* from libc.stdint cimport uint32_t, uint64_t, uintmax_t
/* "aiohttp/_websocket.pyx":11
*
*
* def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<<
* """Note, this function mutates its `data` argument
@ -1216,6 +1243,9 @@ static PyMethodDef __pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython = {"
static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_mask = 0;
PyObject *__pyx_v_data = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("_websocket_mask_cython (wrapper)", 0);
@ -1242,11 +1272,11 @@ static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject
case 1:
if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, 1); __PYX_ERR(0, 9, __pyx_L3_error)
__Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, 1); __PYX_ERR(0, 11, __pyx_L3_error)
}
}
if (unlikely(kw_args > 0)) {
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_websocket_mask_cython") < 0)) __PYX_ERR(0, 9, __pyx_L3_error)
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_websocket_mask_cython") < 0)) __PYX_ERR(0, 11, __pyx_L3_error)
}
} else if (PyTuple_GET_SIZE(__pyx_args) != 2) {
goto __pyx_L5_argtuple_error;
@ -1259,7 +1289,7 @@ static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject
}
goto __pyx_L4_argument_unpacking_done;
__pyx_L5_argtuple_error:;
__Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 9, __pyx_L3_error)
__Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 11, __pyx_L3_error)
__pyx_L3_error:;
__Pyx_AddTraceback("aiohttp._websocket._websocket_mask_cython", __pyx_clineno, __pyx_lineno, __pyx_filename);
__Pyx_RefNannyFinishContext();
@ -1292,11 +1322,14 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
Py_ssize_t __pyx_t_9;
Py_ssize_t __pyx_t_10;
Py_ssize_t __pyx_t_11;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("_websocket_mask_cython", 0);
__Pyx_INCREF(__pyx_v_mask);
__Pyx_INCREF(__pyx_v_data);
/* "aiohttp/_websocket.pyx":20
/* "aiohttp/_websocket.pyx":22
* uint64_t uint64_msk
*
* assert len(mask) == 4 # <<<<<<<<<<<<<<
@ -1305,15 +1338,15 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
#ifndef CYTHON_WITHOUT_ASSERTIONS
if (unlikely(!Py_OptimizeFlag)) {
__pyx_t_1 = PyObject_Length(__pyx_v_mask); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 20, __pyx_L1_error)
__pyx_t_1 = PyObject_Length(__pyx_v_mask); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 22, __pyx_L1_error)
if (unlikely(!((__pyx_t_1 == 4) != 0))) {
PyErr_SetNone(PyExc_AssertionError);
__PYX_ERR(0, 20, __pyx_L1_error)
__PYX_ERR(0, 22, __pyx_L1_error)
}
}
#endif
/* "aiohttp/_websocket.pyx":22
/* "aiohttp/_websocket.pyx":24
* assert len(mask) == 4
*
* if not isinstance(mask, bytes): # <<<<<<<<<<<<<<
@ -1324,19 +1357,19 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0);
if (__pyx_t_3) {
/* "aiohttp/_websocket.pyx":23
/* "aiohttp/_websocket.pyx":25
*
* if not isinstance(mask, bytes):
* mask = bytes(mask) # <<<<<<<<<<<<<<
*
* if isinstance(data, bytearray):
*/
__pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_mask); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 23, __pyx_L1_error)
__pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_mask); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 25, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF_SET(__pyx_v_mask, __pyx_t_4);
__pyx_t_4 = 0;
/* "aiohttp/_websocket.pyx":22
/* "aiohttp/_websocket.pyx":24
* assert len(mask) == 4
*
* if not isinstance(mask, bytes): # <<<<<<<<<<<<<<
@ -1345,7 +1378,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
}
/* "aiohttp/_websocket.pyx":25
/* "aiohttp/_websocket.pyx":27
* mask = bytes(mask)
*
* if isinstance(data, bytearray): # <<<<<<<<<<<<<<
@ -1356,7 +1389,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_2 = (__pyx_t_3 != 0);
if (__pyx_t_2) {
/* "aiohttp/_websocket.pyx":26
/* "aiohttp/_websocket.pyx":28
*
* if isinstance(data, bytearray):
* data = <bytearray>data # <<<<<<<<<<<<<<
@ -1368,7 +1401,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__Pyx_DECREF_SET(__pyx_v_data, __pyx_t_4);
__pyx_t_4 = 0;
/* "aiohttp/_websocket.pyx":25
/* "aiohttp/_websocket.pyx":27
* mask = bytes(mask)
*
* if isinstance(data, bytearray): # <<<<<<<<<<<<<<
@ -1378,7 +1411,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
goto __pyx_L4;
}
/* "aiohttp/_websocket.pyx":28
/* "aiohttp/_websocket.pyx":30
* data = <bytearray>data
* else:
* data = bytearray(data) # <<<<<<<<<<<<<<
@ -1386,45 +1419,45 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
* data_len = len(data)
*/
/*else*/ {
__pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyByteArray_Type)), __pyx_v_data); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 28, __pyx_L1_error)
__pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyByteArray_Type)), __pyx_v_data); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 30, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF_SET(__pyx_v_data, __pyx_t_4);
__pyx_t_4 = 0;
}
__pyx_L4:;
/* "aiohttp/_websocket.pyx":30
/* "aiohttp/_websocket.pyx":32
* data = bytearray(data)
*
* data_len = len(data) # <<<<<<<<<<<<<<
* in_buf = <unsigned char*>PyByteArray_AsString(data)
* mask_buf = <const unsigned char*>PyBytes_AsString(mask)
*/
__pyx_t_1 = PyObject_Length(__pyx_v_data); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 30, __pyx_L1_error)
__pyx_t_1 = PyObject_Length(__pyx_v_data); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 32, __pyx_L1_error)
__pyx_v_data_len = __pyx_t_1;
/* "aiohttp/_websocket.pyx":31
/* "aiohttp/_websocket.pyx":33
*
* data_len = len(data)
* in_buf = <unsigned char*>PyByteArray_AsString(data) # <<<<<<<<<<<<<<
* mask_buf = <const unsigned char*>PyBytes_AsString(mask)
* uint32_msk = (<uint32_t*>mask_buf)[0]
*/
if (!(likely(PyByteArray_CheckExact(__pyx_v_data))||((__pyx_v_data) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytearray", Py_TYPE(__pyx_v_data)->tp_name), 0))) __PYX_ERR(0, 31, __pyx_L1_error)
__pyx_t_5 = PyByteArray_AsString(((PyObject*)__pyx_v_data)); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 31, __pyx_L1_error)
if (!(likely(PyByteArray_CheckExact(__pyx_v_data))||((__pyx_v_data) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytearray", Py_TYPE(__pyx_v_data)->tp_name), 0))) __PYX_ERR(0, 33, __pyx_L1_error)
__pyx_t_5 = PyByteArray_AsString(((PyObject*)__pyx_v_data)); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 33, __pyx_L1_error)
__pyx_v_in_buf = ((unsigned char *)__pyx_t_5);
/* "aiohttp/_websocket.pyx":32
/* "aiohttp/_websocket.pyx":34
* data_len = len(data)
* in_buf = <unsigned char*>PyByteArray_AsString(data)
* mask_buf = <const unsigned char*>PyBytes_AsString(mask) # <<<<<<<<<<<<<<
* uint32_msk = (<uint32_t*>mask_buf)[0]
*
*/
__pyx_t_5 = PyBytes_AsString(__pyx_v_mask); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 32, __pyx_L1_error)
__pyx_t_5 = PyBytes_AsString(__pyx_v_mask); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 34, __pyx_L1_error)
__pyx_v_mask_buf = ((unsigned char const *)__pyx_t_5);
/* "aiohttp/_websocket.pyx":33
/* "aiohttp/_websocket.pyx":35
* in_buf = <unsigned char*>PyByteArray_AsString(data)
* mask_buf = <const unsigned char*>PyBytes_AsString(mask)
* uint32_msk = (<uint32_t*>mask_buf)[0] # <<<<<<<<<<<<<<
@ -1433,7 +1466,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
__pyx_v_uint32_msk = (((uint32_t *)__pyx_v_mask_buf)[0]);
/* "aiohttp/_websocket.pyx":38
/* "aiohttp/_websocket.pyx":40
* # does it need in python ?! malloc() always aligns to sizeof(long) bytes
*
* if sizeof(size_t) >= 8: # <<<<<<<<<<<<<<
@ -1443,7 +1476,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_2 = (((sizeof(size_t)) >= 8) != 0);
if (__pyx_t_2) {
/* "aiohttp/_websocket.pyx":39
/* "aiohttp/_websocket.pyx":41
*
* if sizeof(size_t) >= 8:
* uint64_msk = uint32_msk # <<<<<<<<<<<<<<
@ -1452,7 +1485,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
__pyx_v_uint64_msk = __pyx_v_uint32_msk;
/* "aiohttp/_websocket.pyx":40
/* "aiohttp/_websocket.pyx":42
* if sizeof(size_t) >= 8:
* uint64_msk = uint32_msk
* uint64_msk = (uint64_msk << 32) | uint32_msk # <<<<<<<<<<<<<<
@ -1461,7 +1494,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
__pyx_v_uint64_msk = ((__pyx_v_uint64_msk << 32) | __pyx_v_uint32_msk);
/* "aiohttp/_websocket.pyx":42
/* "aiohttp/_websocket.pyx":44
* uint64_msk = (uint64_msk << 32) | uint32_msk
*
* while data_len >= 8: # <<<<<<<<<<<<<<
@ -1472,7 +1505,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_2 = ((__pyx_v_data_len >= 8) != 0);
if (!__pyx_t_2) break;
/* "aiohttp/_websocket.pyx":43
/* "aiohttp/_websocket.pyx":45
*
* while data_len >= 8:
* (<uint64_t*>in_buf)[0] ^= uint64_msk # <<<<<<<<<<<<<<
@ -1483,7 +1516,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_7 = 0;
(__pyx_t_6[__pyx_t_7]) = ((__pyx_t_6[__pyx_t_7]) ^ __pyx_v_uint64_msk);
/* "aiohttp/_websocket.pyx":44
/* "aiohttp/_websocket.pyx":46
* while data_len >= 8:
* (<uint64_t*>in_buf)[0] ^= uint64_msk
* in_buf += 8 # <<<<<<<<<<<<<<
@ -1492,7 +1525,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
__pyx_v_in_buf = (__pyx_v_in_buf + 8);
/* "aiohttp/_websocket.pyx":45
/* "aiohttp/_websocket.pyx":47
* (<uint64_t*>in_buf)[0] ^= uint64_msk
* in_buf += 8
* data_len -= 8 # <<<<<<<<<<<<<<
@ -1502,7 +1535,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_v_data_len = (__pyx_v_data_len - 8);
}
/* "aiohttp/_websocket.pyx":38
/* "aiohttp/_websocket.pyx":40
* # does it need in python ?! malloc() always aligns to sizeof(long) bytes
*
* if sizeof(size_t) >= 8: # <<<<<<<<<<<<<<
@ -1511,7 +1544,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
}
/* "aiohttp/_websocket.pyx":48
/* "aiohttp/_websocket.pyx":50
*
*
* while data_len >= 4: # <<<<<<<<<<<<<<
@ -1522,7 +1555,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_2 = ((__pyx_v_data_len >= 4) != 0);
if (!__pyx_t_2) break;
/* "aiohttp/_websocket.pyx":49
/* "aiohttp/_websocket.pyx":51
*
* while data_len >= 4:
* (<uint32_t*>in_buf)[0] ^= uint32_msk # <<<<<<<<<<<<<<
@ -1533,7 +1566,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_7 = 0;
(__pyx_t_8[__pyx_t_7]) = ((__pyx_t_8[__pyx_t_7]) ^ __pyx_v_uint32_msk);
/* "aiohttp/_websocket.pyx":50
/* "aiohttp/_websocket.pyx":52
* while data_len >= 4:
* (<uint32_t*>in_buf)[0] ^= uint32_msk
* in_buf += 4 # <<<<<<<<<<<<<<
@ -1542,7 +1575,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
__pyx_v_in_buf = (__pyx_v_in_buf + 4);
/* "aiohttp/_websocket.pyx":51
/* "aiohttp/_websocket.pyx":53
* (<uint32_t*>in_buf)[0] ^= uint32_msk
* in_buf += 4
* data_len -= 4 # <<<<<<<<<<<<<<
@ -1552,7 +1585,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_v_data_len = (__pyx_v_data_len - 4);
}
/* "aiohttp/_websocket.pyx":53
/* "aiohttp/_websocket.pyx":55
* data_len -= 4
*
* for i in range(0, data_len): # <<<<<<<<<<<<<<
@ -1563,7 +1596,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
for (__pyx_t_10 = 0; __pyx_t_10 < __pyx_t_9; __pyx_t_10+=1) {
__pyx_v_i = __pyx_t_10;
/* "aiohttp/_websocket.pyx":54
/* "aiohttp/_websocket.pyx":56
*
* for i in range(0, data_len):
* in_buf[i] ^= mask_buf[i] # <<<<<<<<<<<<<<
@ -1572,8 +1605,8 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
(__pyx_v_in_buf[__pyx_t_11]) = ((__pyx_v_in_buf[__pyx_t_11]) ^ (__pyx_v_mask_buf[__pyx_v_i]));
}
/* "aiohttp/_websocket.pyx":9
* from libc.stdint cimport uint32_t, uint64_t, uintmax_t
/* "aiohttp/_websocket.pyx":11
*
*
* def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<<
* """Note, this function mutates its `data` argument
@ -1660,7 +1693,7 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = {
{0, 0, 0, 0, 0, 0, 0}
};
static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) {
__pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 53, __pyx_L1_error)
__pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 55, __pyx_L1_error)
return 0;
__pyx_L1_error:;
return -1;
@ -1670,17 +1703,17 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0);
/* "aiohttp/_websocket.pyx":9
* from libc.stdint cimport uint32_t, uint64_t, uintmax_t
/* "aiohttp/_websocket.pyx":11
*
*
* def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<<
* """Note, this function mutates its `data` argument
* """
*/
__pyx_tuple_ = PyTuple_Pack(8, __pyx_n_s_mask, __pyx_n_s_data, __pyx_n_s_data_len, __pyx_n_s_i, __pyx_n_s_in_buf, __pyx_n_s_mask_buf, __pyx_n_s_uint32_msk, __pyx_n_s_uint64_msk); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 9, __pyx_L1_error)
__pyx_tuple_ = PyTuple_Pack(8, __pyx_n_s_mask, __pyx_n_s_data, __pyx_n_s_data_len, __pyx_n_s_i, __pyx_n_s_in_buf, __pyx_n_s_mask_buf, __pyx_n_s_uint32_msk, __pyx_n_s_uint64_msk); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 11, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple_);
__Pyx_GIVEREF(__pyx_tuple_);
__pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(2, 0, 8, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__websocket_pyx, __pyx_n_s_websocket_mask_cython, 9, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 9, __pyx_L1_error)
__pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(2, 0, 8, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__websocket_pyx, __pyx_n_s_websocket_mask_cython, 11, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 11, __pyx_L1_error)
__Pyx_RefNannyFinishContext();
return 0;
__pyx_L1_error:;
@ -1738,6 +1771,9 @@ static int __Pyx_modinit_type_init_code(void) {
static int __Pyx_modinit_type_import_code(void) {
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0);
/*--- Type import code ---*/
__pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error)
@ -1786,17 +1822,19 @@ static int __Pyx_modinit_function_import_code(void) {
}
#if PY_MAJOR_VERSION < 3
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC void
#else
#ifndef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#elif PY_MAJOR_VERSION < 3
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" void
#else
#define __Pyx_PyMODINIT_FUNC void
#endif
#else
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyObject *
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
#else
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#define __Pyx_PyMODINIT_FUNC PyObject *
#endif
#endif
@ -1878,6 +1916,9 @@ static CYTHON_SMALL_CODE int __pyx_pymod_exec__websocket(PyObject *__pyx_pyinit_
#endif
{
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannyDeclarations
#if CYTHON_PEP489_MULTI_PHASE_INIT
if (__pyx_m) {
@ -1925,11 +1966,9 @@ if (!__Pyx_RefNanny) {
#endif
/*--- Library function declarations ---*/
/*--- Threads initialization code ---*/
#if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
#ifdef WITH_THREAD /* Python build with threading support? */
#if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
PyEval_InitThreads();
#endif
#endif
/*--- Module creation code ---*/
#if CYTHON_PEP489_MULTI_PHASE_INIT
__pyx_m = __pyx_pyinit_module;
@ -1966,15 +2005,15 @@ if (!__Pyx_RefNanny) {
}
#endif
/*--- Builtin init code ---*/
if (__Pyx_InitCachedBuiltins() < 0) goto __pyx_L1_error;
if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Constants init code ---*/
if (__Pyx_InitCachedConstants() < 0) goto __pyx_L1_error;
if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Global type/function init code ---*/
(void)__Pyx_modinit_global_init_code();
(void)__Pyx_modinit_variable_export_code();
(void)__Pyx_modinit_function_export_code();
(void)__Pyx_modinit_type_init_code();
if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error;
if (unlikely(__Pyx_modinit_type_import_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
(void)__Pyx_modinit_variable_import_code();
(void)__Pyx_modinit_function_import_code();
/*--- Execution code ---*/
@ -1982,22 +2021,22 @@ if (!__Pyx_RefNanny) {
if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
#endif
/* "aiohttp/_websocket.pyx":9
* from libc.stdint cimport uint32_t, uint64_t, uintmax_t
/* "aiohttp/_websocket.pyx":11
*
*
* def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<<
* """Note, this function mutates its `data` argument
* """
*/
__pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython, NULL, __pyx_n_s_aiohttp__websocket); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error)
__pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython, NULL, __pyx_n_s_aiohttp__websocket); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
if (PyDict_SetItem(__pyx_d, __pyx_n_s_websocket_mask_cython, __pyx_t_1) < 0) __PYX_ERR(0, 9, __pyx_L1_error)
if (PyDict_SetItem(__pyx_d, __pyx_n_s_websocket_mask_cython, __pyx_t_1) < 0) __PYX_ERR(0, 11, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "aiohttp/_websocket.pyx":1
* from cpython cimport PyBytes_AsString # <<<<<<<<<<<<<<
*
* #from cpython cimport PyByteArray_AsString # cython still not exports that
*
*/
__pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
@ -2136,7 +2175,7 @@ static int __Pyx_ParseOptionalKeywords(
}
name = first_kw_arg;
#if PY_MAJOR_VERSION < 3
if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) {
if (likely(PyString_Check(key))) {
while (*name) {
if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
&& _PyString_Eq(**name, key)) {
@ -2163,7 +2202,7 @@ static int __Pyx_ParseOptionalKeywords(
while (*name) {
int cmp = (**name == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
(__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**name, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
@ -2179,7 +2218,7 @@ static int __Pyx_ParseOptionalKeywords(
while (argname != first_kw_arg) {
int cmp = (**argname == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
(__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**argname, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
@ -2362,7 +2401,7 @@ done:
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {
PyObject *result;
ternaryfunc call = func->ob_type->tp_call;
ternaryfunc call = Py_TYPE(func)->tp_call;
if (unlikely(!call))
return PyObject_Call(func, arg, kw);
if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
@ -2420,7 +2459,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObjec
if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) {
return __Pyx_PyObject_CallMethO(func, arg);
#if CYTHON_FAST_PYCCALL
} else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) {
} else if (__Pyx_PyFastCFunction_Check(func)) {
return __Pyx_PyCFunction_FastCall(func, &arg, 1);
#endif
}
@ -2551,7 +2590,7 @@ static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject
/* CLineInTraceback */
#ifndef CYTHON_CLINE_IN_TRACEBACK
static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) {
static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
PyObject *use_cline;
PyObject *ptype, *pvalue, *ptraceback;
#if CYTHON_COMPILING_IN_CPYTHON
@ -2655,7 +2694,7 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
int new_max = __pyx_code_cache.max_count + 64;
entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
__pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry));
__pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
if (unlikely(!entries)) {
return;
}
@ -2758,7 +2797,14 @@ bad:
/* CIntToPy */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
if (is_unsigned) {
if (sizeof(long) < sizeof(long)) {
@ -2811,7 +2857,14 @@ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
/* CIntFromPy */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {
@ -3000,7 +3053,14 @@ raise_neg_overflow:
/* CIntFromPy */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const int neg_one = (int) -1, const_zero = (int) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {

aiohttp/_websocket.pyx

@ -1,11 +1,13 @@
from cpython cimport PyBytes_AsString
#from cpython cimport PyByteArray_AsString # cython still not exports that
cdef extern from "Python.h":
char* PyByteArray_AsString(bytearray ba) except NULL
from libc.stdint cimport uint32_t, uint64_t, uintmax_t
def _websocket_mask_cython(object mask, object data):
"""Note, this function mutates its `data` argument
"""

aiohttp/abc.py

@ -2,7 +2,7 @@ import asyncio
import logging
from abc import ABC, abstractmethod
from collections.abc import Sized
from http.cookies import BaseCookie, Morsel # noqa
from http.cookies import BaseCookie, Morsel
from typing import (
TYPE_CHECKING,
Any,
@ -16,24 +16,23 @@ from typing import (
Tuple,
)
from multidict import CIMultiDict # noqa
from multidict import CIMultiDict
from yarl import URL
from .helpers import get_running_loop
from .typedefs import LooseCookies
if TYPE_CHECKING: # pragma: no cover
from .web_request import BaseRequest, Request
from .web_response import StreamResponse
from .web_app import Application
from .web_exceptions import HTTPException
from .web_request import BaseRequest, Request
from .web_response import StreamResponse
else:
BaseRequest = Request = Application = StreamResponse = None
HTTPException = None
class AbstractRouter(ABC):
def __init__(self) -> None:
self._frozen = False
@ -54,12 +53,11 @@ class AbstractRouter(ABC):
self._frozen = True
@abstractmethod
async def resolve(self, request: Request) -> 'AbstractMatchInfo':
async def resolve(self, request: Request) -> "AbstractMatchInfo":
"""Return MATCH_INFO for given request"""
class AbstractMatchInfo(ABC):
@property # pragma: no branch
@abstractmethod
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
@ -123,8 +121,7 @@ class AbstractResolver(ABC):
"""Abstract DNS resolver."""
@abstractmethod
async def resolve(self, host: str,
port: int, family: int) -> List[Dict[str, Any]]:
async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
"""Return IP address for given hostname"""
@abstractmethod
@ -138,25 +135,29 @@ else:
IterableBase = Iterable
ClearCookiePredicate = Callable[["Morsel[str]"], bool]
class AbstractCookieJar(Sized, IterableBase):
"""Abstract Cookie Jar."""
def __init__(self, *,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
self._loop = get_running_loop(loop)
@abstractmethod
def clear(self) -> None:
"""Clear all cookies."""
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
"""Clear all cookies if no predicate is passed."""
@abstractmethod
def update_cookies(self,
cookies: LooseCookies,
response_url: URL=URL()) -> None:
def clear_domain(self, domain: str) -> None:
"""Clear all cookies for domain and all subdomains."""
@abstractmethod
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
"""Update cookies."""
@abstractmethod
def filter_cookies(self, request_url: URL) -> 'BaseCookie[str]':
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
"""Return the jar's cookies filtered by their attributes."""
@ -172,7 +173,7 @@ class AbstractStreamWriter(ABC):
"""Write chunk into stream."""
@abstractmethod
async def write_eof(self, chunk: bytes=b'') -> None:
async def write_eof(self, chunk: bytes = b"") -> None:
"""Write last chunk."""
@abstractmethod
@ -180,7 +181,7 @@ class AbstractStreamWriter(ABC):
"""Flush the write buffer."""
@abstractmethod
def enable_compression(self, encoding: str='deflate') -> None:
def enable_compression(self, encoding: str = "deflate") -> None:
"""Enable HTTP body compression"""
@abstractmethod
@ -188,8 +189,9 @@ class AbstractStreamWriter(ABC):
"""Enable HTTP chunked mode"""
@abstractmethod
async def write_headers(self, status_line: str,
headers: 'CIMultiDict[str]') -> None:
async def write_headers(
self, status_line: str, headers: "CIMultiDict[str]"
) -> None:
"""Write HTTP headers"""
@ -201,8 +203,5 @@ class AbstractAccessLogger(ABC):
self.log_format = log_format
@abstractmethod
def log(self,
request: BaseRequest,
response: StreamResponse,
time: float) -> None:
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
"""Emit log to logger."""

aiohttp/base_protocol.py

@ -5,8 +5,14 @@ from .tcp_helpers import tcp_nodelay
class BaseProtocol(asyncio.Protocol):
__slots__ = ('_loop', '_paused', '_drain_waiter',
'_connection_lost', '_reading_paused', 'transport')
__slots__ = (
"_loop",
"_paused",
"_drain_waiter",
"_connection_lost",
"_reading_paused",
"transport",
)
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop # type: asyncio.AbstractEventLoop
@ -71,11 +77,11 @@ class BaseProtocol(asyncio.Protocol):
async def _drain_helper(self) -> None:
if self._connection_lost:
raise ConnectionResetError('Connection lost')
raise ConnectionResetError("Connection lost")
if not self._paused:
return
waiter = self._drain_waiter
assert waiter is None or waiter.cancelled()
waiter = self._loop.create_future()
self._drain_waiter = waiter
await waiter
if waiter is None:
waiter = self._loop.create_future()
self._drain_waiter = waiter
await asyncio.shield(waiter)
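
This change replaces the old assert (which assumed at most one pending drain) with lazy creation of the waiter, and shields the await so that cancelling one coroutine blocked in _drain_helper() does not cancel the future that other waiters may share. A minimal demonstration of why the shield matters:

import asyncio

async def drain(waiter: "asyncio.Future[None]") -> None:
    await asyncio.shield(waiter)

async def main() -> None:
    loop = asyncio.get_running_loop()
    waiter = loop.create_future()
    writer = asyncio.create_task(drain(waiter))
    await asyncio.sleep(0)
    writer.cancel()            # one writer gives up waiting...
    await asyncio.sleep(0)
    print(waiter.cancelled())  # False: the shared waiter survives
    waiter.set_result(None)    # resume_writing() can still wake the rest

asyncio.run(main())
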

File diff suppressed because it is too large.

aiohttp/client_exceptions.py

@ -4,38 +4,42 @@ import asyncio
import warnings
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
from .typedefs import _CIMultiDict
from .http_parser import RawResponseMessage
from .typedefs import LooseHeaders
try:
import ssl
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = SSLContext = None # type: ignore
ssl = SSLContext = None # type: ignore[assignment]
if TYPE_CHECKING: # pragma: no cover
from .client_reqrep import (RequestInfo, ClientResponse, ConnectionKey, # noqa
Fingerprint)
from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
else:
RequestInfo = ClientResponse = ConnectionKey = None
__all__ = (
'ClientError',
'ClientConnectionError',
'ClientOSError', 'ClientConnectorError', 'ClientProxyConnectionError',
'ClientSSLError',
'ClientConnectorSSLError', 'ClientConnectorCertificateError',
'ServerConnectionError', 'ServerTimeoutError', 'ServerDisconnectedError',
'ServerFingerprintMismatch',
'ClientResponseError', 'ClientHttpProxyError',
'WSServerHandshakeError', 'ContentTypeError',
'ClientPayloadError', 'InvalidURL')
"ClientError",
"ClientConnectionError",
"ClientOSError",
"ClientConnectorError",
"ClientProxyConnectionError",
"ClientSSLError",
"ClientConnectorSSLError",
"ClientConnectorCertificateError",
"ServerConnectionError",
"ServerTimeoutError",
"ServerDisconnectedError",
"ServerFingerprintMismatch",
"ClientResponseError",
"ClientHttpProxyError",
"WSServerHandshakeError",
"ContentTypeError",
"ClientPayloadError",
"InvalidURL",
)
class ClientError(Exception):
@ -48,21 +52,28 @@ class ClientResponseError(ClientError):
request_info: instance of RequestInfo
"""
def __init__(self, request_info: RequestInfo,
history: Tuple[ClientResponse, ...], *,
code: Optional[int]=None,
status: Optional[int]=None,
message: str='',
headers: Optional[_CIMultiDict]=None) -> None:
def __init__(
self,
request_info: RequestInfo,
history: Tuple[ClientResponse, ...],
*,
code: Optional[int] = None,
status: Optional[int] = None,
message: str = "",
headers: Optional[LooseHeaders] = None,
) -> None:
self.request_info = request_info
if code is not None:
if status is not None:
raise ValueError(
"Both code and status arguments are provided; "
"code is deprecated, use status instead")
warnings.warn("code argument is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
"code is deprecated, use status instead"
)
warnings.warn(
"code argument is deprecated, use status instead",
DeprecationWarning,
stacklevel=2,
)
if status is not None:
self.status = status
elif code is not None:
@ -75,31 +86,38 @@ class ClientResponseError(ClientError):
self.args = (request_info, history)
def __str__(self) -> str:
return ("%s, message=%r, url=%r" %
(self.status, self.message, self.request_info.real_url))
return "{}, message={!r}, url={!r}".format(
self.status,
self.message,
self.request_info.real_url,
)
def __repr__(self) -> str:
args = "%r, %r" % (self.request_info, self.history)
args = f"{self.request_info!r}, {self.history!r}"
if self.status != 0:
args += ", status=%r" % (self.status,)
if self.message != '':
args += ", message=%r" % (self.message,)
args += f", status={self.status!r}"
if self.message != "":
args += f", message={self.message!r}"
if self.headers is not None:
args += ", headers=%r" % (self.headers,)
return "%s(%s)" % (type(self).__name__, args)
args += f", headers={self.headers!r}"
return f"{type(self).__name__}({args})"
@property
def code(self) -> int:
warnings.warn("code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2,
)
return self.status
@code.setter
def code(self, value: int) -> None:
warnings.warn("code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2,
)
self.status = value
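
status supersedes the deprecated code on ClientResponseError, both as a keyword argument and as a property, and passing both raises ValueError. A small sketch; request_info=None and history=() are placeholders the constructor accepts at runtime:

import aiohttp

err = aiohttp.ClientResponseError(
    request_info=None, history=(), status=404, message="Not Found"
)
print(err.status)  # 404
print(err.code)    # 404, plus a DeprecationWarning
# passing both code=... and status=... would raise ValueError
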
@ -138,8 +156,8 @@ class ClientConnectorError(ClientOSError):
Raised in :class:`aiohttp.connector.TCPConnector` if
connection to proxy can not be established.
"""
def __init__(self, connection_key: ConnectionKey,
os_error: OSError) -> None:
def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
self._conn_key = connection_key
self._os_error = os_error
super().__init__(os_error.errno, os_error.strerror)
@ -158,13 +176,13 @@ class ClientConnectorError(ClientOSError):
return self._conn_key.port
@property
def ssl(self) -> Union[SSLContext, None, bool, 'Fingerprint']:
def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
return self._conn_key.ssl
def __str__(self) -> str:
return ('Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]'
.format(self, self.ssl if self.ssl is not None else 'default',
self.strerror))
return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
self, self.ssl if self.ssl is not None else "default", self.strerror
)
# OSError.__reduce__ does too much black magick
__reduce__ = BaseException.__reduce__
@ -178,6 +196,29 @@ class ClientProxyConnectionError(ClientConnectorError):
"""
class UnixClientConnectorError(ClientConnectorError):
"""Unix connector error.
Raised in :py:class:`aiohttp.connector.UnixConnector`
if connection to unix socket can not be established.
"""
def __init__(
self, path: str, connection_key: ConnectionKey, os_error: OSError
) -> None:
self._path = path
super().__init__(connection_key, os_error)
@property
def path(self) -> str:
return self._path
def __str__(self) -> str:
return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
self, self.ssl if self.ssl is not None else "default", self.strerror
)
class ServerConnectionError(ClientConnectionError):
"""Server connection errors."""
@ -185,12 +226,12 @@ class ServerConnectionError(ClientConnectionError):
class ServerDisconnectedError(ServerConnectionError):
"""Server disconnected."""
def __init__(self, message: Optional[str]=None) -> None:
self.message = message
def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
if message is None:
self.args = ()
else:
self.args = (message,)
message = "Server disconnected"
self.args = (message,)
self.message = message
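
ServerDisconnectedError now defaults its message and always mirrors it into args, so str() of the exception is no longer empty:

from aiohttp import ServerDisconnectedError

err = ServerDisconnectedError()
print(err.message)  # Server disconnected
print(str(err))     # Server disconnected
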
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
@ -200,8 +241,7 @@ class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
class ServerFingerprintMismatch(ServerConnectionError):
"""SSL certificate does not match expected fingerprint."""
def __init__(self, expected: bytes, got: bytes,
host: str, port: int) -> None:
def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
self.expected = expected
self.got = got
self.host = host
@ -209,9 +249,9 @@ class ServerFingerprintMismatch(ServerConnectionError):
self.args = (expected, got, host, port)
def __repr__(self) -> str:
return '<{} expected={!r} got={!r} host={!r} port={!r}>'.format(
self.__class__.__name__, self.expected, self.got,
self.host, self.port)
return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
self.__class__.__name__, self.expected, self.got, self.host, self.port
)
class ClientPayloadError(ClientError):
@ -222,7 +262,8 @@ class InvalidURL(ClientError, ValueError):
"""Invalid URL.
URL used for fetching is malformed, e.g. it doesn't contains host
part."""
part.
"""
# Derive from ValueError for backward compatibility
@ -236,7 +277,7 @@ class InvalidURL(ClientError, ValueError):
return self.args[0]
def __repr__(self) -> str:
return '<{} {}>'.format(self.__class__.__name__, self.url)
return f"<{self.__class__.__name__} {self.url}>"
class ClientSSLError(ClientConnectorError):
@ -245,27 +286,34 @@ class ClientSSLError(ClientConnectorError):
if ssl is not None:
cert_errors = (ssl.CertificateError,)
cert_errors_bases = (ClientSSLError, ssl.CertificateError,)
cert_errors_bases = (
ClientSSLError,
ssl.CertificateError,
)
ssl_errors = (ssl.SSLError,)
ssl_error_bases = (ClientSSLError, ssl.SSLError)
else: # pragma: no cover
cert_errors = tuple()
cert_errors_bases = (ClientSSLError, ValueError,)
cert_errors_bases = (
ClientSSLError,
ValueError,
)
ssl_errors = tuple()
ssl_error_bases = (ClientSSLError,)
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc]
"""Response ssl error."""
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc]
"""Response certificate error."""
def __init__(self, connection_key:
ConnectionKey, certificate_error: Exception) -> None:
def __init__(
self, connection_key: ConnectionKey, certificate_error: Exception
) -> None:
self._conn_key = connection_key
self._certificate_error = certificate_error
self.args = (connection_key, certificate_error)
@ -287,6 +335,8 @@ class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
return self._conn_key.is_ssl
def __str__(self) -> str:
return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} '
'[{0.certificate_error.__class__.__name__}: '
'{0.certificate_error.args}]'.format(self))
return (
"Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
"[{0.certificate_error.__class__.__name__}: "
"{0.certificate_error.args}]".format(self)
)

aiohttp/client_proto.py

@ -14,24 +14,22 @@ from .http import HttpResponseParser, RawResponseMessage
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
class ResponseHandler(BaseProtocol,
DataQueue[Tuple[RawResponseMessage, StreamReader]]):
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
"""Helper class to adapt between Protocol and StreamReader."""
def __init__(self,
loop: asyncio.AbstractEventLoop) -> None:
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
BaseProtocol.__init__(self, loop=loop)
DataQueue.__init__(self, loop)
self._should_close = False
self._payload = None
self._payload: Optional[StreamReader] = None
self._skip_payload = False
self._payload_parser = None
self._timer = None
self._tail = b''
self._tail = b""
self._upgraded = False
self._parser = None # type: Optional[HttpResponseParser]
@ -44,14 +42,17 @@ class ResponseHandler(BaseProtocol,
@property
def should_close(self) -> bool:
if (self._payload is not None and
not self._payload.is_eof() or self._upgraded):
if self._payload is not None and not self._payload.is_eof() or self._upgraded:
return True
return (self._should_close or self._upgraded or
self.exception() is not None or
self._payload_parser is not None or
len(self) > 0 or bool(self._tail))
return (
self._should_close
or self._upgraded
or self.exception() is not None
or self._payload_parser is not None
or len(self) > 0
or bool(self._tail)
)
def force_close(self) -> None:
self._should_close = True
@ -65,7 +66,7 @@ class ResponseHandler(BaseProtocol,
self._drop_timeout()
def is_connected(self) -> bool:
return self.transport is not None
return self.transport is not None and not self.transport.is_closing()
def connection_lost(self, exc: Optional[BaseException]) -> None:
self._drop_timeout()
@ -81,8 +82,8 @@ class ResponseHandler(BaseProtocol,
except Exception:
if self._payload is not None:
self._payload.set_exception(
ClientPayloadError(
'Response payload is not completed'))
ClientPayloadError("Response payload is not completed")
)
if not self.is_eof():
if isinstance(exc, OSError):
@ -130,27 +131,37 @@ class ResponseHandler(BaseProtocol,
self._drop_timeout()
if self._tail:
data, self._tail = self._tail, b''
data, self._tail = self._tail, b""
self.data_received(data)
def set_response_params(self, *, timer: BaseTimerContext=None,
skip_payload: bool=False,
read_until_eof: bool=False,
auto_decompress: bool=True,
read_timeout: Optional[float]=None) -> None:
def set_response_params(
self,
*,
timer: Optional[BaseTimerContext] = None,
skip_payload: bool = False,
read_until_eof: bool = False,
auto_decompress: bool = True,
read_timeout: Optional[float] = None,
read_bufsize: int = 2 ** 16,
) -> None:
self._skip_payload = skip_payload
self._read_timeout = read_timeout
self._reschedule_timeout()
self._parser = HttpResponseParser(
self, self._loop, timer=timer,
self,
self._loop,
read_bufsize,
timer=timer,
payload_exception=ClientPayloadError,
response_with_body=not skip_payload,
read_until_eof=read_until_eof,
auto_decompress=auto_decompress)
auto_decompress=auto_decompress,
)
if self._tail:
data, self._tail = self._tail, b''
data, self._tail = self._tail, b""
self.data_received(data)
def _drop_timeout(self) -> None:
@ -165,7 +176,8 @@ class ResponseHandler(BaseProtocol,
if timeout:
self._read_timeout_handle = self._loop.call_later(
timeout, self._on_read_timeout)
timeout, self._on_read_timeout
)
else:
self._read_timeout_handle = None
@ -211,7 +223,7 @@ class ResponseHandler(BaseProtocol,
self._upgraded = upgraded
payload = None
payload: Optional[StreamReader] = None
for message, payload in messages:
if message.should_close:
self._should_close = True
@ -219,7 +231,7 @@ class ResponseHandler(BaseProtocol,
self._payload = payload
if self._skip_payload or message.code in (204, 304):
self.feed_data((message, EMPTY_PAYLOAD), 0) # type: ignore # noqa
self.feed_data((message, EMPTY_PAYLOAD), 0)
else:
self.feed_data((message, payload), 0)
if payload is not None:
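Beyond the reflow, this hunk makes two functional changes: is_connected() now also checks transport.is_closing(), and set_response_params() grows a read_bufsize argument (default 2 ** 16 bytes) that is forwarded to HttpResponseParser. ResponseHandler is internal, but read_bufsize is plumbed through from the public client API; a sketch, assuming an aiohttp release that exposes the parameter (3.7+):

import asyncio
import aiohttp

async def main() -> None:
    # Double the default 64 KiB read buffer for this session; session.get()
    # and session.request() accept a per-request read_bufsize override too.
    async with aiohttp.ClientSession(read_bufsize=2 ** 17) as session:
        async with session.get("https://example.org") as resp:
            body = await resp.read()
            print(len(body))

asyncio.run(main())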

File diff suppressed because it is too large


@ -1,7 +1,7 @@
"""WebSocket client for asyncio."""
import asyncio
from typing import Any, Optional
from typing import Any, Optional, cast
import async_timeout
@ -12,11 +12,12 @@ from .http import (
WS_CLOSED_MESSAGE,
WS_CLOSING_MESSAGE,
WebSocketError,
WSCloseCode,
WSMessage,
WSMsgType,
)
from .http_websocket import WebSocketWriter # WSMessage
from .streams import EofStream, FlowControlDataQueue # noqa
from .streams import EofStream, FlowControlDataQueue
from .typedefs import (
DEFAULT_JSON_DECODER,
DEFAULT_JSON_ENCODER,
@ -26,21 +27,22 @@ from .typedefs import (
class ClientWebSocketResponse:
def __init__(self,
reader: 'FlowControlDataQueue[WSMessage]',
writer: WebSocketWriter,
protocol: Optional[str],
response: ClientResponse,
timeout: float,
autoclose: bool,
autoping: bool,
loop: asyncio.AbstractEventLoop,
*,
receive_timeout: Optional[float]=None,
heartbeat: Optional[float]=None,
compress: int=0,
client_notakeover: bool=False) -> None:
def __init__(
self,
reader: "FlowControlDataQueue[WSMessage]",
writer: WebSocketWriter,
protocol: Optional[str],
response: ClientResponse,
timeout: float,
autoclose: bool,
autoping: bool,
loop: asyncio.AbstractEventLoop,
*,
receive_timeout: Optional[float] = None,
heartbeat: Optional[float] = None,
compress: int = 0,
client_notakeover: bool = False,
) -> None:
self._response = response
self._conn = response.connection
@ -55,10 +57,10 @@ class ClientWebSocketResponse:
self._autoclose = autoclose
self._autoping = autoping
self._heartbeat = heartbeat
self._heartbeat_cb = None
self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
if heartbeat is not None:
self._pong_heartbeat = heartbeat / 2.0
self._pong_response_cb = None
self._pong_response_cb: Optional[asyncio.TimerHandle] = None
self._loop = loop
self._waiting = None # type: Optional[asyncio.Future[bool]]
self._exception = None # type: Optional[BaseException]
@ -81,7 +83,8 @@ class ClientWebSocketResponse:
if self._heartbeat is not None:
self._heartbeat_cb = call_later(
self._send_heartbeat, self._heartbeat, self._loop)
self._send_heartbeat, self._heartbeat, self._loop
)
def _send_heartbeat(self) -> None:
if self._heartbeat is not None and not self._closed:
@ -93,12 +96,13 @@ class ClientWebSocketResponse:
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = call_later(
self._pong_not_received, self._pong_heartbeat, self._loop)
self._pong_not_received, self._pong_heartbeat, self._loop
)
def _pong_not_received(self) -> None:
if not self._closed:
self._closed = True
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = asyncio.TimeoutError()
self._response.close()
@ -122,7 +126,7 @@ class ClientWebSocketResponse:
def client_notakeover(self) -> bool:
return self._client_notakeover
def get_extra_info(self, name: str, default: Any=None) -> Any:
def get_extra_info(self, name: str, default: Any = None) -> Any:
"""extra info from connection transport"""
conn = self._response.connection
if conn is None:
@ -135,31 +139,32 @@ class ClientWebSocketResponse:
def exception(self) -> Optional[BaseException]:
return self._exception
async def ping(self, message: bytes=b'') -> None:
async def ping(self, message: bytes = b"") -> None:
await self._writer.ping(message)
async def pong(self, message: bytes=b'') -> None:
async def pong(self, message: bytes = b"") -> None:
await self._writer.pong(message)
async def send_str(self, data: str,
compress: Optional[int]=None) -> None:
async def send_str(self, data: str, compress: Optional[int] = None) -> None:
if not isinstance(data, str):
raise TypeError('data argument must be str (%r)' % type(data))
raise TypeError("data argument must be str (%r)" % type(data))
await self._writer.send(data, binary=False, compress=compress)
async def send_bytes(self, data: bytes,
compress: Optional[int]=None) -> None:
async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError('data argument must be byte-ish (%r)' %
type(data))
raise TypeError("data argument must be byte-ish (%r)" % type(data))
await self._writer.send(data, binary=True, compress=compress)
async def send_json(self, data: Any,
compress: Optional[int]=None,
*, dumps: JSONEncoder=DEFAULT_JSON_ENCODER) -> None:
async def send_json(
self,
data: Any,
compress: Optional[int] = None,
*,
dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
) -> None:
await self.send_str(dumps(data), compress=compress)
async def close(self, *, code: int=1000, message: bytes=b'') -> bool:
async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
# we need to break `receive()` cycle first,
# `close()` may be called from different task
if self._waiting is not None and not self._closed:
@ -172,11 +177,11 @@ class ClientWebSocketResponse:
try:
await self._writer.close(code, message)
except asyncio.CancelledError:
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._response.close()
raise
except Exception as exc:
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = exc
self._response.close()
return True
@ -187,14 +192,14 @@ class ClientWebSocketResponse:
while True:
try:
with async_timeout.timeout(self._timeout, loop=self._loop):
async with async_timeout.timeout(self._timeout):
msg = await self._reader.read()
except asyncio.CancelledError:
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._response.close()
raise
except Exception as exc:
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = exc
self._response.close()
return True
@ -206,11 +211,10 @@ class ClientWebSocketResponse:
else:
return False
async def receive(self, timeout: Optional[float]=None) -> WSMessage:
async def receive(self, timeout: Optional[float] = None) -> WSMessage:
while True:
if self._waiting is not None:
raise RuntimeError(
'Concurrent call to receive() is not allowed')
raise RuntimeError("Concurrent call to receive() is not allowed")
if self._closed:
return WS_CLOSED_MESSAGE
@ -221,9 +225,7 @@ class ClientWebSocketResponse:
try:
self._waiting = self._loop.create_future()
try:
with async_timeout.timeout(
timeout or self._receive_timeout,
loop=self._loop):
async with async_timeout.timeout(timeout or self._receive_timeout):
msg = await self._reader.read()
self._reset_heartbeat()
finally:
@ -231,15 +233,15 @@ class ClientWebSocketResponse:
self._waiting = None
set_result(waiter, True)
except (asyncio.CancelledError, asyncio.TimeoutError):
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
raise
except EofStream:
self._close_code = 1000
self._close_code = WSCloseCode.OK
await self.close()
return WSMessage(WSMsgType.CLOSED, None, None)
except ClientError:
self._closed = True
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
return WS_CLOSED_MESSAGE
except WebSocketError as exc:
self._close_code = exc.code
@ -248,7 +250,7 @@ class ClientWebSocketResponse:
except Exception as exc:
self._exception = exc
self._closing = True
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
await self.close()
return WSMessage(WSMsgType.ERROR, exc, None)
@ -267,35 +269,32 @@ class ClientWebSocketResponse:
return msg
async def receive_str(self, *, timeout: Optional[float]=None) -> str:
async def receive_str(self, *, timeout: Optional[float] = None) -> str:
msg = await self.receive(timeout)
if msg.type != WSMsgType.TEXT:
raise TypeError(
"Received message {}:{!r} is not str".format(msg.type,
msg.data))
return msg.data
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
return cast(str, msg.data)
async def receive_bytes(self, *, timeout: Optional[float]=None) -> bytes:
async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
msg = await self.receive(timeout)
if msg.type != WSMsgType.BINARY:
raise TypeError(
"Received message {}:{!r} is not bytes".format(msg.type,
msg.data))
return msg.data
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
return cast(bytes, msg.data)
async def receive_json(self,
*, loads: JSONDecoder=DEFAULT_JSON_DECODER,
timeout: Optional[float]=None) -> Any:
async def receive_json(
self,
*,
loads: JSONDecoder = DEFAULT_JSON_DECODER,
timeout: Optional[float] = None,
) -> Any:
data = await self.receive_str(timeout=timeout)
return loads(data)
def __aiter__(self) -> 'ClientWebSocketResponse':
def __aiter__(self) -> "ClientWebSocketResponse":
return self
async def __anext__(self) -> WSMessage:
msg = await self.receive()
if msg.type in (WSMsgType.CLOSE,
WSMsgType.CLOSING,
WSMsgType.CLOSED):
raise StopAsyncIteration # NOQA
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
raise StopAsyncIteration
return msg
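The recurring change in this file is the replacement of the magic close codes 1000/1006 with the WSCloseCode enum, alongside async_timeout used as an async context manager and cast() in receive_str()/receive_bytes(). From the caller's side the enum reads like this (the endpoint URL is a placeholder for any websocket server):

import asyncio
import aiohttp

async def main() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect("wss://echo.example.org") as ws:
            await ws.send_str("ping")
            async for msg in ws:  # iteration stops on CLOSE/CLOSING/CLOSED
                print(msg.data)
                break
            await ws.close(code=aiohttp.WSCloseCode.OK)
        if ws.close_code == aiohttp.WSCloseCode.ABNORMAL_CLOSURE:
            print("connection dropped without a close handshake")

asyncio.run(main())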

File diff suppressed because it is too large


@ -1,16 +1,18 @@
import asyncio
import contextlib
import datetime
import os # noqa
import pathlib
import pickle
import re
from collections import defaultdict
from http.cookies import BaseCookie, Morsel, SimpleCookie # noqa
from http.cookies import BaseCookie, Morsel, SimpleCookie
from typing import ( # noqa
DefaultDict,
Dict,
Iterable,
Iterator,
List,
Mapping,
Optional,
Set,
@ -21,14 +23,14 @@ from typing import ( # noqa
from yarl import URL
from .abc import AbstractCookieJar
from .abc import AbstractCookieJar, ClearCookiePredicate
from .helpers import is_ip_address, next_whole_second
from .typedefs import LooseCookies, PathLike
from .typedefs import LooseCookies, PathLike, StrOrURL
__all__ = ('CookieJar', 'DummyCookieJar')
__all__ = ("CookieJar", "DummyCookieJar")
CookieItem = Union[str, 'Morsel[str]']
CookieItem = Union[str, "Morsel[str]"]
class CookieJar(AbstractCookieJar):
@ -36,46 +38,107 @@ class CookieJar(AbstractCookieJar):
DATE_TOKENS_RE = re.compile(
r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)")
r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
)
DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
DATE_MONTH_RE = re.compile("(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|"
"(aug)|(sep)|(oct)|(nov)|(dec)", re.I)
DATE_MONTH_RE = re.compile(
"(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
re.I,
)
DATE_YEAR_RE = re.compile(r"(\d{2,4})")
MAX_TIME = datetime.datetime.max.replace(
tzinfo=datetime.timezone.utc)
MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)
def __init__(self, *, unsafe: bool=False,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2 ** 31 - 1)
def __init__(
self,
*,
unsafe: bool = False,
quote_cookie: bool = True,
treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
super().__init__(loop=loop)
self._cookies = defaultdict(SimpleCookie) #type: DefaultDict[str, SimpleCookie] # noqa
self._cookies = defaultdict(
SimpleCookie
) # type: DefaultDict[str, SimpleCookie[str]]
self._host_only_cookies = set() # type: Set[Tuple[str, str]]
self._unsafe = unsafe
self._quote_cookie = quote_cookie
if treat_as_secure_origin is None:
treat_as_secure_origin = []
elif isinstance(treat_as_secure_origin, URL):
treat_as_secure_origin = [treat_as_secure_origin.origin()]
elif isinstance(treat_as_secure_origin, str):
treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
else:
treat_as_secure_origin = [
URL(url).origin() if isinstance(url, str) else url.origin()
for url in treat_as_secure_origin
]
self._treat_as_secure_origin = treat_as_secure_origin
self._next_expiration = next_whole_second()
self._expirations = {} # type: Dict[Tuple[str, str], datetime.datetime] # noqa: E501
self._expirations = {} # type: Dict[Tuple[str, str], datetime.datetime]
# #4515: datetime.max may not be representable on 32-bit platforms
self._max_time = self.MAX_TIME
try:
self._max_time.timestamp()
except OverflowError:
self._max_time = self.MAX_32BIT_TIME
def save(self, file_path: PathLike) -> None:
file_path = pathlib.Path(file_path)
with file_path.open(mode='wb') as f:
with file_path.open(mode="wb") as f:
pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
def load(self, file_path: PathLike) -> None:
file_path = pathlib.Path(file_path)
with file_path.open(mode='rb') as f:
with file_path.open(mode="rb") as f:
self._cookies = pickle.load(f)
def clear(self) -> None:
self._cookies.clear()
self._host_only_cookies.clear()
self._next_expiration = next_whole_second()
self._expirations.clear()
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
if predicate is None:
self._next_expiration = next_whole_second()
self._cookies.clear()
self._host_only_cookies.clear()
self._expirations.clear()
return
def __iter__(self) -> 'Iterator[Morsel[str]]':
to_del = []
now = datetime.datetime.now(datetime.timezone.utc)
for domain, cookie in self._cookies.items():
for name, morsel in cookie.items():
key = (domain, name)
if (
key in self._expirations and self._expirations[key] <= now
) or predicate(morsel):
to_del.append(key)
for domain, name in to_del:
key = (domain, name)
self._host_only_cookies.discard(key)
if key in self._expirations:
del self._expirations[(domain, name)]
self._cookies[domain].pop(name, None)
next_expiration = min(self._expirations.values(), default=self._max_time)
try:
self._next_expiration = next_expiration.replace(
microsecond=0
) + datetime.timedelta(seconds=1)
except OverflowError:
self._next_expiration = self._max_time
def clear_domain(self, domain: str) -> None:
self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
def __iter__(self) -> "Iterator[Morsel[str]]":
self._do_expiration()
for val in self._cookies.values():
yield from val.values()
@ -84,39 +147,13 @@ class CookieJar(AbstractCookieJar):
return sum(1 for i in self)
def _do_expiration(self) -> None:
now = datetime.datetime.now(datetime.timezone.utc)
if self._next_expiration > now:
return
if not self._expirations:
return
next_expiration = self.MAX_TIME
to_del = []
cookies = self._cookies
expirations = self._expirations
for (domain, name), when in expirations.items():
if when <= now:
cookies[domain].pop(name, None)
to_del.append((domain, name))
self._host_only_cookies.discard((domain, name))
else:
next_expiration = min(next_expiration, when)
for key in to_del:
del expirations[key]
self.clear(lambda x: False)
try:
self._next_expiration = (next_expiration.replace(microsecond=0) +
datetime.timedelta(seconds=1))
except OverflowError:
self._next_expiration = self.MAX_TIME
def _expire_cookie(self, when: datetime.datetime, domain: str, name: str
) -> None:
def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
self._next_expiration = min(self._next_expiration, when)
self._expirations[(domain, name)] = when
def update_cookies(self,
cookies: LooseCookies,
response_url: URL=URL()) -> None:
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
"""Update cookies."""
hostname = response_url.raw_host
@ -125,18 +162,18 @@ class CookieJar(AbstractCookieJar):
return
if isinstance(cookies, Mapping):
cookies = cookies.items() # type: ignore
cookies = cookies.items()
for name, cookie in cookies:
if not isinstance(cookie, Morsel):
tmp = SimpleCookie()
tmp[name] = cookie # type: ignore
tmp = SimpleCookie() # type: SimpleCookie[str]
tmp[name] = cookie # type: ignore[assignment]
cookie = tmp[name]
domain = cookie["domain"]
# ignore domains with trailing dots
if domain.endswith('.'):
if domain.endswith("."):
domain = ""
del cookie["domain"]
@ -163,7 +200,7 @@ class CookieJar(AbstractCookieJar):
path = "/"
else:
# Cut everything from the last slash to the end
path = "/" + path[1:path.rfind("/")]
path = "/" + path[1 : path.rfind("/")]
cookie["path"] = path
max_age = cookie["max-age"]
@ -171,13 +208,12 @@ class CookieJar(AbstractCookieJar):
try:
delta_seconds = int(max_age)
try:
max_age_expiration = (
datetime.datetime.now(datetime.timezone.utc) +
datetime.timedelta(seconds=delta_seconds))
max_age_expiration = datetime.datetime.now(
datetime.timezone.utc
) + datetime.timedelta(seconds=delta_seconds)
except OverflowError:
max_age_expiration = self.MAX_TIME
self._expire_cookie(max_age_expiration,
domain, name)
max_age_expiration = self._max_time
self._expire_cookie(max_age_expiration, domain, name)
except ValueError:
cookie["max-age"] = ""
@ -186,8 +222,7 @@ class CookieJar(AbstractCookieJar):
if expires:
expire_time = self._parse_date(expires)
if expire_time:
self._expire_cookie(expire_time,
domain, name)
self._expire_cookie(expire_time, domain, name)
else:
cookie["expires"] = ""
@ -195,13 +230,24 @@ class CookieJar(AbstractCookieJar):
self._do_expiration()
def filter_cookies(self, request_url: URL=URL()) -> 'BaseCookie[str]':
def filter_cookies(
self, request_url: URL = URL()
) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
"""Returns this jar's cookies filtered by their attributes."""
self._do_expiration()
request_url = URL(request_url)
filtered = SimpleCookie()
filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
SimpleCookie() if self._quote_cookie else BaseCookie()
)
hostname = request_url.raw_host or ""
is_not_secure = request_url.scheme not in ("https", "wss")
request_origin = URL()
with contextlib.suppress(ValueError):
request_origin = request_url.origin()
is_not_secure = (
request_url.scheme not in ("https", "wss")
and request_origin not in self._treat_as_secure_origin
)
for cookie in self:
name = cookie.key
@ -229,7 +275,7 @@ class CookieJar(AbstractCookieJar):
# It's critical we use the Morsel so the coded_value
# (based on cookie version) is preserved
mrsl_val = cast('Morsel[str]', cookie.get(cookie.key, Morsel()))
mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
filtered[name] = mrsl_val
@ -244,7 +290,7 @@ class CookieJar(AbstractCookieJar):
if not hostname.endswith(domain):
return False
non_matching = hostname[:-len(domain)]
non_matching = hostname[: -len(domain)]
if not non_matching.endswith("."):
return False
@ -266,7 +312,7 @@ class CookieJar(AbstractCookieJar):
if cookie_path.endswith("/"):
return True
non_matching = req_path[len(cookie_path):]
non_matching = req_path[len(cookie_path) :]
return non_matching.startswith("/")
@ -294,8 +340,7 @@ class CookieJar(AbstractCookieJar):
time_match = cls.DATE_HMS_TIME_RE.match(token)
if time_match:
found_time = True
hour, minute, second = [
int(s) for s in time_match.groups()]
hour, minute, second = (int(s) for s in time_match.groups())
continue
if not found_day:
@ -333,9 +378,9 @@ class CookieJar(AbstractCookieJar):
if year < 1601 or hour > 23 or minute > 59 or second > 59:
return None
return datetime.datetime(year, month, day,
hour, minute, second,
tzinfo=datetime.timezone.utc)
return datetime.datetime(
year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
)
class DummyCookieJar(AbstractCookieJar):
@ -345,24 +390,24 @@ class DummyCookieJar(AbstractCookieJar):
"""
def __init__(self, *,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
super().__init__(loop=loop)
def __iter__(self) -> 'Iterator[Morsel[str]]':
def __iter__(self) -> "Iterator[Morsel[str]]":
while False:
yield None
def __len__(self) -> int:
return 0
def clear(self) -> None:
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
pass
def update_cookies(self,
cookies: LooseCookies,
response_url: URL=URL()) -> None:
def clear_domain(self, domain: str) -> None:
pass
def filter_cookies(self, request_url: URL) -> 'BaseCookie[str]':
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
pass
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
return SimpleCookie()
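CookieJar gains three behaviors here: quote_cookie=False emits cookie values through BaseCookie (unquoted), treat_as_secure_origin lets Secure cookies flow to the listed plain-HTTP origins, and clear() accepts a predicate over Morsel objects, with clear_domain() built on top of it. A short sketch, assuming an aiohttp version that ships these parameters (roughly 3.7/3.8):

import asyncio
from yarl import URL
import aiohttp

async def main() -> None:
    jar = aiohttp.CookieJar(
        quote_cookie=False,                         # emit values unquoted
        treat_as_secure_origin="http://127.0.0.1",  # trust this HTTP origin
    )
    jar.update_cookies({"session": "abc123"}, URL("http://127.0.0.1/"))
    jar.clear(lambda morsel: morsel.key == "session")  # selective clear
    jar.clear_domain("127.0.0.1")                      # everything for a domain
    async with aiohttp.ClientSession(cookie_jar=jar) as session:
        pass

asyncio.run(main())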


@ -1,5 +1,5 @@
import io
from typing import Any, Iterable, List, Optional # noqa
from typing import Any, Iterable, List, Optional
from urllib.parse import urlencode
from multidict import MultiDict, MultiDictProxy
@ -8,20 +8,25 @@ from . import hdrs, multipart, payload
from .helpers import guess_filename
from .payload import Payload
__all__ = ('FormData',)
__all__ = ("FormData",)
class FormData:
"""Helper class for multipart/form-data and
application/x-www-form-urlencoded body generation."""
"""Helper class for form body generation.
def __init__(self, fields:
Iterable[Any]=(),
quote_fields: bool=True,
charset: Optional[str]=None) -> None:
self._writer = multipart.MultipartWriter('form-data')
Supports multipart/form-data and application/x-www-form-urlencoded.
"""
def __init__(
self,
fields: Iterable[Any] = (),
quote_fields: bool = True,
charset: Optional[str] = None,
) -> None:
self._writer = multipart.MultipartWriter("form-data")
self._fields = [] # type: List[Any]
self._is_multipart = False
self._is_processed = False
self._quote_fields = quote_fields
self._charset = charset
@ -35,10 +40,15 @@ class FormData:
def is_multipart(self) -> bool:
return self._is_multipart
def add_field(self, name: str, value: Any, *,
content_type: Optional[str]=None,
filename: Optional[str]=None,
content_transfer_encoding: Optional[str]=None) -> None:
def add_field(
self,
name: str,
value: Any,
*,
content_type: Optional[str] = None,
filename: Optional[str] = None,
content_transfer_encoding: Optional[str] = None,
) -> None:
if isinstance(value, io.IOBase):
self._is_multipart = True
@ -46,27 +56,31 @@ class FormData:
if filename is None and content_transfer_encoding is None:
filename = name
type_options = MultiDict({'name': name})
type_options = MultiDict({"name": name}) # type: MultiDict[str]
if filename is not None and not isinstance(filename, str):
raise TypeError('filename must be an instance of str. '
'Got: %s' % filename)
raise TypeError(
"filename must be an instance of str. " "Got: %s" % filename
)
if filename is None and isinstance(value, io.IOBase):
filename = guess_filename(value, name)
if filename is not None:
type_options['filename'] = filename
type_options["filename"] = filename
self._is_multipart = True
headers = {}
if content_type is not None:
if not isinstance(content_type, str):
raise TypeError('content_type must be an instance of str. '
'Got: %s' % content_type)
raise TypeError(
"content_type must be an instance of str. " "Got: %s" % content_type
)
headers[hdrs.CONTENT_TYPE] = content_type
self._is_multipart = True
if content_transfer_encoding is not None:
if not isinstance(content_transfer_encoding, str):
raise TypeError('content_transfer_encoding must be an instance'
' of str. Got: %s' % content_transfer_encoding)
raise TypeError(
"content_transfer_encoding must be an instance"
" of str. Got: %s" % content_transfer_encoding
)
headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
self._is_multipart = True
@ -79,60 +93,67 @@ class FormData:
rec = to_add.pop(0)
if isinstance(rec, io.IOBase):
k = guess_filename(rec, 'unknown')
self.add_field(k, rec) # type: ignore
k = guess_filename(rec, "unknown")
self.add_field(k, rec) # type: ignore[arg-type]
elif isinstance(rec, (MultiDictProxy, MultiDict)):
to_add.extend(rec.items())
elif isinstance(rec, (list, tuple)) and len(rec) == 2:
k, fp = rec
self.add_field(k, fp) # type: ignore
self.add_field(k, fp) # type: ignore[arg-type]
else:
raise TypeError('Only io.IOBase, multidict and (name, file) '
'pairs allowed, use .add_field() for passing '
'more complex parameters, got {!r}'
.format(rec))
raise TypeError(
"Only io.IOBase, multidict and (name, file) "
"pairs allowed, use .add_field() for passing "
"more complex parameters, got {!r}".format(rec)
)
def _gen_form_urlencoded(self) -> payload.BytesPayload:
# form data (x-www-form-urlencoded)
data = []
for type_options, _, value in self._fields:
data.append((type_options['name'], value))
data.append((type_options["name"], value))
charset = self._charset if self._charset is not None else 'utf-8'
charset = self._charset if self._charset is not None else "utf-8"
if charset == 'utf-8':
content_type = 'application/x-www-form-urlencoded'
if charset == "utf-8":
content_type = "application/x-www-form-urlencoded"
else:
content_type = ('application/x-www-form-urlencoded; '
'charset=%s' % charset)
content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset
return payload.BytesPayload(
urlencode(data, doseq=True, encoding=charset).encode(),
content_type=content_type)
content_type=content_type,
)
def _gen_form_data(self) -> multipart.MultipartWriter:
"""Encode a list of fields using the multipart/form-data MIME format"""
if self._is_processed:
raise RuntimeError("Form data has been processed already")
for dispparams, headers, value in self._fields:
try:
if hdrs.CONTENT_TYPE in headers:
part = payload.get_payload(
value, content_type=headers[hdrs.CONTENT_TYPE],
headers=headers, encoding=self._charset)
value,
content_type=headers[hdrs.CONTENT_TYPE],
headers=headers,
encoding=self._charset,
)
else:
part = payload.get_payload(
value, headers=headers, encoding=self._charset)
value, headers=headers, encoding=self._charset
)
except Exception as exc:
raise TypeError(
'Can not serialize value type: %r\n '
'headers: %r\n value: %r' % (
type(value), headers, value)) from exc
"Can not serialize value type: %r\n "
"headers: %r\n value: %r" % (type(value), headers, value)
) from exc
if dispparams:
part.set_content_disposition(
'form-data', quote_fields=self._quote_fields, **dispparams
"form-data", quote_fields=self._quote_fields, **dispparams
)
# FIXME cgi.FieldStorage doesn't likes body parts with
# Content-Length which were sent via chunked transfer encoding
@ -141,6 +162,7 @@ class FormData:
self._writer.append_payload(part)
self._is_processed = True
return self._writer
def __call__(self) -> Payload:
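The FormData changes are mostly reformatting, plus a new _is_processed flag so _gen_form_data() raises instead of silently reusing a consumed writer. Typical use of the public API, for reference (the file name and URL are placeholders):

import asyncio
import aiohttp

async def main() -> None:
    form = aiohttp.FormData(charset="utf-8")
    form.add_field("comment", "hello")
    form.add_field(
        "upload",
        open("report.csv", "rb"),
        filename="report.csv",
        content_type="text/csv",
    )
    async with aiohttp.ClientSession() as session:
        async with session.post("https://example.org/upload", data=form) as resp:
            print(resp.status)

asyncio.run(main())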


@ -1,72 +0,0 @@
from collections.abc import MutableSequence
from functools import total_ordering
from .helpers import NO_EXTENSIONS
@total_ordering
class FrozenList(MutableSequence):
__slots__ = ('_frozen', '_items')
def __init__(self, items=None):
self._frozen = False
if items is not None:
items = list(items)
else:
items = []
self._items = items
@property
def frozen(self):
return self._frozen
def freeze(self):
self._frozen = True
def __getitem__(self, index):
return self._items[index]
def __setitem__(self, index, value):
if self._frozen:
raise RuntimeError("Cannot modify frozen list.")
self._items[index] = value
def __delitem__(self, index):
if self._frozen:
raise RuntimeError("Cannot modify frozen list.")
del self._items[index]
def __len__(self):
return self._items.__len__()
def __iter__(self):
return self._items.__iter__()
def __reversed__(self):
return self._items.__reversed__()
def __eq__(self, other):
return list(self) == other
def __le__(self, other):
return list(self) <= other
def insert(self, pos, item):
if self._frozen:
raise RuntimeError("Cannot modify frozen list.")
self._items.insert(pos, item)
def __repr__(self):
return '<FrozenList(frozen={}, {!r})>'.format(self._frozen,
self._items)
PyFrozenList = FrozenList
try:
from aiohttp._frozenlist import FrozenList as CFrozenList # type: ignore
if not NO_EXTENSIONS:
FrozenList = CFrozenList # type: ignore
except ImportError: # pragma: no cover
pass
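frozenlist.py is deleted outright (as is its .pyi stub below), consistent with FrozenList having been split out of aiohttp into the standalone frozenlist distribution. A sketch of the replacement import, assuming that package is installed:

from frozenlist import FrozenList

fl = FrozenList([1, 2, 3])
fl.append(4)   # mutable until frozen
fl.freeze()
try:
    fl.append(5)
except RuntimeError as exc:
    print(exc)  # "Cannot modify frozen list."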


@ -1,63 +0,0 @@
from typing import (
Generic,
Iterable,
Iterator,
List,
MutableSequence,
Optional,
TypeVar,
Union,
overload,
)
_T = TypeVar('_T')
_Arg = Union[List[_T], Iterable[_T]]
class FrozenList(MutableSequence[_T], Generic[_T]):
def __init__(self, items: Optional[_Arg[_T]]=...) -> None: ...
@property
def frozen(self) -> bool: ...
def freeze(self) -> None: ...
@overload
def __getitem__(self, i: int) -> _T: ...
@overload
def __getitem__(self, s: slice) -> FrozenList[_T]: ...
@overload
def __setitem__(self, i: int, o: _T) -> None: ...
@overload
def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
@overload
def __delitem__(self, i: int) -> None: ...
@overload
def __delitem__(self, i: slice) -> None: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[_T]: ...
def __reversed__(self) -> Iterator[_T]: ...
def __eq__(self, other: object) -> bool: ...
def __le__(self, other: FrozenList[_T]) -> bool: ...
def __ne__(self, other: object) -> bool: ...
def __lt__(self, other: FrozenList[_T]) -> bool: ...
def __ge__(self, other: FrozenList[_T]) -> bool: ...
def __gt__(self, other: FrozenList[_T]) -> bool: ...
def insert(self, pos: int, item: _T) -> None: ...
def __repr__(self) -> str: ...
# types for C accelerators are the same
CFrozenList = PyFrozenList = FrozenList


@ -2,99 +2,113 @@
# After changing the file content call ./tools/gen.py
# to regenerate the headers parser
import sys
from typing import Set
from multidict import istr
METH_ANY = '*'
METH_CONNECT = 'CONNECT'
METH_HEAD = 'HEAD'
METH_GET = 'GET'
METH_DELETE = 'DELETE'
METH_OPTIONS = 'OPTIONS'
METH_PATCH = 'PATCH'
METH_POST = 'POST'
METH_PUT = 'PUT'
METH_TRACE = 'TRACE'
if sys.version_info >= (3, 8):
from typing import Final
else:
from typing_extensions import Final
METH_ALL = {METH_CONNECT, METH_HEAD, METH_GET, METH_DELETE,
METH_OPTIONS, METH_PATCH, METH_POST, METH_PUT, METH_TRACE}
METH_ANY: Final[str] = "*"
METH_CONNECT: Final[str] = "CONNECT"
METH_HEAD: Final[str] = "HEAD"
METH_GET: Final[str] = "GET"
METH_DELETE: Final[str] = "DELETE"
METH_OPTIONS: Final[str] = "OPTIONS"
METH_PATCH: Final[str] = "PATCH"
METH_POST: Final[str] = "POST"
METH_PUT: Final[str] = "PUT"
METH_TRACE: Final[str] = "TRACE"
METH_ALL: Final[Set[str]] = {
METH_CONNECT,
METH_HEAD,
METH_GET,
METH_DELETE,
METH_OPTIONS,
METH_PATCH,
METH_POST,
METH_PUT,
METH_TRACE,
}
ACCEPT = istr('Accept')
ACCEPT_CHARSET = istr('Accept-Charset')
ACCEPT_ENCODING = istr('Accept-Encoding')
ACCEPT_LANGUAGE = istr('Accept-Language')
ACCEPT_RANGES = istr('Accept-Ranges')
ACCESS_CONTROL_MAX_AGE = istr('Access-Control-Max-Age')
ACCESS_CONTROL_ALLOW_CREDENTIALS = istr('Access-Control-Allow-Credentials')
ACCESS_CONTROL_ALLOW_HEADERS = istr('Access-Control-Allow-Headers')
ACCESS_CONTROL_ALLOW_METHODS = istr('Access-Control-Allow-Methods')
ACCESS_CONTROL_ALLOW_ORIGIN = istr('Access-Control-Allow-Origin')
ACCESS_CONTROL_EXPOSE_HEADERS = istr('Access-Control-Expose-Headers')
ACCESS_CONTROL_REQUEST_HEADERS = istr('Access-Control-Request-Headers')
ACCESS_CONTROL_REQUEST_METHOD = istr('Access-Control-Request-Method')
AGE = istr('Age')
ALLOW = istr('Allow')
AUTHORIZATION = istr('Authorization')
CACHE_CONTROL = istr('Cache-Control')
CONNECTION = istr('Connection')
CONTENT_DISPOSITION = istr('Content-Disposition')
CONTENT_ENCODING = istr('Content-Encoding')
CONTENT_LANGUAGE = istr('Content-Language')
CONTENT_LENGTH = istr('Content-Length')
CONTENT_LOCATION = istr('Content-Location')
CONTENT_MD5 = istr('Content-MD5')
CONTENT_RANGE = istr('Content-Range')
CONTENT_TRANSFER_ENCODING = istr('Content-Transfer-Encoding')
CONTENT_TYPE = istr('Content-Type')
COOKIE = istr('Cookie')
DATE = istr('Date')
DESTINATION = istr('Destination')
DIGEST = istr('Digest')
ETAG = istr('Etag')
EXPECT = istr('Expect')
EXPIRES = istr('Expires')
FORWARDED = istr('Forwarded')
FROM = istr('From')
HOST = istr('Host')
IF_MATCH = istr('If-Match')
IF_MODIFIED_SINCE = istr('If-Modified-Since')
IF_NONE_MATCH = istr('If-None-Match')
IF_RANGE = istr('If-Range')
IF_UNMODIFIED_SINCE = istr('If-Unmodified-Since')
KEEP_ALIVE = istr('Keep-Alive')
LAST_EVENT_ID = istr('Last-Event-ID')
LAST_MODIFIED = istr('Last-Modified')
LINK = istr('Link')
LOCATION = istr('Location')
MAX_FORWARDS = istr('Max-Forwards')
ORIGIN = istr('Origin')
PRAGMA = istr('Pragma')
PROXY_AUTHENTICATE = istr('Proxy-Authenticate')
PROXY_AUTHORIZATION = istr('Proxy-Authorization')
RANGE = istr('Range')
REFERER = istr('Referer')
RETRY_AFTER = istr('Retry-After')
SEC_WEBSOCKET_ACCEPT = istr('Sec-WebSocket-Accept')
SEC_WEBSOCKET_VERSION = istr('Sec-WebSocket-Version')
SEC_WEBSOCKET_PROTOCOL = istr('Sec-WebSocket-Protocol')
SEC_WEBSOCKET_EXTENSIONS = istr('Sec-WebSocket-Extensions')
SEC_WEBSOCKET_KEY = istr('Sec-WebSocket-Key')
SEC_WEBSOCKET_KEY1 = istr('Sec-WebSocket-Key1')
SERVER = istr('Server')
SET_COOKIE = istr('Set-Cookie')
TE = istr('TE')
TRAILER = istr('Trailer')
TRANSFER_ENCODING = istr('Transfer-Encoding')
UPGRADE = istr('Upgrade')
WEBSOCKET = istr('WebSocket')
URI = istr('URI')
USER_AGENT = istr('User-Agent')
VARY = istr('Vary')
VIA = istr('Via')
WANT_DIGEST = istr('Want-Digest')
WARNING = istr('Warning')
WWW_AUTHENTICATE = istr('WWW-Authenticate')
X_FORWARDED_FOR = istr('X-Forwarded-For')
X_FORWARDED_HOST = istr('X-Forwarded-Host')
X_FORWARDED_PROTO = istr('X-Forwarded-Proto')
ACCEPT: Final[istr] = istr("Accept")
ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
AGE: Final[istr] = istr("Age")
ALLOW: Final[istr] = istr("Allow")
AUTHORIZATION: Final[istr] = istr("Authorization")
CACHE_CONTROL: Final[istr] = istr("Cache-Control")
CONNECTION: Final[istr] = istr("Connection")
CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
CONTENT_LENGTH: Final[istr] = istr("Content-Length")
CONTENT_LOCATION: Final[istr] = istr("Content-Location")
CONTENT_MD5: Final[istr] = istr("Content-MD5")
CONTENT_RANGE: Final[istr] = istr("Content-Range")
CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
CONTENT_TYPE: Final[istr] = istr("Content-Type")
COOKIE: Final[istr] = istr("Cookie")
DATE: Final[istr] = istr("Date")
DESTINATION: Final[istr] = istr("Destination")
DIGEST: Final[istr] = istr("Digest")
ETAG: Final[istr] = istr("Etag")
EXPECT: Final[istr] = istr("Expect")
EXPIRES: Final[istr] = istr("Expires")
FORWARDED: Final[istr] = istr("Forwarded")
FROM: Final[istr] = istr("From")
HOST: Final[istr] = istr("Host")
IF_MATCH: Final[istr] = istr("If-Match")
IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
IF_RANGE: Final[istr] = istr("If-Range")
IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
LAST_MODIFIED: Final[istr] = istr("Last-Modified")
LINK: Final[istr] = istr("Link")
LOCATION: Final[istr] = istr("Location")
MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
ORIGIN: Final[istr] = istr("Origin")
PRAGMA: Final[istr] = istr("Pragma")
PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
RANGE: Final[istr] = istr("Range")
REFERER: Final[istr] = istr("Referer")
RETRY_AFTER: Final[istr] = istr("Retry-After")
SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
SERVER: Final[istr] = istr("Server")
SET_COOKIE: Final[istr] = istr("Set-Cookie")
TE: Final[istr] = istr("TE")
TRAILER: Final[istr] = istr("Trailer")
TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
UPGRADE: Final[istr] = istr("Upgrade")
URI: Final[istr] = istr("URI")
USER_AGENT: Final[istr] = istr("User-Agent")
VARY: Final[istr] = istr("Vary")
VIA: Final[istr] = istr("Via")
WANT_DIGEST: Final[istr] = istr("Want-Digest")
WARNING: Final[istr] = istr("Warning")
WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
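hdrs.py is rewritten with every constant annotated as typing.Final (via typing_extensions before Python 3.8), so type checkers flag any rebinding such as hdrs.ACCEPT = .... The runtime values stay istr instances, which compare case-insensitively:

import asyncio
import aiohttp
from aiohttp import hdrs

async def main() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.get(
            "https://example.org",
            headers={hdrs.ACCEPT: "application/json"},
        ) as resp:
            # istr keys match regardless of case, so this also finds
            # a server-sent "content-type" header.
            print(resp.headers.get(hdrs.CONTENT_TYPE))

asyncio.run(main())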


@ -17,13 +17,17 @@ import warnings
import weakref
from collections import namedtuple
from contextlib import suppress
from email.utils import parsedate
from math import ceil
from pathlib import Path
from types import TracebackType
from typing import ( # noqa
from typing import (
Any,
Callable,
ContextManager,
Dict,
Generator,
Generic,
Iterable,
Iterator,
List,
@ -38,7 +42,7 @@ from typing import ( # noqa
cast,
)
from urllib.parse import quote
from urllib.request import getproxies
from urllib.request import getproxies, proxy_bypass
import async_timeout
import attr
@ -47,135 +51,140 @@ from yarl import URL
from . import hdrs
from .log import client_logger, internal_logger
from .typedefs import PathLike # noqa
from .typedefs import PathLike, Protocol # noqa
__all__ = ('BasicAuth', 'ChainMapProxy')
__all__ = ("BasicAuth", "ChainMapProxy", "ETag")
IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"
PY_36 = sys.version_info >= (3, 6)
PY_37 = sys.version_info >= (3, 7)
PY_38 = sys.version_info >= (3, 8)
PY_310 = sys.version_info >= (3, 10)
if not PY_37:
if sys.version_info < (3, 7):
import idna_ssl
idna_ssl.patch_match_hostname()
try:
from typing import ContextManager
except ImportError:
from typing_extensions import ContextManager
def all_tasks(
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> Set["asyncio.Task[Any]"]:
tasks = list(asyncio.Task.all_tasks(loop))
return {t for t in tasks if not t.done()}
def all_tasks(
loop: Optional[asyncio.AbstractEventLoop] = None
) -> Set['asyncio.Task[Any]']:
tasks = list(asyncio.Task.all_tasks(loop))
return {t for t in tasks if not t.done()}
else:
all_tasks = asyncio.all_tasks
if PY_37:
all_tasks = getattr(asyncio, 'all_tasks') # noqa
_T = TypeVar('_T')
_T = TypeVar("_T")
_S = TypeVar("_S")
sentinel = object() # type: Any
NO_EXTENSIONS = bool(os.environ.get('AIOHTTP_NO_EXTENSIONS')) # type: bool
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) # type: bool
# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
# for compatibility with older versions
DEBUG = (getattr(sys.flags, 'dev_mode', False) or
(not sys.flags.ignore_environment and
bool(os.environ.get('PYTHONASYNCIODEBUG')))) # type: bool
DEBUG = getattr(sys.flags, "dev_mode", False) or (
not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
) # type: bool
CHAR = set(chr(i) for i in range(0, 128))
CTL = set(chr(i) for i in range(0, 32)) | {chr(127), }
SEPARATORS = {'(', ')', '<', '>', '@', ',', ';', ':', '\\', '"', '/', '[', ']',
'?', '=', '{', '}', ' ', chr(9)}
CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
chr(127),
}
SEPARATORS = {
"(",
")",
"<",
">",
"@",
",",
";",
":",
"\\",
'"',
"/",
"[",
"]",
"?",
"=",
"{",
"}",
" ",
chr(9),
}
TOKEN = CHAR ^ CTL ^ SEPARATORS
coroutines = asyncio.coroutines
old_debug = coroutines._DEBUG # type: ignore
# prevent "coroutine noop was never awaited" warning.
coroutines._DEBUG = False # type: ignore
class noop:
def __await__(self) -> Generator[None, None, None]:
yield
@asyncio.coroutine
def noop(*args, **kwargs): # type: ignore
return # type: ignore
async def noop2(*args: Any, **kwargs: Any) -> None:
return
coroutines._DEBUG = old_debug # type: ignore
class BasicAuth(namedtuple('BasicAuth', ['login', 'password', 'encoding'])):
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
"""Http basic authentication helper."""
def __new__(cls, login: str,
password: str='',
encoding: str='latin1') -> 'BasicAuth':
def __new__(
cls, login: str, password: str = "", encoding: str = "latin1"
) -> "BasicAuth":
if login is None:
raise ValueError('None is not allowed as login value')
raise ValueError("None is not allowed as login value")
if password is None:
raise ValueError('None is not allowed as password value')
raise ValueError("None is not allowed as password value")
if ':' in login:
raise ValueError(
'A ":" is not allowed in login (RFC 1945#section-11.1)')
if ":" in login:
raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
return super().__new__(cls, login, password, encoding)
@classmethod
def decode(cls, auth_header: str, encoding: str='latin1') -> 'BasicAuth':
def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
"""Create a BasicAuth object from an Authorization HTTP header."""
try:
auth_type, encoded_credentials = auth_header.split(' ', 1)
auth_type, encoded_credentials = auth_header.split(" ", 1)
except ValueError:
raise ValueError('Could not parse authorization header.')
raise ValueError("Could not parse authorization header.")
if auth_type.lower() != 'basic':
raise ValueError('Unknown authorization method %s' % auth_type)
if auth_type.lower() != "basic":
raise ValueError("Unknown authorization method %s" % auth_type)
try:
decoded = base64.b64decode(
encoded_credentials.encode('ascii'), validate=True
encoded_credentials.encode("ascii"), validate=True
).decode(encoding)
except binascii.Error:
raise ValueError('Invalid base64 encoding.')
raise ValueError("Invalid base64 encoding.")
try:
# RFC 2617 HTTP Authentication
# https://www.ietf.org/rfc/rfc2617.txt
# the colon must be present, but the username and password may be
# otherwise blank.
username, password = decoded.split(':', 1)
username, password = decoded.split(":", 1)
except ValueError:
raise ValueError('Invalid credentials.')
raise ValueError("Invalid credentials.")
return cls(username, password, encoding=encoding)
@classmethod
def from_url(cls, url: URL,
*, encoding: str='latin1') -> Optional['BasicAuth']:
def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
"""Create BasicAuth from url."""
if not isinstance(url, URL):
raise TypeError("url should be yarl.URL instance")
if url.user is None:
return None
return cls(url.user, url.password or '', encoding=encoding)
return cls(url.user, url.password or "", encoding=encoding)
def encode(self) -> str:
"""Encode credentials."""
creds = ('%s:%s' % (self.login, self.password)).encode(self.encoding)
return 'Basic %s' % base64.b64encode(creds).decode(self.encoding)
creds = (f"{self.login}:{self.password}").encode(self.encoding)
return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
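BasicAuth is reformatted but keeps its contract: login must not contain ':', and credentials are base64-encoded with the stated encoding. A quick round trip:

from aiohttp import BasicAuth

auth = BasicAuth("user", "secret")          # encoding defaults to latin1
header_value = auth.encode()                # 'Basic dXNlcjpzZWNyZXQ='
roundtrip = BasicAuth.decode(header_value)
assert roundtrip.login == "user"
assert roundtrip.password == "secret"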
@ -187,12 +196,14 @@ def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
def netrc_from_env() -> Optional[netrc.netrc]:
"""Attempt to load the netrc file from the path specified by the env-var
"""Load netrc from file.
Attempt to load it from the path specified by the env-var
NETRC or in the default location in the user's home directory.
Returns None if it couldn't be found or fails to parse.
"""
netrc_env = os.environ.get('NETRC')
netrc_env = os.environ.get("NETRC")
if netrc_env is not None:
netrc_path = Path(netrc_env)
@ -201,44 +212,50 @@ def netrc_from_env() -> Optional[netrc.netrc]:
home_dir = Path.home()
except RuntimeError as e: # pragma: no cover
# if pathlib can't resolve home, it may raise a RuntimeError
client_logger.debug('Could not resolve home directory when '
'trying to look for .netrc file: %s', e)
client_logger.debug(
"Could not resolve home directory when "
"trying to look for .netrc file: %s",
e,
)
return None
netrc_path = home_dir / (
'_netrc' if platform.system() == 'Windows' else '.netrc')
netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")
try:
return netrc.netrc(str(netrc_path))
except netrc.NetrcParseError as e:
client_logger.warning('Could not parse .netrc file: %s', e)
client_logger.warning("Could not parse .netrc file: %s", e)
except OSError as e:
# we couldn't read the file (doesn't exist, permissions, etc.)
if netrc_env or netrc_path.is_file():
# only warn if the environment wanted us to load it,
# or it appears like the default file does actually exist
client_logger.warning('Could not read .netrc file: %s', e)
client_logger.warning("Could not read .netrc file: %s", e)
return None
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
proxy = attr.ib(type=URL)
proxy_auth = attr.ib(type=Optional[BasicAuth])
proxy: URL
proxy_auth: Optional[BasicAuth]
def proxies_from_env() -> Dict[str, ProxyInfo]:
proxy_urls = {k: URL(v) for k, v in getproxies().items()
if k in ('http', 'https')}
proxy_urls = {
k: URL(v)
for k, v in getproxies().items()
if k in ("http", "https", "ws", "wss")
}
netrc_obj = netrc_from_env()
stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
ret = {}
for proto, val in stripped.items():
proxy, auth = val
if proxy.scheme == 'https':
if proxy.scheme in ("https", "wss"):
client_logger.warning(
"HTTPS proxies %s are not supported, ignoring", proxy)
"%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
)
continue
if netrc_obj and auth is None:
auth_from_netrc = None
@ -255,42 +272,61 @@ def proxies_from_env() -> Dict[str, ProxyInfo]:
return ret
def current_task(loop: Optional[asyncio.AbstractEventLoop]=None) -> asyncio.Task: # type: ignore # noqa # Return type is intentionally Generic here
if PY_37:
return asyncio.current_task(loop=loop) # type: ignore
def current_task(
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
if sys.version_info >= (3, 7):
return asyncio.current_task(loop=loop)
else:
return asyncio.Task.current_task(loop=loop)
def get_running_loop(
loop: Optional[asyncio.AbstractEventLoop]=None
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
if loop is None:
loop = asyncio.get_event_loop()
if not loop.is_running():
warnings.warn("The object should be created from async function",
DeprecationWarning, stacklevel=3)
warnings.warn(
"The object should be created within an async function",
DeprecationWarning,
stacklevel=3,
)
if loop.get_debug():
internal_logger.warning(
"The object should be created from async function",
stack_info=True)
"The object should be created within an async function", stack_info=True
)
return loop
def isasyncgenfunction(obj: Any) -> bool:
func = getattr(inspect, 'isasyncgenfunction', None)
func = getattr(inspect, "isasyncgenfunction", None)
if func is not None:
return func(obj)
return func(obj) # type: ignore[no-any-return]
else:
return False
@attr.s(frozen=True, slots=True)
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
"""Get a permitted proxy for the given URL from the env."""
if url.host is not None and proxy_bypass(url.host):
raise LookupError(f"Proxying is disallowed for `{url.host!r}`")
proxies_in_env = proxies_from_env()
try:
proxy_info = proxies_in_env[url.scheme]
except KeyError:
raise LookupError(f"No proxies found for `{url!s}` in the env")
else:
return proxy_info.proxy, proxy_info.proxy_auth
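get_env_proxy_for_url() is new in this hunk: it combines proxies_from_env() with urllib's proxy_bypass() and raises LookupError when no proxy applies. It lives in the internal helpers module, so the direct import below is for illustration only (the proxy host is a placeholder):

import os
from yarl import URL
from aiohttp.helpers import get_env_proxy_for_url

os.environ["http_proxy"] = "http://user:pass@proxy.local:3128"
try:
    proxy, proxy_auth = get_env_proxy_for_url(URL("http://example.org/"))
    print(proxy)       # http://proxy.local:3128 (credentials stripped)
    print(proxy_auth)  # BasicAuth(login='user', password='pass', ...)
except LookupError as exc:
    print(exc)         # no proxy configured, or the host is bypassed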
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
type = attr.ib(type=str)
subtype = attr.ib(type=str)
suffix = attr.ib(type=str)
parameters = attr.ib(type=MultiDictProxy) # type: MultiDictProxy[str]
type: str
subtype: str
suffix: str
parameters: "MultiDictProxy[str]"
@functools.lru_cache(maxsize=56)
@ -309,83 +345,135 @@ def parse_mimetype(mimetype: str) -> MimeType:
"""
if not mimetype:
return MimeType(type='', subtype='', suffix='',
parameters=MultiDictProxy(MultiDict()))
return MimeType(
type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
)
parts = mimetype.split(';')
parts = mimetype.split(";")
params = MultiDict() # type: MultiDict[str]
for item in parts[1:]:
if not item:
continue
key, value = cast(Tuple[str, str],
item.split('=', 1) if '=' in item else (item, ''))
key, value = cast(
Tuple[str, str], item.split("=", 1) if "=" in item else (item, "")
)
params.add(key.lower().strip(), value.strip(' "'))
fulltype = parts[0].strip().lower()
if fulltype == '*':
fulltype = '*/*'
if fulltype == "*":
fulltype = "*/*"
mtype, stype = (cast(Tuple[str, str], fulltype.split('/', 1))
if '/' in fulltype else (fulltype, ''))
stype, suffix = (cast(Tuple[str, str], stype.split('+', 1))
if '+' in stype else (stype, ''))
mtype, stype = (
cast(Tuple[str, str], fulltype.split("/", 1))
if "/" in fulltype
else (fulltype, "")
)
stype, suffix = (
cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "")
)
return MimeType(type=mtype, subtype=stype, suffix=suffix,
parameters=MultiDictProxy(params))
return MimeType(
type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
)
def guess_filename(obj: Any, default: Optional[str]=None) -> Optional[str]:
name = getattr(obj, 'name', None)
if name and isinstance(name, str) and name[0] != '<' and name[-1] != '>':
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
name = getattr(obj, "name", None)
if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
return Path(name).name
return default
def content_disposition_header(disptype: str,
quote_fields: bool=True,
**params: str) -> str:
"""Sets ``Content-Disposition`` header.
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
def quoted_string(content: str) -> str:
"""Return 7-bit content as quoted-string.
Format content into a quoted-string as defined in RFC5322 for
Internet Message Format. Notice that this is not the 8-bit HTTP
format, but the 7-bit email format. Content must be in usascii or
a ValueError is raised.
"""
if not (QCONTENT > set(content)):
raise ValueError(f"bad content for quoted-string {content!r}")
return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
def content_disposition_header(
disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
"""Sets ``Content-Disposition`` header for MIME.
This is the MIME payload Content-Disposition header from RFC 2183
and RFC 7578 section 4.2, not the HTTP Content-Disposition from
RFC 6266.
disptype is a disposition type: inline, attachment, form-data.
Should be valid extension token (see RFC 2183)
quote_fields performs value quoting to 7-bit MIME headers
according to RFC 7578. Set quote_fields to False if the recipient
can take 8-bit file names and field values.
_charset specifies the charset to use when quote_fields is True.
params is a dict with disposition params.
"""
if not disptype or not (TOKEN > set(disptype)):
raise ValueError('bad content disposition type {!r}'
''.format(disptype))
raise ValueError("bad content disposition type {!r}" "".format(disptype))
value = disptype
if params:
lparams = []
for key, val in params.items():
if not key or not (TOKEN > set(key)):
raise ValueError('bad content disposition parameter'
' {!r}={!r}'.format(key, val))
qval = quote(val, '') if quote_fields else val
lparams.append((key, '"%s"' % qval))
if key == 'filename':
lparams.append(('filename*', "utf-8''" + qval))
sparams = '; '.join('='.join(pair) for pair in lparams)
value = '; '.join((value, sparams))
raise ValueError(
"bad content disposition parameter" " {!r}={!r}".format(key, val)
)
if quote_fields:
if key.lower() == "filename":
qval = quote(val, "", encoding=_charset)
lparams.append((key, '"%s"' % qval))
else:
try:
qval = quoted_string(val)
except ValueError:
qval = "".join(
(_charset, "''", quote(val, "", encoding=_charset))
)
lparams.append((key + "*", qval))
else:
lparams.append((key, '"%s"' % qval))
else:
qval = val.replace("\\", "\\\\").replace('"', '\\"')
lparams.append((key, '"%s"' % qval))
sparams = "; ".join("=".join(pair) for pair in lparams)
value = "; ".join((value, sparams))
return value
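content_disposition_header() now treats the filename parameter specially (percent-quoted in _charset), falls back to the RFC 2231 key*=charset''value form for other non-7-bit parameters, and supports quote_fields=False with plain backslash escaping. An illustration against the internal helper:

from aiohttp.helpers import content_disposition_header

print(content_disposition_header("attachment", filename="r\u00e9sum\u00e9.pdf"))
# attachment; filename="r%C3%A9sum%C3%A9.pdf"

print(content_disposition_header("form-data", quote_fields=False, name='a"b'))
# form-data; name="a\"b"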
class reify:
"""Use as a class method decorator. It operates almost exactly like
class _TSelf(Protocol, Generic[_T]):
_cache: Dict[str, _T]
class reify(Generic[_T]):
"""Use as a class method decorator.
It operates almost exactly like
the Python `@property` decorator, but it puts the result of the
method it decorates into the instance dict after the first call,
effectively replacing the function it decorates with an instance
variable. It is, in Python parlance, a data descriptor.
"""
def __init__(self, wrapped: Callable[..., Any]) -> None:
def __init__(self, wrapped: Callable[..., _T]) -> None:
self.wrapped = wrapped
self.__doc__ = wrapped.__doc__
self.name = wrapped.__name__
def __get__(self, inst: Any, owner: Any) -> Any:
def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
try:
try:
return inst._cache[self.name]
@ -398,7 +486,7 @@ class reify:
return self
raise
def __set__(self, inst: Any, value: Any) -> None:
def __set__(self, inst: _TSelf[_T], value: _T) -> None:
raise AttributeError("reified property is read-only")
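reify becomes Generic[_T], and the new _TSelf protocol documents the long-standing requirement that the host instance provide a _cache dict for the computed values. A minimal sketch outside aiohttp:

from aiohttp.helpers import reify

class Config:
    def __init__(self) -> None:
        self._cache: dict = {}  # reify stores results here

    @reify
    def settings(self) -> dict:
        print("computed once")
        return {"debug": True}

c = Config()
c.settings  # prints "computed once" and caches the dict
c.settings  # served from c._cache, no recomputation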
@ -406,31 +494,35 @@ reify_py = reify
try:
from ._helpers import reify as reify_c
if not NO_EXTENSIONS:
reify = reify_c # type: ignore
reify = reify_c # type: ignore[misc,assignment]
except ImportError:
pass
_ipv4_pattern = (r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}'
r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$')
_ipv4_pattern = (
r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
_ipv6_pattern = (
r'^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}'
r'(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)'
r'((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})'
r'(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}'
r'(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}'
r'[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)'
r'(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}'
r':|:(:[A-F0-9]{1,4}){7})$')
r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode('ascii'))
_ipv6_regexb = re.compile(_ipv6_pattern.encode('ascii'), flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)
def _is_ip_address(
regex: Pattern[str], regexb: Pattern[bytes],
host: Optional[Union[str, bytes]]) -> bool:
regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
if host is None:
return False
if isinstance(host, str):
@ -438,26 +530,22 @@ def _is_ip_address(
elif isinstance(host, (bytes, bytearray, memoryview)):
return bool(regexb.match(host))
else:
raise TypeError("{} [{}] is not a str or bytes"
.format(host, type(host)))
raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)
def is_ip_address(
host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
return is_ipv4_address(host) or is_ipv6_address(host)
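is_ip_address() keeps its behavior under the reformat: the host may be str or bytes (matched against the IPv4/IPv6 patterns above), and None is accepted and simply falsy:

from aiohttp.helpers import is_ip_address

assert is_ip_address("127.0.0.1")
assert is_ip_address(b"::1")
assert not is_ip_address("example.org")
assert not is_ip_address(None)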
def next_whole_second() -> datetime.datetime:
"""Return current time rounded up to the next whole second."""
return (
datetime.datetime.now(
datetime.timezone.utc).replace(microsecond=0) +
datetime.timedelta(seconds=0)
)
return datetime.datetime.now(datetime.timezone.utc).replace(
microsecond=0
) + datetime.timedelta(seconds=0)
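Worth noting: the rewritten body adds timedelta(seconds=0), a no-op, so despite the docstring the helper returns the current time truncated to the whole second rather than rounded up. A hypothetical variant that would match the docstring:

import datetime

def next_whole_second_rounded_up() -> datetime.datetime:
    now = datetime.datetime.now(datetime.timezone.utc)
    if now.microsecond == 0:
        return now  # already on a whole-second boundary
    return now.replace(microsecond=0) + datetime.timedelta(seconds=1)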
_cached_current_datetime = None # type: Optional[int]
@@ -474,19 +562,37 @@ def rfc822_formatted_time() -> str:
# always English!
# Tuples are constants stored in codeobject!
_weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
_monthname = ("", # Dummy so we can use 1-based month numbers
"Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
_monthname = (
"", # Dummy so we can use 1-based month numbers
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec",
)
year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
_cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
_weekdayname[wd], day, _monthname[month], year, hh, mm, ss
_weekdayname[wd],
day,
_monthname[month],
year,
hh,
mm,
ss,
)
_cached_current_datetime = now
return _cached_formatted_datetime
def _weakref_handle(info): # type: ignore
def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
ref, name = info
ob = ref()
if ob is not None:
@@ -494,46 +600,60 @@ def _weakref_handle(info): # type: ignore
getattr(ob, name)()
def weakref_handle(ob, name, timeout, loop, ceil_timeout=True): # type: ignore
def weakref_handle(
ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
if timeout is not None and timeout > 0:
when = loop.time() + timeout
if ceil_timeout:
if timeout >= 5:
when = ceil(when)
return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
return None
def call_later(cb, timeout, loop): # type: ignore
def call_later(
cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
if timeout is not None and timeout > 0:
when = ceil(loop.time() + timeout)
when = loop.time() + timeout
if timeout > 5:
when = ceil(when)
return loop.call_at(when, cb)
return None
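Both helpers now apply ceil() only to timeouts above roughly five seconds, so short timeouts keep sub-second precision while long ones land on whole-second boundaries the event loop can coalesce. A small sketch of the effect (illustrative; the 100.3 clock value is made up):

from math import ceil

def when_to_fire(now: float, timeout: float) -> float:
    when = now + timeout
    return ceil(when) if timeout > 5 else when

print(when_to_fire(100.3, 0.5))   # 100.8 -- short timeout, kept precise
print(when_to_fire(100.3, 30.0))  # 131   -- long timeout, rounded up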
class TimeoutHandle:
""" Timeout handle """
"""Timeout handle"""
def __init__(self,
loop: asyncio.AbstractEventLoop,
timeout: Optional[float]) -> None:
def __init__(
self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
) -> None:
self._timeout = timeout
self._loop = loop
self._callbacks = [] # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]] # noqa
self._callbacks = (
[]
) # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]]
def register(self, callback: Callable[..., None],
*args: Any, **kwargs: Any) -> None:
def register(
self, callback: Callable[..., None], *args: Any, **kwargs: Any
) -> None:
self._callbacks.append((callback, args, kwargs))
def close(self) -> None:
self._callbacks.clear()
def start(self) -> Optional[asyncio.Handle]:
if self._timeout is not None and self._timeout > 0:
at = ceil(self._loop.time() + self._timeout)
return self._loop.call_at(at, self.__call__)
timeout = self._timeout
if timeout is not None and timeout > 0:
when = self._loop.time() + timeout
if timeout >= 5:
when = ceil(when)
return self._loop.call_at(when, self.__call__)
else:
return None
def timer(self) -> 'BaseTimerContext':
def timer(self) -> "BaseTimerContext":
if self._timeout is not None and self._timeout > 0:
timer = TimerContext(self._loop)
self.register(timer.timeout)
@@ -549,23 +669,25 @@ class TimeoutHandle:
self._callbacks.clear()
class BaseTimerContext(ContextManager['BaseTimerContext']):
class BaseTimerContext(ContextManager["BaseTimerContext"]):
pass
class TimerNoop(BaseTimerContext):
def __enter__(self) -> BaseTimerContext:
return self
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
return False
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
return
class TimerContext(BaseTimerContext):
""" Low resolution timeout context manager """
"""Low resolution timeout context manager"""
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
@@ -576,19 +698,22 @@ class TimerContext(BaseTimerContext):
task = current_task(loop=self._loop)
if task is None:
raise RuntimeError('Timeout context manager should be used '
'inside a task')
raise RuntimeError(
"Timeout context manager should be used " "inside a task"
)
if self._cancelled:
task.cancel()
raise asyncio.TimeoutError from None
self._tasks.append(task)
return self
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
if self._tasks:
self._tasks.pop()
@@ -604,23 +729,21 @@ class TimerContext(BaseTimerContext):
self._cancelled = True
class CeilTimeout(async_timeout.timeout):
def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout:
if delay is None or delay <= 0:
return async_timeout.timeout(None)
def __enter__(self) -> async_timeout.timeout:
if self._timeout is not None:
self._task = current_task(loop=self._loop)
if self._task is None:
raise RuntimeError(
'Timeout context manager should be used inside a task')
self._cancel_handler = self._loop.call_at(
ceil(self._loop.time() + self._timeout), self._cancel_task)
return self
loop = get_running_loop()
now = loop.time()
when = now + delay
if delay > 5:
when = ceil(when)
return async_timeout.timeout_at(when)
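A sketch of how the new helper is consumed (illustrative; async_timeout 4.x returns an async context manager from timeout_at, so callers simply wrap the awaited work):

import asyncio

async def demo() -> None:
    async with ceil_timeout(10.0):  # deadline ceiled to a whole second
        await asyncio.sleep(1)

asyncio.run(demo())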
class HeadersMixin:
ATTRS = frozenset([
'_content_type', '_content_dict', '_stored_content_type'])
ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])
_content_type = None # type: Optional[str]
_content_dict = None # type: Optional[Dict[str, str]]
@@ -630,7 +753,7 @@ class HeadersMixin:
self._stored_content_type = raw
if raw is None:
# default value according to RFC 2616
self._content_type = 'application/octet-stream'
self._content_type = "application/octet-stream"
self._content_dict = {}
else:
self._content_type, self._content_dict = cgi.parse_header(raw)
@@ -638,23 +761,25 @@ class HeadersMixin:
@property
def content_type(self) -> str:
"""The value of content part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_type # type: ignore
return self._content_type # type: ignore[return-value]
@property
def charset(self) -> Optional[str]:
"""The value of charset part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_dict.get('charset') # type: ignore
return self._content_dict.get("charset") # type: ignore[union-attr]
@property
def content_length(self) -> Optional[int]:
"""The value of Content-Length HTTP header."""
content_length = self._headers.get(hdrs.CONTENT_LENGTH) # type: ignore
content_length = self._headers.get( # type: ignore[attr-defined]
hdrs.CONTENT_LENGTH
)
if content_length is not None:
return int(content_length)
@@ -662,25 +787,27 @@ class HeadersMixin:
return None
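All three properties re-parse only when the raw Content-Type value changes; the actual split is done by cgi.parse_header (still in the stdlib when this diff was written, removed in Python 3.13). A standalone illustration of that split:

import cgi

ctype, params = cgi.parse_header("text/html; charset=UTF-8")
print(ctype)              # text/html
print(params["charset"])  # UTF-8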
def set_result(fut: 'asyncio.Future[_T]', result: _T) -> None:
def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
if not fut.done():
fut.set_result(result)
def set_exception(fut: 'asyncio.Future[_T]', exc: BaseException) -> None:
def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
if not fut.done():
fut.set_exception(exc)
class ChainMapProxy(Mapping[str, Any]):
__slots__ = ('_maps',)
__slots__ = ("_maps",)
def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
self._maps = tuple(maps)
def __init_subclass__(cls) -> None:
raise TypeError("Inheritance class {} from ChainMapProxy "
"is forbidden".format(cls.__name__))
raise TypeError(
"Inheritance class {} from ChainMapProxy "
"is forbidden".format(cls.__name__)
)
def __getitem__(self, key: str) -> Any:
for mapping in self._maps:
@@ -690,12 +817,12 @@ class ChainMapProxy(Mapping[str, Any]):
pass
raise KeyError(key)
def get(self, key: str, default: Any=None) -> Any:
def get(self, key: str, default: Any = None) -> Any:
return self[key] if key in self else default
def __len__(self) -> int:
# reuses stored hash values if possible
return len(set().union(*self._maps)) # type: ignore
return len(set().union(*self._maps)) # type: ignore[arg-type]
def __iter__(self) -> Iterator[str]:
d = {} # type: Dict[str, Any]
@@ -712,4 +839,37 @@ class ChainMapProxy(Mapping[str, Any]):
def __repr__(self) -> str:
content = ", ".join(map(repr, self._maps))
return 'ChainMapProxy({})'.format(content)
return f"ChainMapProxy({content})"
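ChainMapProxy is a read-only, deliberately non-subclassable view over several mappings, with earlier mappings shadowing later ones. A minimal sketch:

proxy = ChainMapProxy([{"a": 1}, {"a": 2, "b": 3}])
print(proxy["a"])      # 1 -- the first mapping wins
print(proxy.get("b"))  # 3
print(len(proxy))      # 2 -- unique keys across all mappings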
# https://tools.ietf.org/html/rfc7232#section-2.3
_ETAGC = r"[!#-}\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
_QUOTED_ETAG = fr'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
LIST_QUOTED_ETAG_RE = re.compile(fr"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")
ETAG_ANY = "*"
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
value: str
is_weak: bool = False
def validate_etag_value(value: str) -> None:
if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value):
raise ValueError(
f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
)
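A quick illustration of the new RFC 7232 helpers (the tag values are arbitrary):

m = QUOTED_ETAG_RE.fullmatch('W/"xyzzy"')
print(m.group(1), m.group(2))  # W/ xyzzy -- weakness marker and opaque tag

validate_etag_value("xyzzy")   # passes silently
try:
    validate_etag_value('has"quote')
except ValueError as exc:
    print(exc)  # the double quote falls outside the etagc character class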
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
"""Process a date string, return a datetime object"""
if date_str is not None:
timetuple = parsedate(date_str)
if timetuple is not None:
with suppress(ValueError):
return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
return None
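parse_http_date delegates to parsedate (imported from email.utils upstream), so it accepts the usual RFC 822/1123 date shapes and returns None for anything else. For instance:

print(parse_http_date("Wed, 21 Oct 2015 07:28:00 GMT"))
# 2015-10-21 07:28:00+00:00
print(parse_http_date("not a date"))
# None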

View file

@@ -1,50 +1,72 @@
import http.server
import sys
from typing import Mapping, Tuple # noqa
from typing import Mapping, Tuple
from . import __version__
from .http_exceptions import HttpProcessingError as HttpProcessingError
from .http_parser import HeadersParser as HeadersParser
from .http_parser import HttpParser as HttpParser
from .http_parser import HttpRequestParser as HttpRequestParser
from .http_parser import HttpResponseParser as HttpResponseParser
from .http_parser import RawRequestMessage as RawRequestMessage
from .http_parser import RawResponseMessage as RawResponseMessage
from .http_websocket import WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE
from .http_websocket import WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE
from .http_websocket import WS_KEY as WS_KEY
from .http_websocket import WebSocketError as WebSocketError
from .http_websocket import WebSocketReader as WebSocketReader
from .http_websocket import WebSocketWriter as WebSocketWriter
from .http_websocket import WSCloseCode as WSCloseCode
from .http_websocket import WSMessage as WSMessage
from .http_websocket import WSMsgType as WSMsgType
from .http_websocket import ws_ext_gen as ws_ext_gen
from .http_websocket import ws_ext_parse as ws_ext_parse
from .http_writer import HttpVersion as HttpVersion
from .http_writer import HttpVersion10 as HttpVersion10
from .http_writer import HttpVersion11 as HttpVersion11
from .http_writer import StreamWriter as StreamWriter
from .http_parser import (
HeadersParser as HeadersParser,
HttpParser as HttpParser,
HttpRequestParser as HttpRequestParser,
HttpResponseParser as HttpResponseParser,
RawRequestMessage as RawRequestMessage,
RawResponseMessage as RawResponseMessage,
)
from .http_websocket import (
WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
WS_KEY as WS_KEY,
WebSocketError as WebSocketError,
WebSocketReader as WebSocketReader,
WebSocketWriter as WebSocketWriter,
WSCloseCode as WSCloseCode,
WSMessage as WSMessage,
WSMsgType as WSMsgType,
ws_ext_gen as ws_ext_gen,
ws_ext_parse as ws_ext_parse,
)
from .http_writer import (
HttpVersion as HttpVersion,
HttpVersion10 as HttpVersion10,
HttpVersion11 as HttpVersion11,
StreamWriter as StreamWriter,
)
__all__ = (
'HttpProcessingError', 'RESPONSES', 'SERVER_SOFTWARE',
"HttpProcessingError",
"RESPONSES",
"SERVER_SOFTWARE",
# .http_writer
'StreamWriter', 'HttpVersion', 'HttpVersion10', 'HttpVersion11',
"StreamWriter",
"HttpVersion",
"HttpVersion10",
"HttpVersion11",
# .http_parser
'HeadersParser', 'HttpParser',
'HttpRequestParser', 'HttpResponseParser',
'RawRequestMessage', 'RawResponseMessage',
"HeadersParser",
"HttpParser",
"HttpRequestParser",
"HttpResponseParser",
"RawRequestMessage",
"RawResponseMessage",
# .http_websocket
'WS_CLOSED_MESSAGE', 'WS_CLOSING_MESSAGE', 'WS_KEY',
'WebSocketReader', 'WebSocketWriter', 'ws_ext_gen', 'ws_ext_parse',
'WSMessage', 'WebSocketError', 'WSMsgType', 'WSCloseCode',
"WS_CLOSED_MESSAGE",
"WS_CLOSING_MESSAGE",
"WS_KEY",
"WebSocketReader",
"WebSocketWriter",
"ws_ext_gen",
"ws_ext_parse",
"WSMessage",
"WebSocketError",
"WSMsgType",
"WSCloseCode",
)
SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format(
sys.version_info, __version__) # type: str
SERVER_SOFTWARE = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
sys.version_info, __version__
) # type: str
RESPONSES = http.server.BaseHTTPRequestHandler.responses # type: Mapping[int, Tuple[str, str]] # noqa
RESPONSES = (
http.server.BaseHTTPRequestHandler.responses
) # type: Mapping[int, Tuple[str, str]]

View file

@@ -5,7 +5,7 @@ from typing import Optional, Union
from .typedefs import _CIMultiDict
__all__ = ('HttpProcessingError',)
__all__ = ("HttpProcessingError",)
class HttpProcessingError(Exception):
@@ -19,32 +19,34 @@ class HttpProcessingError(Exception):
"""
code = 0
message = ''
message = ""
headers = None
def __init__(self, *,
code: Optional[int]=None,
message: str='',
headers: Optional[_CIMultiDict]=None) -> None:
def __init__(
self,
*,
code: Optional[int] = None,
message: str = "",
headers: Optional[_CIMultiDict] = None,
) -> None:
if code is not None:
self.code = code
self.headers = headers
self.message = message
def __str__(self) -> str:
return "%s, message=%r" % (self.code, self.message)
return f"{self.code}, message={self.message!r}"
def __repr__(self) -> str:
return "<%s: %s>" % (self.__class__.__name__, self)
return f"<{self.__class__.__name__}: {self}>"
class BadHttpMessage(HttpProcessingError):
code = 400
message = 'Bad Request'
message = "Bad Request"
def __init__(self, message: str, *,
headers: Optional[_CIMultiDict]=None) -> None:
def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
super().__init__(message=message, headers=headers)
self.args = (message,)
@@ -52,7 +54,7 @@ class BadHttpMessage(HttpProcessingError):
class HttpBadRequest(BadHttpMessage):
code = 400
message = 'Bad Request'
message = "Bad Request"
class PayloadEncodingError(BadHttpMessage):
@@ -72,37 +74,32 @@ class ContentLengthError(PayloadEncodingError):
class LineTooLong(BadHttpMessage):
def __init__(self, line: str,
limit: str='Unknown',
actual_size: str='Unknown') -> None:
def __init__(
self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
) -> None:
super().__init__(
"Got more than %s bytes (%s) when reading %s." % (
limit, actual_size, line))
f"Got more than {limit} bytes ({actual_size}) when reading {line}."
)
self.args = (line, limit, actual_size)
class InvalidHeader(BadHttpMessage):
def __init__(self, hdr: Union[bytes, str]) -> None:
if isinstance(hdr, bytes):
hdr = hdr.decode('utf-8', 'surrogateescape')
super().__init__('Invalid HTTP Header: {}'.format(hdr))
hdr = hdr.decode("utf-8", "surrogateescape")
super().__init__(f"Invalid HTTP Header: {hdr}")
self.hdr = hdr
self.args = (hdr,)
class BadStatusLine(BadHttpMessage):
def __init__(self, line: str='') -> None:
def __init__(self, line: str = "") -> None:
if not isinstance(line, str):
line = repr(line)
super().__init__(f"Bad status line {line!r}")
self.args = (line,)
self.line = line
__str__ = Exception.__str__
__repr__ = Exception.__repr__
class InvalidURLError(BadHttpMessage):
pass

View file

@@ -4,8 +4,22 @@ import collections
import re
import string
import zlib
from contextlib import suppress
from enum import IntEnum
from typing import Any, List, Optional, Tuple, Type, Union # noqa
from typing import (
Any,
Generic,
List,
NamedTuple,
Optional,
Pattern,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from multidict import CIMultiDict, CIMultiDictProxy, istr
from yarl import URL
@@ -14,6 +28,7 @@ from . import hdrs
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS, BaseTimerContext
from .http_exceptions import (
BadHttpMessage,
BadStatusLine,
ContentEncodingError,
ContentLengthError,
@@ -24,20 +39,26 @@ from .http_exceptions import (
from .http_writer import HttpVersion, HttpVersion10
from .log import internal_logger
from .streams import EMPTY_PAYLOAD, StreamReader
from .typedefs import RawHeaders
from .typedefs import Final, RawHeaders
try:
import brotli
HAS_BROTLI = True
except ImportError: # pragma: no cover
HAS_BROTLI = False
__all__ = (
'HeadersParser', 'HttpParser', 'HttpRequestParser', 'HttpResponseParser',
'RawRequestMessage', 'RawResponseMessage')
"HeadersParser",
"HttpParser",
"HttpRequestParser",
"HttpResponseParser",
"RawRequestMessage",
"RawResponseMessage",
)
ASCIISET = set(string.printable)
ASCIISET: Final[Set[str]] = set(string.printable)
# See https://tools.ietf.org/html/rfc7230#section-3.1.1
# and https://tools.ietf.org/html/rfc7230#appendix-B
@@ -46,19 +67,41 @@ ASCIISET = set(string.printable)
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
METHRE = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
VERSRE = re.compile(r'HTTP/(\d+).(\d+)')
HDRRE = re.compile(rb'[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]')
METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+).(\d+)")
HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
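The tchar grammar quoted above maps directly onto METHRE, and VERSRE captures the two version digits. For instance (illustrative; the parser itself calls .match on tokens already split on whitespace):

print(METHRE.fullmatch("GET") is not None)    # True
print(METHRE.fullmatch("GET /") is not None)  # False -- space and "/" are not tchars

m = VERSRE.match("HTTP/1.1")
print(m.group(1), m.group(2))                 # 1 1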
class RawRequestMessage(NamedTuple):
method: str
path: str
version: HttpVersion
headers: "CIMultiDictProxy[str]"
raw_headers: RawHeaders
should_close: bool
compression: Optional[str]
upgrade: bool
chunked: bool
url: URL
RawRequestMessage = collections.namedtuple(
'RawRequestMessage',
['method', 'path', 'version', 'headers', 'raw_headers',
'should_close', 'compression', 'upgrade', 'chunked', 'url'])
RawResponseMessage = collections.namedtuple(
'RawResponseMessage',
['version', 'code', 'reason', 'headers', 'raw_headers',
'should_close', 'compression', 'upgrade', 'chunked'])
"RawResponseMessage",
[
"version",
"code",
"reason",
"headers",
"raw_headers",
"should_close",
"compression",
"upgrade",
"chunked",
],
)
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
class ParseState(IntEnum):
@@ -78,18 +121,19 @@ class ChunkState(IntEnum):
class HeadersParser:
def __init__(self,
max_line_size: int=8190,
max_headers: int=32768,
max_field_size: int=8190) -> None:
def __init__(
self,
max_line_size: int = 8190,
max_headers: int = 32768,
max_field_size: int = 8190,
) -> None:
self.max_line_size = max_line_size
self.max_headers = max_headers
self.max_field_size = max_field_size
def parse_headers(
self,
lines: List[bytes]
) -> Tuple['CIMultiDictProxy[str]', RawHeaders]:
self, lines: List[bytes]
) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
headers = CIMultiDict() # type: CIMultiDict[str]
raw_headers = []
@@ -100,20 +144,22 @@ class HeadersParser:
while line:
# Parse initial header name : value pair.
try:
bname, bvalue = line.split(b':', 1)
bname, bvalue = line.split(b":", 1)
except ValueError:
raise InvalidHeader(line) from None
bname = bname.strip(b' \t')
bname = bname.strip(b" \t")
bvalue = bvalue.lstrip()
if HDRRE.search(bname):
raise InvalidHeader(bname)
if len(bname) > self.max_field_size:
raise LineTooLong(
"request header name {}".format(
bname.decode("utf8", "xmlcharrefreplace")),
bname.decode("utf8", "xmlcharrefreplace")
),
str(self.max_field_size),
str(len(bname)))
str(len(bname)),
)
header_length = len(bvalue)
@@ -130,10 +176,12 @@ class HeadersParser:
header_length += len(line)
if header_length > self.max_field_size:
raise LineTooLong(
'request header field {}'.format(
bname.decode("utf8", "xmlcharrefreplace")),
"request header field {}".format(
bname.decode("utf8", "xmlcharrefreplace")
),
str(self.max_field_size),
str(header_length))
str(header_length),
)
bvalue_lst.append(line)
# next line
@@ -143,20 +191,22 @@ class HeadersParser:
if line:
continuation = line[0] in (32, 9) # (' ', '\t')
else:
line = b''
line = b""
break
bvalue = b''.join(bvalue_lst)
bvalue = b"".join(bvalue_lst)
else:
if header_length > self.max_field_size:
raise LineTooLong(
'request header field {}'.format(
bname.decode("utf8", "xmlcharrefreplace")),
"request header field {}".format(
bname.decode("utf8", "xmlcharrefreplace")
),
str(self.max_field_size),
str(header_length))
str(header_length),
)
bvalue = bvalue.strip()
name = bname.decode('utf-8', 'surrogateescape')
value = bvalue.decode('utf-8', 'surrogateescape')
name = bname.decode("utf-8", "surrogateescape")
value = bvalue.decode("utf-8", "surrogateescape")
headers.add(name, value)
raw_headers.append((bname, bvalue))
@@ -164,21 +214,24 @@ class HeadersParser:
return (CIMultiDictProxy(headers), tuple(raw_headers))
class HttpParser(abc.ABC):
def __init__(self, protocol: Optional[BaseProtocol]=None,
loop: Optional[asyncio.AbstractEventLoop]=None,
max_line_size: int=8190,
max_headers: int=32768,
max_field_size: int=8190,
timer: Optional[BaseTimerContext]=None,
code: Optional[int]=None,
method: Optional[str]=None,
readall: bool=False,
payload_exception: Optional[Type[BaseException]]=None,
response_with_body: bool=True,
read_until_eof: bool=False,
auto_decompress: bool=True) -> None:
class HttpParser(abc.ABC, Generic[_MsgT]):
def __init__(
self,
protocol: Optional[BaseProtocol] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
limit: int = 2 ** 16,
max_line_size: int = 8190,
max_headers: int = 32768,
max_field_size: int = 8190,
timer: Optional[BaseTimerContext] = None,
code: Optional[int] = None,
method: Optional[str] = None,
readall: bool = False,
payload_exception: Optional[Type[BaseException]] = None,
response_with_body: bool = True,
read_until_eof: bool = False,
auto_decompress: bool = True,
) -> None:
self.protocol = protocol
self.loop = loop
self.max_line_size = max_line_size
@@ -193,20 +246,19 @@ class HttpParser(abc.ABC):
self.read_until_eof = read_until_eof
self._lines = [] # type: List[bytes]
self._tail = b''
self._tail = b""
self._upgraded = False
self._payload = None
self._payload_parser = None # type: Optional[HttpPayloadParser]
self._auto_decompress = auto_decompress
self._headers_parser = HeadersParser(max_line_size,
max_headers,
max_field_size)
self._limit = limit
self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size)
@abc.abstractmethod
def parse_message(self, lines: List[bytes]) -> Any:
def parse_message(self, lines: List[bytes]) -> _MsgT:
pass
def feed_eof(self) -> Any:
def feed_eof(self) -> Optional[_MsgT]:
if self._payload_parser is not None:
self._payload_parser.feed_eof()
self._payload_parser = None
@@ -216,27 +268,26 @@ class HttpParser(abc.ABC):
self._lines.append(self._tail)
if self._lines:
if self._lines[-1] != '\r\n':
self._lines.append(b'')
try:
if self._lines[-1] != "\r\n":
self._lines.append(b"")
with suppress(Exception):
return self.parse_message(self._lines)
except Exception:
return None
return None
def feed_data(
self,
data: bytes,
SEP: bytes=b'\r\n',
EMPTY: bytes=b'',
CONTENT_LENGTH: istr=hdrs.CONTENT_LENGTH,
METH_CONNECT: str=hdrs.METH_CONNECT,
SEC_WEBSOCKET_KEY1: istr=hdrs.SEC_WEBSOCKET_KEY1
) -> Tuple[List[Any], bool, bytes]:
self,
data: bytes,
SEP: bytes = b"\r\n",
EMPTY: bytes = b"",
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
METH_CONNECT: str = hdrs.METH_CONNECT,
SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
messages = []
if self._tail:
data, self._tail = self._tail + data, b''
data, self._tail = self._tail + data, b""
data_len = len(data)
start_pos = 0
@@ -261,67 +312,104 @@ class HttpParser(abc.ABC):
# \r\n\r\n found
if self._lines[-1] == EMPTY:
try:
msg = self.parse_message(self._lines)
msg: _MsgT = self.parse_message(self._lines)
finally:
self._lines.clear()
# payload length
length = msg.headers.get(CONTENT_LENGTH)
if length is not None:
def get_content_length() -> Optional[int]:
# payload length
length_hdr = msg.headers.get(CONTENT_LENGTH)
if length_hdr is None:
return None
try:
length = int(length)
length = int(length_hdr)
except ValueError:
raise InvalidHeader(CONTENT_LENGTH)
if length < 0:
raise InvalidHeader(CONTENT_LENGTH)
return length
length = get_content_length()
# the old websocket spec is not supported
if SEC_WEBSOCKET_KEY1 in msg.headers:
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
self._upgraded = msg.upgrade
method = getattr(msg, 'method', self.method)
method = getattr(msg, "method", self.method)
assert self.protocol is not None
# calculate payload
if ((length is not None and length > 0) or
msg.chunked and not msg.upgrade):
if (
(length is not None and length > 0)
or msg.chunked
and not msg.upgrade
):
payload = StreamReader(
self.protocol, timer=self.timer, loop=loop)
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
payload_parser = HttpPayloadParser(
payload, length=length,
chunked=msg.chunked, method=method,
payload,
length=length,
chunked=msg.chunked,
method=method,
compression=msg.compression,
code=self.code, readall=self.readall,
code=self.code,
readall=self.readall,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress)
auto_decompress=self._auto_decompress,
)
if not payload_parser.done:
self._payload_parser = payload_parser
elif method == METH_CONNECT:
assert isinstance(msg, RawRequestMessage)
payload = StreamReader(
self.protocol, timer=self.timer, loop=loop)
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
self._upgraded = True
self._payload_parser = HttpPayloadParser(
payload, method=msg.method,
compression=msg.compression, readall=True,
auto_decompress=self._auto_decompress)
payload,
method=msg.method,
compression=msg.compression,
readall=True,
auto_decompress=self._auto_decompress,
)
else:
if (getattr(msg, 'code', 100) >= 199 and
length is None and self.read_until_eof):
if (
getattr(msg, "code", 100) >= 199
and length is None
and self.read_until_eof
):
payload = StreamReader(
self.protocol, timer=self.timer, loop=loop)
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
payload_parser = HttpPayloadParser(
payload, length=length,
chunked=msg.chunked, method=method,
payload,
length=length,
chunked=msg.chunked,
method=method,
compression=msg.compression,
code=self.code, readall=True,
code=self.code,
readall=True,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress)
auto_decompress=self._auto_decompress,
)
if not payload_parser.done:
self._payload_parser = payload_parser
else:
payload = EMPTY_PAYLOAD # type: ignore
payload = EMPTY_PAYLOAD
messages.append((msg, payload))
else:
@@ -339,17 +427,17 @@ class HttpParser(abc.ABC):
assert not self._lines
assert self._payload_parser is not None
try:
eof, data = self._payload_parser.feed_data(
data[start_pos:])
eof, data = self._payload_parser.feed_data(data[start_pos:])
except BaseException as exc:
if self.payload_exception is not None:
self._payload_parser.payload.set_exception(
self.payload_exception(str(exc)))
self.payload_exception(str(exc))
)
else:
self._payload_parser.payload.set_exception(exc)
eof = True
data = b''
data = b""
if eof:
start_pos = 0
@@ -367,14 +455,10 @@ class HttpParser(abc.ABC):
return messages, self._upgraded, data
def parse_headers(
self,
lines: List[bytes]
) -> Tuple['CIMultiDictProxy[str]',
RawHeaders,
Optional[bool],
Optional[str],
bool,
bool]:
self, lines: List[bytes]
) -> Tuple[
"CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
]:
"""Parses RFC 5322 headers from a stream.
Line continuations are supported. Returns list of header name
@@ -390,37 +474,54 @@ class HttpParser(abc.ABC):
conn = headers.get(hdrs.CONNECTION)
if conn:
v = conn.lower()
if v == 'close':
if v == "close":
close_conn = True
elif v == 'keep-alive':
elif v == "keep-alive":
close_conn = False
elif v == 'upgrade':
elif v == "upgrade":
upgrade = True
# encoding
enc = headers.get(hdrs.CONTENT_ENCODING)
if enc:
enc = enc.lower()
if enc in ('gzip', 'deflate', 'br'):
if enc in ("gzip", "deflate", "br"):
encoding = enc
# chunking
te = headers.get(hdrs.TRANSFER_ENCODING)
if te and 'chunked' in te.lower():
chunked = True
if te is not None:
if "chunked" == te.lower():
chunked = True
else:
raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
if hdrs.CONTENT_LENGTH in headers:
raise BadHttpMessage(
"Content-Length can't be present with Transfer-Encoding",
)
return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
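The tightened Transfer-Encoding branch above is a request-smuggling defence: per RFC 7230 section 3.3.3, a message that pairs Transfer-Encoding with Content-Length, or that carries any transfer coding other than a lone chunked, lets two servers in a chain disagree about where the body ends. A hypothetical message of the now-rejected shape:

POST /upload HTTP/1.1
Host: example.com
Transfer-Encoding: chunked
Content-Length: 4

parse_headers() now raises BadHttpMessage on the conflicting framing instead of picking one interpretation.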
def set_upgraded(self, val: bool) -> None:
"""Set connection upgraded (to websocket) mode.
class HttpRequestParser(HttpParser):
"""Read request status line. Exception .http_exceptions.BadStatusLine
:param bool val: new state.
"""
self._upgraded = val
class HttpRequestParser(HttpParser[RawRequestMessage]):
"""Read request status line.
Exception .http_exceptions.BadStatusLine
could be raised in case of any errors in status line.
Returns RawRequestMessage.
"""
def parse_message(self, lines: List[bytes]) -> Any:
def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
# request line
line = lines[0].decode('utf-8', 'surrogateescape')
line = lines[0].decode("utf-8", "surrogateescape")
try:
method, path, version = line.split(None, 2)
except ValueError:
@@ -428,9 +529,11 @@ class HttpRequestParser(HttpParser):
if len(path) > self.max_line_size:
raise LineTooLong(
'Status line is too long',
str(self.max_line_size),
str(len(path)))
"Status line is too long", str(self.max_line_size), str(len(path))
)
path_part, _hash_separator, url_fragment = path.partition("#")
path_part, _question_mark_separator, qs_part = path_part.partition("?")
# method
if not METHRE.match(method):
@@ -438,8 +541,8 @@ class HttpRequestParser(HttpParser):
# version
try:
if version.startswith('HTTP/'):
n1, n2 = version[5:].split('.', 1)
if version.startswith("HTTP/"):
n1, n2 = version[5:].split(".", 1)
version_o = HttpVersion(int(n1), int(n2))
else:
raise BadStatusLine(version)
@@ -447,8 +550,14 @@ class HttpRequestParser(HttpParser):
raise BadStatusLine(version)
# read headers
(headers, raw_headers,
close, compression, upgrade, chunked) = self.parse_headers(lines)
(
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
) = self.parse_headers(lines)
if close is None: # then the headers weren't set in the request
if version_o <= HttpVersion10: # HTTP 1.0 must ask to not close
@@ -457,18 +566,37 @@ class HttpRequestParser(HttpParser):
close = False
return RawRequestMessage(
method, path, version_o, headers, raw_headers,
close, compression, upgrade, chunked, URL(path))
method,
path,
version_o,
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
# NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
# NOTE: parser does, otherwise it results into the same
# NOTE: HTTP Request-Line input producing different
# NOTE: `yarl.URL()` objects
URL.build(
path=path_part,
query_string=qs_part,
fragment=url_fragment,
encoded=True,
),
)
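The NOTE block gives the motivation; concretely, splitting the request-target before URL.build(encoded=True) keeps the query string and fragment out of the path component. A standalone sketch using yarl directly (the target value is made up):

from yarl import URL

target = "/search?q=a%20b#frag"
path, _, fragment = target.partition("#")
path, _, qs = path.partition("?")
url = URL.build(path=path, query_string=qs, fragment=fragment, encoded=True)
print(url)       # /search?q=a%20b#frag
print(url.path)  # /search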
class HttpResponseParser(HttpParser):
class HttpResponseParser(HttpParser[RawResponseMessage]):
"""Read response status line and headers.
BadStatusLine could be raised in case of any errors in status line.
Returns RawResponseMessage"""
Returns RawResponseMessage.
"""
def parse_message(self, lines: List[bytes]) -> Any:
line = lines[0].decode('utf-8', 'surrogateescape')
def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
line = lines[0].decode("utf-8", "surrogateescape")
try:
version, status = line.split(None, 1)
except ValueError:
@@ -477,13 +605,12 @@ class HttpResponseParser(HttpParser):
try:
status, reason = status.split(None, 1)
except ValueError:
reason = ''
reason = ""
if len(reason) > self.max_line_size:
raise LineTooLong(
'Status line is too long',
str(self.max_line_size),
str(len(reason)))
"Status line is too long", str(self.max_line_size), str(len(reason))
)
# version
match = VERSRE.match(version)
@@ -501,39 +628,57 @@ class HttpResponseParser(HttpParser):
raise BadStatusLine(line)
# read headers
(headers, raw_headers,
close, compression, upgrade, chunked) = self.parse_headers(lines)
(
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
) = self.parse_headers(lines)
if close is None:
close = version_o <= HttpVersion10
return RawResponseMessage(
version_o, status_i, reason.strip(),
headers, raw_headers, close, compression, upgrade, chunked)
version_o,
status_i,
reason.strip(),
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
)
class HttpPayloadParser:
def __init__(self, payload: StreamReader,
length: Optional[int]=None,
chunked: bool=False,
compression: Optional[str]=None,
code: Optional[int]=None,
method: Optional[str]=None,
readall: bool=False,
response_with_body: bool=True,
auto_decompress: bool=True) -> None:
def __init__(
self,
payload: StreamReader,
length: Optional[int] = None,
chunked: bool = False,
compression: Optional[str] = None,
code: Optional[int] = None,
method: Optional[str] = None,
readall: bool = False,
response_with_body: bool = True,
auto_decompress: bool = True,
) -> None:
self._length = 0
self._type = ParseState.PARSE_NONE
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
self._chunk_size = 0
self._chunk_tail = b''
self._chunk_tail = b""
self._auto_decompress = auto_decompress
self.done = False
# payload decompression wrapper
if response_with_body and compression and self._auto_decompress:
real_payload = DeflateBuffer(payload, compression) # type: Union[StreamReader, DeflateBuffer] # noqa
real_payload = DeflateBuffer(
payload, compression
) # type: Union[StreamReader, DeflateBuffer]
else:
real_payload = payload
@@ -555,9 +700,10 @@ class HttpPayloadParser:
else:
if readall and code != 204:
self._type = ParseState.PARSE_UNTIL_EOF
elif method in ('PUT', 'POST'):
elif method in ("PUT", "POST"):
internal_logger.warning( # pragma: no cover
'Content-Length or Transfer-Encoding header is required')
"Content-Length or Transfer-Encoding header is required"
)
self._type = ParseState.PARSE_NONE
real_payload.feed_eof()
self.done = True
@@ -569,15 +715,16 @@ class HttpPayloadParser:
self.payload.feed_eof()
elif self._type == ParseState.PARSE_LENGTH:
raise ContentLengthError(
"Not enough data for satisfy content length header.")
"Not enough data for satisfy content length header."
)
elif self._type == ParseState.PARSE_CHUNKED:
raise TransferEncodingError(
"Not enough data for satisfy transfer length header.")
"Not enough data for satisfy transfer length header."
)
def feed_data(self,
chunk: bytes,
SEP: bytes=b'\r\n',
CHUNK_EXT: bytes=b';') -> Tuple[bool, bytes]:
def feed_data(
self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
) -> Tuple[bool, bytes]:
# Read specified amount of bytes
if self._type == ParseState.PARSE_LENGTH:
required = self._length
@@ -588,7 +735,7 @@ class HttpPayloadParser:
self.payload.feed_data(chunk, chunk_len)
if self._length == 0:
self.payload.feed_eof()
return True, b''
return True, b""
else:
self._length = 0
self.payload.feed_data(chunk[:required], required)
@@ -599,7 +746,7 @@ class HttpPayloadParser:
elif self._type == ParseState.PARSE_CHUNKED:
if self._chunk_tail:
chunk = self._chunk_tail + chunk
self._chunk_tail = b''
self._chunk_tail = b""
while chunk:
@@ -617,11 +764,12 @@ class HttpPayloadParser:
size = int(bytes(size_b), 16)
except ValueError:
exc = TransferEncodingError(
chunk[:pos].decode('ascii', 'surrogateescape'))
chunk[:pos].decode("ascii", "surrogateescape")
)
self.payload.set_exception(exc)
raise exc from None
chunk = chunk[pos+2:]
chunk = chunk[pos + 2 :]
if size == 0: # eof marker
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
@@ -630,7 +778,7 @@ class HttpPayloadParser:
self.payload.begin_http_chunk_receiving()
else:
self._chunk_tail = chunk
return False, b''
return False, b""
# read chunk and feed buffer
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
@@ -640,7 +788,7 @@ class HttpPayloadParser:
if required > chunk_len:
self._chunk_size = required - chunk_len
self.payload.feed_data(chunk, chunk_len)
return False, b''
return False, b""
else:
self._chunk_size = 0
self.payload.feed_data(chunk[:required], required)
@@ -655,77 +803,118 @@ class HttpPayloadParser:
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
else:
self._chunk_tail = chunk
return False, b''
return False, b""
# if the stream does not contain a trailer, after 0\r\n
# we should get another \r\n; otherwise
# trailers need to be skipped until \r\n\r\n
if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
if chunk[:2] == SEP:
head = chunk[:2]
if head == SEP:
# end of stream
self.payload.feed_eof()
return True, chunk[2:]
else:
self._chunk = ChunkState.PARSE_TRAILERS
# Both CR and LF, or only the LF, may still be missing. CRLF
# (or LF) is expected as the very first bytes next time;
# otherwise trailers follow. The final CRLF that marks the end
# of the response may not arrive in the same TCP segment that
# delivered the size indicator.
if not head:
return False, b""
if head == SEP[:1]:
self._chunk_tail = head
return False, b""
self._chunk = ChunkState.PARSE_TRAILERS
# read and discard trailer up to the CRLF terminator
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
if pos >= 0:
chunk = chunk[pos+2:]
chunk = chunk[pos + 2 :]
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk_tail = chunk
return False, b''
return False, b""
# Read all bytes until eof
elif self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_data(chunk, len(chunk))
return False, b''
return False, b""
class DeflateBuffer:
"""DeflateStream decompress stream and feed data into specified stream."""
decompressor: Any
def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
self.out = out
self.size = 0
self.encoding = encoding
self._started_decoding = False
if encoding == 'br':
if encoding == "br":
if not HAS_BROTLI: # pragma: no cover
raise ContentEncodingError(
'Can not decode content-encoding: brotli (br). '
'Please install `brotlipy`')
self.decompressor = brotli.Decompressor()
"Can not decode content-encoding: brotli (br). "
"Please install `Brotli`"
)
class BrotliDecoder:
# Supports both 'brotlipy' and 'Brotli' packages
# since they share an import name. The top branches
# are for 'brotlipy' and bottom branches for 'Brotli'
def __init__(self) -> None:
self._obj = brotli.Decompressor()
def decompress(self, data: bytes) -> bytes:
if hasattr(self._obj, "decompress"):
return cast(bytes, self._obj.decompress(data))
return cast(bytes, self._obj.process(data))
def flush(self) -> bytes:
if hasattr(self._obj, "flush"):
return cast(bytes, self._obj.flush())
return b""
self.decompressor = BrotliDecoder()
else:
zlib_mode = (16 + zlib.MAX_WBITS
if encoding == 'gzip' else -zlib.MAX_WBITS)
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
self.decompressor = zlib.decompressobj(wbits=zlib_mode)
def set_exception(self, exc: BaseException) -> None:
self.out.set_exception(exc)
def feed_data(self, chunk: bytes, size: int) -> None:
if not size:
return
self.size += size
# RFC1950
# bits 0..3 = CM = 0b1000 = 8 = "deflate"
# bits 4..7 = CINFO = 1..7 = windows size.
if (
not self._started_decoding
and self.encoding == "deflate"
and chunk[0] & 0xF != 8
):
# Change the decoder to decompress incorrectly compressed data
# Actually we should issue a warning about non-RFC-compliant data.
self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
try:
chunk = self.decompressor.decompress(chunk)
except Exception:
if not self._started_decoding and self.encoding == 'deflate':
self.decompressor = zlib.decompressobj()
try:
chunk = self.decompressor.decompress(chunk)
except Exception:
raise ContentEncodingError(
'Can not decode content-encoding: %s' % self.encoding)
else:
raise ContentEncodingError(
'Can not decode content-encoding: %s' % self.encoding)
raise ContentEncodingError(
"Can not decode content-encoding: %s" % self.encoding
)
self._started_decoding = True
if chunk:
self._started_decoding = True
self.out.feed_data(chunk, len(chunk))
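The deflate sniff above checks the low nibble of the first payload byte: RFC 1950 puts CM=8 ("deflate") there, so a standard zlib stream begins with e.g. 0x78, and anything else is treated as a raw deflate stream from a non-compliant sender. A worked check:

print(0x78 & 0xF)  # 8 -> proper zlib wrapper, keep the current decompressor
print(0x03 & 0xF)  # 3 -> no zlib header, fall back to wbits=-zlib.MAX_WBITS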
def feed_eof(self) -> None:
@@ -733,8 +922,8 @@ class DeflateBuffer:
if chunk or self.size > 0:
self.out.feed_data(chunk, len(chunk))
if self.encoding == 'deflate' and not self.decompressor.eof:
raise ContentEncodingError('deflate')
if self.encoding == "deflate" and not self.decompressor.eof:
raise ContentEncodingError("deflate")
self.out.feed_eof()
@@ -752,10 +941,13 @@ RawResponseMessagePy = RawResponseMessage
try:
if not NO_EXTENSIONS:
from ._http_parser import (HttpRequestParser, # type: ignore # noqa
HttpResponseParser,
RawRequestMessage,
RawResponseMessage)
from ._http_parser import ( # type: ignore[import,no-redef]
HttpRequestParser,
HttpResponseParser,
RawRequestMessage,
RawResponseMessage,
)
HttpRequestParserC = HttpRequestParser
HttpResponseParserC = HttpResponseParser
RawRequestMessageC = RawRequestMessage

View file

@@ -9,16 +9,24 @@ import sys
import zlib
from enum import IntEnum
from struct import Struct
from typing import Any, Callable, List, Optional, Tuple, Union
from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS
from .log import ws_logger
from .streams import DataQueue
from .typedefs import Final
__all__ = ('WS_CLOSED_MESSAGE', 'WS_CLOSING_MESSAGE', 'WS_KEY',
'WebSocketReader', 'WebSocketWriter', 'WSMessage',
'WebSocketError', 'WSMsgType', 'WSCloseCode')
__all__ = (
"WS_CLOSED_MESSAGE",
"WS_CLOSING_MESSAGE",
"WS_KEY",
"WebSocketReader",
"WebSocketWriter",
"WSMessage",
"WebSocketError",
"WSMsgType",
"WSCloseCode",
)
class WSCloseCode(IntEnum):
@@ -26,6 +34,7 @@ class WSCloseCode(IntEnum):
GOING_AWAY = 1001
PROTOCOL_ERROR = 1002
UNSUPPORTED_DATA = 1003
ABNORMAL_CLOSURE = 1006
INVALID_TEXT = 1007
POLICY_VIOLATION = 1008
MESSAGE_TOO_BIG = 1009
@@ -33,9 +42,10 @@ class WSCloseCode(IntEnum):
INTERNAL_ERROR = 1011
SERVICE_RESTART = 1012
TRY_AGAIN_LATER = 1013
BAD_GATEWAY = 1014
ALLOWED_CLOSE_CODES = {int(i) for i in WSCloseCode}
ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
class WSMsgType(IntEnum):
@@ -44,7 +54,7 @@ class WSMsgType(IntEnum):
TEXT = 0x1
BINARY = 0x2
PING = 0x9
PONG = 0xa
PONG = 0xA
CLOSE = 0x8
# aiohttp specific types
@@ -62,28 +72,25 @@ class WSMsgType(IntEnum):
error = ERROR
WS_KEY = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
UNPACK_LEN2 = Struct('!H').unpack_from
UNPACK_LEN3 = Struct('!Q').unpack_from
UNPACK_CLOSE_CODE = Struct('!H').unpack
PACK_LEN1 = Struct('!BB').pack
PACK_LEN2 = Struct('!BBH').pack
PACK_LEN3 = Struct('!BBQ').pack
PACK_CLOSE_CODE = Struct('!H').pack
MSG_SIZE = 2 ** 14
DEFAULT_LIMIT = 2 ** 16
UNPACK_LEN2 = Struct("!H").unpack_from
UNPACK_LEN3 = Struct("!Q").unpack_from
UNPACK_CLOSE_CODE = Struct("!H").unpack
PACK_LEN1 = Struct("!BB").pack
PACK_LEN2 = Struct("!BBH").pack
PACK_LEN3 = Struct("!BBQ").pack
PACK_CLOSE_CODE = Struct("!H").pack
MSG_SIZE: Final[int] = 2 ** 14
DEFAULT_LIMIT: Final[int] = 2 ** 16
_WSMessageBase = collections.namedtuple('_WSMessageBase',
['type', 'data', 'extra'])
_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])
class WSMessage(_WSMessageBase):
def json(self, *,
loads: Callable[[Any], Any]=json.loads) -> Any:
def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
"""Return parsed JSON data.
.. versionadded:: 0.22
@@ -103,18 +110,18 @@ class WebSocketError(Exception):
super().__init__(code, message)
def __str__(self) -> str:
return self.args[1]
return cast(str, self.args[1])
class WSHandshakeError(Exception):
"""WebSocket protocol handshake error."""
native_byteorder = sys.byteorder
native_byteorder: Final[str] = sys.byteorder
# Used by _websocket_mask_python
_XOR_TABLE = [bytes(a ^ b for a in range(256)) for b in range(256)]
_XOR_TABLE: Final[List[bytes]] = [bytes(a ^ b for a in range(256)) for b in range(256)]
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
@@ -145,24 +152,27 @@ if NO_EXTENSIONS: # pragma: no cover
_websocket_mask = _websocket_mask_python
else:
try:
from ._websocket import _websocket_mask_cython # type: ignore
from ._websocket import _websocket_mask_cython # type: ignore[import]
_websocket_mask = _websocket_mask_cython
except ImportError: # pragma: no cover
_websocket_mask = _websocket_mask_python
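The pure-Python fallback masks via bytes.translate over a precomputed 256-entry XOR table instead of a per-byte Python loop. Masking is its own inverse, which a short sketch shows:

data = bytearray(b"hello")
mask = b"\x01\x02\x03\x04"

_websocket_mask_python(mask, data)  # masks the payload in place
_websocket_mask_python(mask, data)  # the same mask applied again restores it
print(bytes(data))                  # b'hello'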
_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xff, 0xff])
_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])
_WS_EXT_RE = re.compile(r'^(?:;\s*(?:'
r'(server_no_context_takeover)|'
r'(client_no_context_takeover)|'
r'(server_max_window_bits(?:=(\d+))?)|'
r'(client_max_window_bits(?:=(\d+))?)))*$')
_WS_EXT_RE: Final[Pattern[str]] = re.compile(
r"^(?:;\s*(?:"
r"(server_no_context_takeover)|"
r"(client_no_context_takeover)|"
r"(server_max_window_bits(?:=(\d+))?)|"
r"(client_max_window_bits(?:=(\d+))?)))*$"
)
_WS_EXT_RE_SPLIT = re.compile(r'permessage-deflate([^,]+)?')
_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
def ws_ext_parse(extstr: str, isserver: bool=False) -> Tuple[int, bool]:
def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
if not extstr:
return 0, False
@@ -201,37 +211,38 @@ def ws_ext_parse(extstr: str, isserver: bool=False) -> Tuple[int, bool]:
# If the compress level is not supported,
# FAIL the parse process
if compress > 15 or compress < 9:
raise WSHandshakeError('Invalid window size')
raise WSHandshakeError("Invalid window size")
if match.group(2):
notakeover = True
# Ignore regex group 5 & 6 for client_max_window_bits
break
# Fail if on the client side and the extension does not match
elif not isserver:
raise WSHandshakeError('Extension for deflate not supported' +
ext.group(1))
raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))
return compress, notakeover
def ws_ext_gen(compress: int=15, isserver: bool=False,
server_notakeover: bool=False) -> str:
def ws_ext_gen(
compress: int = 15, isserver: bool = False, server_notakeover: bool = False
) -> str:
# client_notakeover=False not used for server
# zlib does not support compress wbits=8
if compress < 9 or compress > 15:
raise ValueError('Compress wbits must between 9 and 15, '
'zlib does not support wbits=8')
enabledext = ['permessage-deflate']
raise ValueError(
"Compress wbits must between 9 and 15, " "zlib does not support wbits=8"
)
enabledext = ["permessage-deflate"]
if not isserver:
enabledext.append('client_max_window_bits')
enabledext.append("client_max_window_bits")
if compress < 15:
enabledext.append('server_max_window_bits=' + str(compress))
enabledext.append("server_max_window_bits=" + str(compress))
if server_notakeover:
enabledext.append('server_no_context_takeover')
enabledext.append("server_no_context_takeover")
# if client_notakeover:
# enabledext.append('client_no_context_takeover')
return '; '.join(enabledext)
return "; ".join(enabledext)
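Taken together, ws_ext_gen builds the permessage-deflate offer and ws_ext_parse turns the peer's answer back into a (window bits, no-context-takeover) pair. For instance (illustrative; assumes the full upstream bodies of both helpers):

print(ws_ext_gen(compress=15))
# permessage-deflate; client_max_window_bits

print(ws_ext_parse("permessage-deflate; client_max_window_bits=10"))
# (10, False)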
class WSParserState(IntEnum):
@@ -242,9 +253,9 @@ class WSParserState(IntEnum):
class WebSocketReader:
def __init__(self, queue: DataQueue[WSMessage],
max_msg_size: int, compress: bool=True) -> None:
def __init__(
self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
) -> None:
self.queue = queue
self._max_msg_size = max_msg_size
@@ -257,7 +268,7 @@ class WebSocketReader:
self._frame_opcode = None # type: Optional[int]
self._frame_payload = bytearray()
self._tail = b''
self._tail = b""
self._has_mask = False
self._frame_mask = None # type: Optional[bytes]
self._payload_length = 0
@@ -278,7 +289,7 @@ class WebSocketReader:
except Exception as exc:
self._exc = exc
self.queue.set_exception(exc)
return True, b''
return True, b""
def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
for fin, opcode, payload, compressed in self.parse_frame(data):
@@ -287,41 +298,45 @@ class WebSocketReader:
if opcode == WSMsgType.CLOSE:
if len(payload) >= 2:
close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
if (close_code < 3000 and
close_code not in ALLOWED_CLOSE_CODES):
if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Invalid close code: {}'.format(close_code))
f"Invalid close code: {close_code}",
)
try:
close_message = payload[2:].decode('utf-8')
close_message = payload[2:].decode("utf-8")
except UnicodeDecodeError as exc:
raise WebSocketError(
WSCloseCode.INVALID_TEXT,
'Invalid UTF-8 text message') from exc
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
) from exc
msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
elif payload:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Invalid close frame: {} {} {!r}'.format(
fin, opcode, payload))
f"Invalid close frame: {fin} {opcode} {payload!r}",
)
else:
msg = WSMessage(WSMsgType.CLOSE, 0, '')
msg = WSMessage(WSMsgType.CLOSE, 0, "")
self.queue.feed_data(msg, 0)
elif opcode == WSMsgType.PING:
self.queue.feed_data(
WSMessage(WSMsgType.PING, payload, ''), len(payload))
WSMessage(WSMsgType.PING, payload, ""), len(payload)
)
elif opcode == WSMsgType.PONG:
self.queue.feed_data(
WSMessage(WSMsgType.PONG, payload, ''), len(payload))
WSMessage(WSMsgType.PONG, payload, ""), len(payload)
)
elif opcode not in (
WSMsgType.TEXT, WSMsgType.BINARY) and self._opcode is None:
elif (
opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
and self._opcode is None
):
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
"Unexpected opcode={!r}".format(opcode))
WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
)
else:
# load text/binary
if not fin:
@@ -329,12 +344,13 @@ class WebSocketReader:
if opcode != WSMsgType.CONTINUATION:
self._opcode = opcode
self._partial.extend(payload)
if (self._max_msg_size and
len(self._partial) >= self._max_msg_size):
if self._max_msg_size and len(self._partial) >= self._max_msg_size:
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Message size {} exceeds limit {}".format(
len(self._partial), self._max_msg_size))
len(self._partial), self._max_msg_size
),
)
else:
# the previous frame was not finished;
# we should get a continuation opcode
@@ -342,8 +358,9 @@ class WebSocketReader:
if opcode != WSMsgType.CONTINUATION:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'The opcode in non-fin frame is expected '
'to be zero, got {!r}'.format(opcode))
"The opcode in non-fin frame is expected "
"to be zero, got {!r}".format(opcode),
)
if opcode == WSMsgType.CONTINUATION:
assert self._opcode is not None
@@ -351,28 +368,28 @@ class WebSocketReader:
self._opcode = None
self._partial.extend(payload)
if (self._max_msg_size and
len(self._partial) >= self._max_msg_size):
if self._max_msg_size and len(self._partial) >= self._max_msg_size:
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Message size {} exceeds limit {}".format(
len(self._partial), self._max_msg_size))
len(self._partial), self._max_msg_size
),
)
# Decompression must be done after all packets have been
# received.
if compressed:
self._partial.extend(_WS_DEFLATE_TRAILING)
payload_merged = self._decompressobj.decompress(
self._partial, self._max_msg_size)
self._partial, self._max_msg_size
)
if self._decompressobj.unconsumed_tail:
left = len(self._decompressobj.unconsumed_tail)
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Decompressed message size {} exceeds limit {}"
.format(
self._max_msg_size + left,
self._max_msg_size
)
"Decompressed message size {} exceeds limit {}".format(
self._max_msg_size + left, self._max_msg_size
),
)
else:
payload_merged = bytes(self._partial)
@@ -381,27 +398,29 @@ class WebSocketReader:
if opcode == WSMsgType.TEXT:
try:
text = payload_merged.decode('utf-8')
text = payload_merged.decode("utf-8")
self.queue.feed_data(
WSMessage(WSMsgType.TEXT, text, ''), len(text))
WSMessage(WSMsgType.TEXT, text, ""), len(text)
)
except UnicodeDecodeError as exc:
raise WebSocketError(
WSCloseCode.INVALID_TEXT,
'Invalid UTF-8 text message') from exc
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
) from exc
else:
self.queue.feed_data(
WSMessage(WSMsgType.BINARY, payload_merged, ''),
len(payload_merged))
WSMessage(WSMsgType.BINARY, payload_merged, ""),
len(payload_merged),
)
return False, b''
return False, b""
def parse_frame(self, buf: bytes) -> List[Tuple[bool, Optional[int],
bytearray,
Optional[bool]]]:
def parse_frame(
self, buf: bytes
) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
"""Return the next frame from the socket."""
frames = []
if self._tail:
buf, self._tail = self._tail + buf, b''
buf, self._tail = self._tail + buf, b""
start_pos = 0
buf_length = len(buf)
@@ -410,7 +429,7 @@ class WebSocketReader:
# read header
if self._state == WSParserState.READ_HEADER:
if buf_length - start_pos >= 2:
data = buf[start_pos:start_pos+2]
data = buf[start_pos : start_pos + 2]
start_pos += 2
first_byte, second_byte = data
@@ -418,7 +437,7 @@ class WebSocketReader:
rsv1 = (first_byte >> 6) & 1
rsv2 = (first_byte >> 5) & 1
rsv3 = (first_byte >> 4) & 1
opcode = first_byte & 0xf
opcode = first_byte & 0xF
# frame-fin = %x0 ; more frames of this message follow
# / %x1 ; final frame of this message
@@ -433,23 +452,25 @@ class WebSocketReader:
if rsv2 or rsv3 or (rsv1 and not self._compress):
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received frame with non-zero reserved bits')
"Received frame with non-zero reserved bits",
)
if opcode > 0x7 and fin == 0:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received fragmented control frame')
"Received fragmented control frame",
)
has_mask = (second_byte >> 7) & 1
length = second_byte & 0x7f
length = second_byte & 0x7F
# Control frames MUST have a payload
# length of 125 bytes or less
if opcode > 0x7 and length > 125:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Control frame payload cannot be '
'larger than 125 bytes')
"Control frame payload cannot be " "larger than 125 bytes",
)
# Set the compress status if the last frame is FIN,
# OR if this is the first fragment
@@ -459,7 +480,8 @@ class WebSocketReader:
elif rsv1:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received frame with non-zero reserved bits')
"Received frame with non-zero reserved bits",
)
self._frame_fin = bool(fin)
self._frame_opcode = opcode
@@ -474,26 +496,28 @@ class WebSocketReader:
length = self._payload_length_flag
if length == 126:
if buf_length - start_pos >= 2:
data = buf[start_pos:start_pos+2]
data = buf[start_pos : start_pos + 2]
start_pos += 2
length = UNPACK_LEN2(data)[0]
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
else WSParserState.READ_PAYLOAD
)
else:
break
elif length > 126:
if buf_length - start_pos >= 8:
data = buf[start_pos:start_pos+8]
data = buf[start_pos : start_pos + 8]
start_pos += 8
length = UNPACK_LEN3(data)[0]
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
else WSParserState.READ_PAYLOAD
)
else:
break
else:
@@ -501,12 +525,13 @@ class WebSocketReader:
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
else WSParserState.READ_PAYLOAD
)
# read payload mask
if self._state == WSParserState.READ_PAYLOAD_MASK:
if buf_length - start_pos >= 4:
self._frame_mask = buf[start_pos:start_pos+4]
self._frame_mask = buf[start_pos : start_pos + 4]
start_pos += 4
self._state = WSParserState.READ_PAYLOAD
else:
@@ -523,7 +548,7 @@ class WebSocketReader:
start_pos = buf_length
else:
self._payload_length = 0
payload.extend(buf[start_pos:start_pos+length])
payload.extend(buf[start_pos : start_pos + length])
start_pos = start_pos + length
if self._payload_length == 0:
@@ -531,11 +556,9 @@ class WebSocketReader:
assert self._frame_mask is not None
_websocket_mask(self._frame_mask, payload)
frames.append((
self._frame_fin,
self._frame_opcode,
payload,
self._compressed))
frames.append(
(self._frame_fin, self._frame_opcode, payload, self._compressed)
)
self._frame_payload = bytearray()
self._state = WSParserState.READ_HEADER
@@ -548,11 +571,17 @@ class WebSocketReader:
class WebSocketWriter:
def __init__(self, protocol: BaseProtocol, transport: asyncio.Transport, *,
use_mask: bool=False, limit: int=DEFAULT_LIMIT,
random: Any=random.Random(),
compress: int=0, notakeover: bool=False) -> None:
def __init__(
self,
protocol: BaseProtocol,
transport: asyncio.Transport,
*,
use_mask: bool = False,
limit: int = DEFAULT_LIMIT,
random: Any = random.Random(),
compress: int = 0,
notakeover: bool = False,
) -> None:
self.protocol = protocol
self.transport = transport
self.use_mask = use_mask
@@ -564,11 +593,12 @@ class WebSocketWriter:
self._output_size = 0
self._compressobj = None # type: Any # actually compressobj
async def _send_frame(self, message: bytes, opcode: int,
compress: Optional[int]=None) -> None:
async def _send_frame(
self, message: bytes, opcode: int, compress: Optional[int] = None
) -> None:
"""Send a frame over the websocket with message as its payload."""
if self._closing:
ws_logger.warning('websocket connection is closing.')
if self._closing and not (opcode & WSMsgType.CLOSE):
raise ConnectionResetError("Cannot write to closing transport")
rsv = 0
@ -578,15 +608,18 @@ class WebSocketWriter:
if (compress or self.compress) and opcode < 8:
if compress:
# Do not set self._compress if compressing is for this frame
compressobj = zlib.compressobj(wbits=-compress)
compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress)
else: # self.compress
if not self._compressobj:
self._compressobj = zlib.compressobj(wbits=-self.compress)
self._compressobj = zlib.compressobj(
level=zlib.Z_BEST_SPEED, wbits=-self.compress
)
compressobj = self._compressobj
message = compressobj.compress(message)
message = message + compressobj.flush(
zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH)
zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
)
if message.endswith(_WS_DEFLATE_TRAILING):
message = message[:-4]
rsv = rsv | 0x40
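The four bytes stripped here are the 0x00 0x00 0xff 0xff tail that a zlib sync flush appends; RFC 7692 (permessage-deflate) requires the frame body to be sent without it, and setting RSV1 (0x40) marks the frame as compressed. A small sketch of that round trip, assuming raw-deflate window bits:

import zlib

_WS_DEFLATE_TRAILING = b"\x00\x00\xff\xff"

co = zlib.compressobj(wbits=-15)              # raw deflate, as permessage-deflate uses
body = co.compress(b"hello") + co.flush(zlib.Z_SYNC_FLUSH)
assert body.endswith(_WS_DEFLATE_TRAILING)
wire = body[:-4]                              # what actually goes on the wire

de = zlib.decompressobj(wbits=-15)            # the receiver re-appends the tail
assert de.decompress(wire + _WS_DEFLATE_TRAILING) == b"hello"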
@ -606,18 +639,18 @@ class WebSocketWriter:
else:
header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
if use_mask:
mask = self.randrange(0, 0xffffffff)
mask = mask.to_bytes(4, 'big')
mask = self.randrange(0, 0xFFFFFFFF)
mask = mask.to_bytes(4, "big")
message = bytearray(message)
_websocket_mask(mask, message)
self.transport.write(header + mask + message)
self._write(header + mask + message)
self._output_size += len(header) + len(mask) + len(message)
else:
if len(message) > MSG_SIZE:
self.transport.write(header)
self.transport.write(message)
self._write(header)
self._write(message)
else:
self.transport.write(header + message)
self._write(header + message)
self._output_size += len(header) + len(message)
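Masking itself is just a repeating 4-byte XOR over the payload, which is what the _websocket_mask helper implements (in C when the extension is available). A pure-Python equivalent, shown only to illustrate the transform:

def xor_mask(mask: bytes, payload: bytearray) -> None:
    """In-place XOR of payload with a repeating 4-byte mask (RFC 6455 section 5.3)."""
    for i in range(len(payload)):
        payload[i] ^= mask[i % 4]

data = bytearray(b"hello")
xor_mask(b"\x01\x02\x03\x04", data)  # mask
xor_mask(b"\x01\x02\x03\x04", data)  # unmask: XOR is its own inverse
assert data == b"hello"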
@ -625,35 +658,44 @@ class WebSocketWriter:
self._output_size = 0
await self.protocol._drain_helper()
async def pong(self, message: bytes=b'') -> None:
def _write(self, data: bytes) -> None:
if self.transport is None or self.transport.is_closing():
raise ConnectionResetError("Cannot write to closing transport")
self.transport.write(data)
async def pong(self, message: bytes = b"") -> None:
"""Send pong message."""
if isinstance(message, str):
message = message.encode('utf-8')
message = message.encode("utf-8")
await self._send_frame(message, WSMsgType.PONG)
async def ping(self, message: bytes=b'') -> None:
async def ping(self, message: bytes = b"") -> None:
"""Send ping message."""
if isinstance(message, str):
message = message.encode('utf-8')
message = message.encode("utf-8")
await self._send_frame(message, WSMsgType.PING)
async def send(self, message: Union[str, bytes],
binary: bool=False,
compress: Optional[int]=None) -> None:
async def send(
self,
message: Union[str, bytes],
binary: bool = False,
compress: Optional[int] = None,
) -> None:
"""Send a frame over the websocket with message as its payload."""
if isinstance(message, str):
message = message.encode('utf-8')
message = message.encode("utf-8")
if binary:
await self._send_frame(message, WSMsgType.BINARY, compress)
else:
await self._send_frame(message, WSMsgType.TEXT, compress)
async def close(self, code: int=1000, message: bytes=b'') -> None:
async def close(self, code: int = 1000, message: bytes = b"") -> None:
"""Close the websocket, sending the specified code and message."""
if isinstance(message, str):
message = message.encode('utf-8')
message = message.encode("utf-8")
try:
await self._send_frame(
PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE)
PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
)
finally:
self._closing = True
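At the public-API level this close handshake is driven for you; a typical client flow might look like the following sketch (the endpoint URL is a placeholder):

import asyncio
import aiohttp

async def main() -> None:
    async with aiohttp.ClientSession() as session:
        # wss://example.invalid/ws is a placeholder endpoint
        async with session.ws_connect("wss://example.invalid/ws") as ws:
            await ws.send_str("hello")
            await ws.close(code=1000, message=b"done")

asyncio.run(main())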


@ -1,32 +1,39 @@
"""Http related parsers and protocol."""
import asyncio
import collections
import zlib
from typing import Any, Awaitable, Callable, Optional, Union # noqa
from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa
from multidict import CIMultiDict # noqa
from multidict import CIMultiDict
from .abc import AbstractStreamWriter
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS
__all__ = ('StreamWriter', 'HttpVersion', 'HttpVersion10', 'HttpVersion11')
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
class HttpVersion(NamedTuple):
major: int
minor: int
HttpVersion = collections.namedtuple('HttpVersion', ['major', 'minor'])
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)
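Swapping collections.namedtuple for a typed NamedTuple keeps tuple semantics, so version ordering and equality behave as before. A quick self-contained illustration:

from typing import NamedTuple

class HttpVersion(NamedTuple):
    major: int
    minor: int

assert HttpVersion(1, 1) > HttpVersion(1, 0)  # ordinary tuple ordering still works
assert HttpVersion(1, 1) == (1, 1)            # and it stays interchangeable with a tuple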
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
class StreamWriter(AbstractStreamWriter):
def __init__(self,
protocol: BaseProtocol,
loop: asyncio.AbstractEventLoop,
on_chunk_sent: _T_OnChunkSent = None) -> None:
def __init__(
self,
protocol: BaseProtocol,
loop: asyncio.AbstractEventLoop,
on_chunk_sent: _T_OnChunkSent = None,
on_headers_sent: _T_OnHeadersSent = None,
) -> None:
self._protocol = protocol
self._transport = protocol.transport
@ -41,6 +48,7 @@ class StreamWriter(AbstractStreamWriter):
self._drain_waiter = None
self._on_chunk_sent = on_chunk_sent # type: _T_OnChunkSent
self._on_headers_sent = on_headers_sent # type: _T_OnHeadersSent
@property
def transport(self) -> Optional[asyncio.Transport]:
@ -53,10 +61,11 @@ class StreamWriter(AbstractStreamWriter):
def enable_chunking(self) -> None:
self.chunked = True
def enable_compression(self, encoding: str='deflate') -> None:
zlib_mode = (16 + zlib.MAX_WBITS
if encoding == 'gzip' else -zlib.MAX_WBITS)
self._compress = zlib.compressobj(wbits=zlib_mode)
def enable_compression(
self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
) -> None:
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)
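The wbits argument selects the container format: 16 + zlib.MAX_WBITS produces gzip framing, positive zlib.MAX_WBITS a zlib wrapper (the interpretation used here for HTTP deflate), and negative values a raw stream. A sketch of the three modes:

import zlib

def compress(data: bytes, wbits: int) -> bytes:
    co = zlib.compressobj(wbits=wbits)
    return co.compress(data) + co.flush()

gzip_body = compress(b"payload", 16 + zlib.MAX_WBITS)  # gzip container
zlib_body = compress(b"payload", zlib.MAX_WBITS)       # zlib container (HTTP deflate here)
raw_body = compress(b"payload", -zlib.MAX_WBITS)       # raw stream, no header at all

assert gzip_body[:2] == b"\x1f\x8b"                    # gzip magic number
assert zlib.decompress(raw_body, -zlib.MAX_WBITS) == b"payload"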
def _write(self, chunk: bytes) -> None:
size = len(chunk)
@ -64,11 +73,12 @@ class StreamWriter(AbstractStreamWriter):
self.output_size += size
if self._transport is None or self._transport.is_closing():
raise ConnectionResetError('Cannot write to closing transport')
raise ConnectionResetError("Cannot write to closing transport")
self._transport.write(chunk)
async def write(self, chunk: bytes,
*, drain: bool=True, LIMIT: int=0x10000) -> None:
async def write(
self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
) -> None:
"""Writes chunk of data to a stream.
write_eof() indicates end of stream.
@ -78,6 +88,11 @@ class StreamWriter(AbstractStreamWriter):
if self._on_chunk_sent is not None:
await self._on_chunk_sent(chunk)
if isinstance(chunk, memoryview):
if chunk.nbytes != len(chunk):
# just reshape it
chunk = chunk.cast("c")
if self._compress is not None:
chunk = self._compress.compress(chunk)
if not chunk:
@ -88,15 +103,15 @@ class StreamWriter(AbstractStreamWriter):
if self.length >= chunk_len:
self.length = self.length - chunk_len
else:
chunk = chunk[:self.length]
chunk = chunk[: self.length]
self.length = 0
if not chunk:
return
if chunk:
if self.chunked:
chunk_len_pre = ('%x\r\n' % len(chunk)).encode('ascii')
chunk = chunk_len_pre + chunk + b'\r\n'
chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
chunk = chunk_len_pre + chunk + b"\r\n"
self._write(chunk)
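For reference, each chunk on the wire is a hex length line, the data, and a CRLF, with 0\r\n\r\n terminating the body; that is exactly the framing built here and in write_eof below. A tiny standalone encoder:

def encode_chunk(data: bytes) -> bytes:
    """Frame one HTTP/1.1 chunk: hex length, CRLF, data, CRLF."""
    return ("%x\r\n" % len(data)).encode("ascii") + data + b"\r\n"

body = encode_chunk(b"hello") + encode_chunk(b", world") + b"0\r\n\r\n"
assert body == b"5\r\nhello\r\n7\r\n, world\r\n0\r\n\r\n"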
@ -104,14 +119,18 @@ class StreamWriter(AbstractStreamWriter):
self.buffer_size = 0
await self.drain()
async def write_headers(self, status_line: str,
headers: 'CIMultiDict[str]') -> None:
async def write_headers(
self, status_line: str, headers: "CIMultiDict[str]"
) -> None:
"""Write request/response status and headers."""
if self._on_headers_sent is not None:
await self._on_headers_sent(headers)
# status + headers
buf = _serialize_headers(status_line, headers)
self._write(buf)
async def write_eof(self, chunk: bytes=b'') -> None:
async def write_eof(self, chunk: bytes = b"") -> None:
if self._eof:
return
@ -124,15 +143,15 @@ class StreamWriter(AbstractStreamWriter):
chunk = chunk + self._compress.flush()
if chunk and self.chunked:
chunk_len = ('%x\r\n' % len(chunk)).encode('ascii')
chunk = chunk_len + chunk + b'\r\n0\r\n\r\n'
chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
else:
if self.chunked:
if chunk:
chunk_len = ('%x\r\n' % len(chunk)).encode('ascii')
chunk = chunk_len + chunk + b'\r\n0\r\n\r\n'
chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
else:
chunk = b'0\r\n\r\n'
chunk = b"0\r\n\r\n"
if chunk:
self._write(chunk)
@ -154,17 +173,26 @@ class StreamWriter(AbstractStreamWriter):
await self._protocol._drain_helper()
def _py_serialize_headers(status_line: str,
headers: 'CIMultiDict[str]') -> bytes:
line = status_line + '\r\n' + ''.join(
[k + ': ' + v + '\r\n' for k, v in headers.items()])
return line.encode('utf-8') + b'\r\n'
def _safe_header(string: str) -> str:
if "\r" in string or "\n" in string:
raise ValueError(
"Newline or carriage return detected in headers. "
"Potential header injection attack."
)
return string
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
return line.encode("utf-8")
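_safe_header closes a response-splitting hole: a CR or LF smuggled into a header value would otherwise end the header block early and let an attacker inject headers of their own. A standalone sketch mirroring the guard:

def safe_header(value: str) -> str:
    # mirrors aiohttp's _safe_header check
    if "\r" in value or "\n" in value:
        raise ValueError("Potential header injection attack.")
    return value

safe_header("text/html")  # fine
try:
    safe_header("evil\r\nSet-Cookie: session=stolen")
except ValueError:
    pass  # rejected before it can reach the wire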
_serialize_headers = _py_serialize_headers
try:
import aiohttp._http_writer as _http_writer # type: ignore
import aiohttp._http_writer as _http_writer # type: ignore[import]
_c_serialize_headers = _http_writer._serialize_headers
if not NO_EXTENSIONS:
_serialize_headers = _c_serialize_headers


@ -1,27 +1,24 @@
import asyncio
import collections
from typing import Any, Optional
try:
from typing import Deque
except ImportError:
from typing_extensions import Deque # noqa
from typing import Any, Deque, Optional
class EventResultOrError:
"""
This class wrappers the Event asyncio lock allowing either awake the
"""Event asyncio lock helper class.
Wraps the Event asyncio lock allowing either to awake the
locked Tasks without any error or raising an exception.
thanks to @vorpalsmith for the simple design.
"""
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
self._exc = None # type: Optional[BaseException]
self._event = asyncio.Event(loop=loop)
self._event = asyncio.Event()
self._waiters = collections.deque() # type: Deque[asyncio.Future[Any]]
def set(self, exc: Optional[BaseException]=None) -> None:
def set(self, exc: Optional[BaseException] = None) -> None:
self._exc = exc
self._event.set()
@ -39,6 +36,6 @@ class EventResultOrError:
return val
def cancel(self) -> None:
""" Cancel all waiters """
"""Cancel all waiters"""
for waiter in self._waiters:
waiter.cancel()
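Usage is symmetric: any number of tasks block in wait(), and one set() call releases them all, either normally or by re-raising the supplied exception. A runnable sketch, assuming the class is importable as aiohttp.locks.EventResultOrError as in current releases:

import asyncio
from aiohttp.locks import EventResultOrError

async def demo() -> None:
    loop = asyncio.get_running_loop()
    gate = EventResultOrError(loop)

    async def waiter(name: str) -> None:
        try:
            await gate.wait()
            print(name, "released")
        except RuntimeError as exc:
            print(name, "failed:", exc)

    tasks = [loop.create_task(waiter("a")), loop.create_task(waiter("b"))]
    await asyncio.sleep(0)  # let both waiters park on the event
    gate.set()              # or gate.set(RuntimeError("connection lost")) to fail them
    await asyncio.gather(*tasks)

asyncio.run(demo())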


@ -1,8 +1,8 @@
import logging
access_logger = logging.getLogger('aiohttp.access')
client_logger = logging.getLogger('aiohttp.client')
internal_logger = logging.getLogger('aiohttp.internal')
server_logger = logging.getLogger('aiohttp.server')
web_logger = logging.getLogger('aiohttp.web')
ws_logger = logging.getLogger('aiohttp.websocket')
access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
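Applications hook into these names through the standard logging module; for example, to surface access logs while quieting websocket chatter (the levels here are arbitrary choices):

import logging

logging.basicConfig(level=logging.INFO)
logging.getLogger("aiohttp.access").setLevel(logging.INFO)        # request/response lines
logging.getLogger("aiohttp.websocket").setLevel(logging.WARNING)  # quiet ws internals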


@ -7,9 +7,11 @@ import warnings
import zlib
from collections import deque
from types import TracebackType
from typing import ( # noqa
from typing import (
TYPE_CHECKING,
Any,
AsyncIterator,
Deque,
Dict,
Iterator,
List,
@ -19,10 +21,11 @@ from typing import ( # noqa
Tuple,
Type,
Union,
cast,
)
from urllib.parse import parse_qsl, unquote, urlencode
from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping # noqa
from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping
from .hdrs import (
CONTENT_DISPOSITION,
@ -44,13 +47,19 @@ from .payload import (
)
from .streams import StreamReader
__all__ = ('MultipartReader', 'MultipartWriter', 'BodyPartReader',
'BadContentDispositionHeader', 'BadContentDispositionParam',
'parse_content_disposition', 'content_disposition_filename')
__all__ = (
"MultipartReader",
"MultipartWriter",
"BodyPartReader",
"BadContentDispositionHeader",
"BadContentDispositionParam",
"parse_content_disposition",
"content_disposition_filename",
)
if TYPE_CHECKING: # pragma: no cover
from .client_reqrep import ClientResponse # noqa
from .client_reqrep import ClientResponse
class BadContentDispositionHeader(RuntimeWarning):
@ -61,9 +70,9 @@ class BadContentDispositionParam(RuntimeWarning):
pass
def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
Dict[str, str]]:
def parse_content_disposition(
header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
def is_token(string: str) -> bool:
return bool(string) and TOKEN >= set(string)
@ -74,23 +83,22 @@ def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
return is_token(string) and string.count("'") == 2
def is_extended_param(string: str) -> bool:
return string.endswith('*')
return string.endswith("*")
def is_continuous_param(string: str) -> bool:
pos = string.find('*') + 1
pos = string.find("*") + 1
if not pos:
return False
substring = string[pos:-1] if string.endswith('*') else string[pos:]
substring = string[pos:-1] if string.endswith("*") else string[pos:]
return substring.isdigit()
def unescape(text: str, *,
chars: str=''.join(map(re.escape, CHAR))) -> str:
return re.sub('\\\\([{}])'.format(chars), '\\1', text)
def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
return re.sub(f"\\\\([{chars}])", "\\1", text)
if not header:
return None, {}
disptype, *parts = header.split(';')
disptype, *parts = header.split(";")
if not is_token(disptype):
warnings.warn(BadContentDispositionHeader(header))
return None, {}
@ -99,11 +107,11 @@ def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
while parts:
item = parts.pop(0)
if '=' not in item:
if "=" not in item:
warnings.warn(BadContentDispositionHeader(header))
return None, {}
key, value = item.split('=', 1)
key, value = item.split("=", 1)
key = key.lower().strip()
value = value.lstrip()
@ -125,13 +133,13 @@ def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
elif is_extended_param(key):
if is_rfc5987(value):
encoding, _, value = value.split("'", 2)
encoding = encoding or 'utf-8'
encoding = encoding or "utf-8"
else:
warnings.warn(BadContentDispositionParam(item))
continue
try:
value = unquote(value, encoding, 'strict')
value = unquote(value, encoding, "strict")
except UnicodeDecodeError: # pragma: nocover
warnings.warn(BadContentDispositionParam(item))
continue
@ -140,16 +148,16 @@ def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
failed = True
if is_quoted(value):
failed = False
value = unescape(value[1:-1].lstrip('\\/'))
value = unescape(value[1:-1].lstrip("\\/"))
elif is_token(value):
failed = False
elif parts:
# maybe just ; in filename, in any case this is just
# one case fix, for proper fix we need to redesign parser
_value = '%s;%s' % (value, parts[0])
_value = f"{value};{parts[0]}"
if is_quoted(_value):
parts.pop(0)
value = unescape(_value[1:-1].lstrip('\\/'))
value = unescape(_value[1:-1].lstrip("\\/"))
failed = False
if failed:
@ -161,9 +169,10 @@ def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
return disptype.lower(), params
def content_disposition_filename(params: Mapping[str, str],
name: str='filename') -> Optional[str]:
name_suf = '%s*' % name
def content_disposition_filename(
params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
name_suf = "%s*" % name
if not params:
return None
elif name_suf in params:
@ -172,12 +181,12 @@ def content_disposition_filename(params: Mapping[str, str],
return params[name]
else:
parts = []
fnparams = sorted((key, value)
for key, value in params.items()
if key.startswith(name_suf))
fnparams = sorted(
(key, value) for key, value in params.items() if key.startswith(name_suf)
)
for num, (key, value) in enumerate(fnparams):
_, tail = key.split('*', 1)
if tail.endswith('*'):
_, tail = key.split("*", 1)
if tail.endswith("*"):
tail = tail[:-1]
if tail == str(num):
parts.append(value)
@ -185,11 +194,11 @@ def content_disposition_filename(params: Mapping[str, str],
break
if not parts:
return None
value = ''.join(parts)
value = "".join(parts)
if "'" in value:
encoding, _, value = value.split("'", 2)
encoding = encoding or 'utf-8'
return unquote(value, encoding, 'strict')
encoding = encoding or "utf-8"
return unquote(value, encoding, "strict")
return value
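Together, the two helpers above turn a raw header into a usable filename, with the RFC 5987 extended form taking precedence over the plain one. An illustrative example (the header value is invented for the demo):

from aiohttp.multipart import (
    content_disposition_filename,
    parse_content_disposition,
)

disptype, params = parse_content_disposition(
    "attachment; filename=report.pdf; filename*=utf-8''r%C3%A9sum%C3%A9.pdf"
)
assert disptype == "attachment"
# The extended (filename*) form takes precedence over the plain one:
assert content_disposition_filename(params) == "résumé.pdf"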
@ -202,21 +211,21 @@ class MultipartResponseWrapper:
def __init__(
self,
resp: 'ClientResponse',
stream: 'MultipartReader',
resp: "ClientResponse",
stream: "MultipartReader",
) -> None:
self.resp = resp
self.stream = stream
def __aiter__(self) -> 'MultipartResponseWrapper':
def __aiter__(self) -> "MultipartResponseWrapper":
return self
async def __anext__(
self,
) -> Union['MultipartReader', 'BodyPartReader']:
) -> Union["MultipartReader", "BodyPartReader"]:
part = await self.next()
if part is None:
raise StopAsyncIteration # NOQA
raise StopAsyncIteration
return part
def at_eof(self) -> bool:
@ -225,7 +234,7 @@ class MultipartResponseWrapper:
async def next(
self,
) -> Optional[Union['MultipartReader', 'BodyPartReader']]:
) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
"""Emits next multipart reader object."""
item = await self.stream.next()
if self.stream.at_eof():
@ -233,8 +242,10 @@ class MultipartResponseWrapper:
return item
async def release(self) -> None:
"""Releases the connection gracefully, reading all the content
to the void."""
"""Release the connection gracefully.
All remaining content is read to the void.
"""
await self.resp.release()
@ -243,9 +254,9 @@ class BodyPartReader:
chunk_size = 8192
def __init__(self, boundary: bytes,
headers: 'CIMultiDictProxy[str]',
content: StreamReader) -> None:
def __init__(
self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
) -> None:
self.headers = headers
self._boundary = boundary
self._content = content
@ -254,18 +265,18 @@ class BodyPartReader:
self._length = int(length) if length is not None else None
self._read_bytes = 0
# TODO: typing.Deque is not supported by Python 3.5
self._unread = deque() # type: Any
self._unread: Deque[bytes] = deque()
self._prev_chunk = None # type: Optional[bytes]
self._content_eof = 0
self._cache = {} # type: Dict[str, Any]
def __aiter__(self) -> 'BodyPartReader':
return self
def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
return self # type: ignore[return-value]
async def __anext__(self) -> bytes:
part = await self.next()
if part is None:
raise StopAsyncIteration # NOQA
raise StopAsyncIteration
return part
async def next(self) -> Optional[bytes]:
@ -274,7 +285,7 @@ class BodyPartReader:
return None
return item
async def read(self, *, decode: bool=False) -> bytes:
async def read(self, *, decode: bool = False) -> bytes:
"""Reads body part data.
decode: Decodes data following by encoding
@ -282,21 +293,21 @@ class BodyPartReader:
data remains untouched
"""
if self._at_eof:
return b''
return b""
data = bytearray()
while not self._at_eof:
data.extend((await self.read_chunk(self.chunk_size)))
data.extend(await self.read_chunk(self.chunk_size))
if decode:
return self.decode(data)
return data
async def read_chunk(self, size: int=chunk_size) -> bytes:
async def read_chunk(self, size: int = chunk_size) -> bytes:
"""Reads body part content chunk of the specified size.
size: chunk size
"""
if self._at_eof:
return b''
return b""
if self._length:
chunk = await self._read_chunk_from_length(size)
else:
@ -307,15 +318,15 @@ class BodyPartReader:
self._at_eof = True
if self._at_eof:
clrf = await self._content.readline()
assert b'\r\n' == clrf, \
'reader did not read all the data or it is malformed'
assert (
b"\r\n" == clrf
), "reader did not read all the data or it is malformed"
return chunk
async def _read_chunk_from_length(self, size: int) -> bytes:
# Reads body part content chunk of the specified size.
# The body part must have a Content-Length header with a proper value.
assert self._length is not None, \
'Content-Length required for chunked read'
assert self._length is not None, "Content-Length required for chunked read"
chunk_size = min(size, self._length - self._read_bytes)
chunk = await self._content.read(chunk_size)
return chunk
@ -323,8 +334,9 @@ class BodyPartReader:
async def _read_chunk_from_stream(self, size: int) -> bytes:
# Reads content chunk of body part with unknown length.
# The Content-Length header for body part is not necessary.
assert size >= len(self._boundary) + 2, \
'Chunk size must be greater or equal than boundary length + 2'
assert (
size >= len(self._boundary) + 2
), "Chunk size must be greater or equal than boundary length + 2"
first_chunk = self._prev_chunk is None
if first_chunk:
self._prev_chunk = await self._content.read(size)
@ -334,7 +346,7 @@ class BodyPartReader:
assert self._content_eof < 3, "Reading after EOF"
assert self._prev_chunk is not None
window = self._prev_chunk + chunk
sub = b'\r\n' + self._boundary
sub = b"\r\n" + self._boundary
if first_chunk:
idx = window.find(sub)
else:
@ -342,12 +354,11 @@ class BodyPartReader:
if idx >= 0:
# pushing boundary back to content
with warnings.catch_warnings():
warnings.filterwarnings("ignore",
category=DeprecationWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning)
self._content.unread_data(window[idx:])
if size > idx:
self._prev_chunk = self._prev_chunk[:idx]
chunk = window[len(self._prev_chunk):idx]
chunk = window[len(self._prev_chunk) : idx]
if not chunk:
self._at_eof = True
result = self._prev_chunk
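The two-chunk window exists because the delimiter (CRLF plus the boundary) can straddle a read boundary; keeping the previous chunk ensures the delimiter is always visible in full. A standalone sketch of the same search, with a hypothetical boundary:

def find_across_chunks(prev: bytes, cur: bytes, sub: bytes) -> int:
    """Find sub in prev+cur, tolerating a match that straddles the seam."""
    window = prev + cur
    # Only the last len(sub) bytes of prev can start a straddling match,
    # so the scan stays O(len(cur) + len(sub)) instead of rescanning prev.
    return window.find(sub, max(0, len(prev) - len(sub)))

sub = b"\r\n--deadbeef"  # hypothetical multipart delimiter
prev, cur = b"part one\r\n--dead", b"beef\r\npart two"
assert find_across_chunks(prev, cur, sub) == 8  # found despite spanning both chunks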
@ -357,7 +368,7 @@ class BodyPartReader:
async def readline(self) -> bytes:
"""Reads body part by line by line."""
if self._at_eof:
return b''
return b""
if self._unread:
line = self._unread.popleft()
@ -367,14 +378,14 @@ class BodyPartReader:
if line.startswith(self._boundary):
# the very last boundary may not come with \r\n,
# so set single rules for everyone
sline = line.rstrip(b'\r\n')
sline = line.rstrip(b"\r\n")
boundary = self._boundary
last_boundary = self._boundary + b'--'
last_boundary = self._boundary + b"--"
# ensure that we read exactly the boundary, not something alike
if sline == boundary or sline == last_boundary:
self._at_eof = True
self._unread.append(line)
return b''
return b""
else:
next_line = await self._content.readline()
if next_line.startswith(self._boundary):
@ -390,46 +401,45 @@ class BodyPartReader:
while not self._at_eof:
await self.read_chunk(self.chunk_size)
async def text(self, *, encoding: Optional[str]=None) -> str:
async def text(self, *, encoding: Optional[str] = None) -> str:
"""Like read(), but assumes that body part contains text data."""
data = await self.read(decode=True)
# see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA
# and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA
encoding = encoding or self.get_charset(default='utf-8')
encoding = encoding or self.get_charset(default="utf-8")
return data.decode(encoding)
async def json(self,
*,
encoding: Optional[str]=None) -> Optional[Dict[str, Any]]:
async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
"""Like read(), but assumes that body parts contains JSON data."""
data = await self.read(decode=True)
if not data:
return None
encoding = encoding or self.get_charset(default='utf-8')
return json.loads(data.decode(encoding))
encoding = encoding or self.get_charset(default="utf-8")
return cast(Dict[str, Any], json.loads(data.decode(encoding)))
async def form(self, *,
encoding: Optional[str]=None) -> List[Tuple[str, str]]:
"""Like read(), but assumes that body parts contains form
urlencoded data.
"""
async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
"""Like read(), but assumes that body parts contain form urlencoded data."""
data = await self.read(decode=True)
if not data:
return []
if encoding is not None:
real_encoding = encoding
else:
real_encoding = self.get_charset(default='utf-8')
return parse_qsl(data.rstrip().decode(real_encoding),
keep_blank_values=True,
encoding=real_encoding)
real_encoding = self.get_charset(default="utf-8")
return parse_qsl(
data.rstrip().decode(real_encoding),
keep_blank_values=True,
encoding=real_encoding,
)
def at_eof(self) -> bool:
"""Returns True if the boundary was reached or False otherwise."""
return self._at_eof
def decode(self, data: bytes) -> bytes:
"""Decodes data according the specified Content-Encoding
"""Decodes data.
Decoding is done according the specified Content-Encoding
or Content-Transfer-Encoding headers value.
"""
if CONTENT_TRANSFER_ENCODING in self.headers:
@ -439,78 +449,76 @@ class BodyPartReader:
return data
def _decode_content(self, data: bytes) -> bytes:
encoding = self.headers.get(CONTENT_ENCODING, '').lower()
encoding = self.headers.get(CONTENT_ENCODING, "").lower()
if encoding == 'deflate':
if encoding == "deflate":
return zlib.decompress(data, -zlib.MAX_WBITS)
elif encoding == 'gzip':
elif encoding == "gzip":
return zlib.decompress(data, 16 + zlib.MAX_WBITS)
elif encoding == 'identity':
elif encoding == "identity":
return data
else:
raise RuntimeError('unknown content encoding: {}'.format(encoding))
raise RuntimeError(f"unknown content encoding: {encoding}")
def _decode_content_transfer(self, data: bytes) -> bytes:
encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, '').lower()
encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
if encoding == 'base64':
if encoding == "base64":
return base64.b64decode(data)
elif encoding == 'quoted-printable':
elif encoding == "quoted-printable":
return binascii.a2b_qp(data)
elif encoding in ('binary', '8bit', '7bit'):
elif encoding in ("binary", "8bit", "7bit"):
return data
else:
raise RuntimeError('unknown content transfer encoding: {}'
''.format(encoding))
raise RuntimeError(
"unknown content transfer encoding: {}" "".format(encoding)
)
def get_charset(self, default: str) -> str:
"""Returns charset parameter from Content-Type header or default."""
ctype = self.headers.get(CONTENT_TYPE, '')
ctype = self.headers.get(CONTENT_TYPE, "")
mimetype = parse_mimetype(ctype)
return mimetype.parameters.get('charset', default)
return mimetype.parameters.get("charset", default)
@reify
def name(self) -> Optional[str]:
"""Returns name specified in Content-Disposition header or None
if missed or header is malformed.
"""
"""Returns name specified in Content-Disposition header.
_, params = parse_content_disposition(
self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, 'name')
If the header is missing or malformed, returns None.
"""
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, "name")
@reify
def filename(self) -> Optional[str]:
"""Returns filename specified in Content-Disposition header or None
if missed or header is malformed.
"""Returns filename specified in Content-Disposition header.
Returns None if the header is missing or malformed.
"""
_, params = parse_content_disposition(
self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, 'filename')
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, "filename")
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
def __init__(self, value: BodyPartReader,
*args: Any, **kwargs: Any) -> None:
def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
super().__init__(value, *args, **kwargs)
params = {} # type: Dict[str, str]
if value.name is not None:
params['name'] = value.name
params["name"] = value.name
if value.filename is not None:
params['filename'] = value.filename
params["filename"] = value.filename
if params:
self.set_content_disposition('attachment', True, **params)
self.set_content_disposition("attachment", True, **params)
async def write(self, writer: Any) -> None:
field = self._value
chunk = await field.read_chunk(size=2**16)
chunk = await field.read_chunk(size=2 ** 16)
while chunk:
await writer.write(field.decode(chunk))
chunk = await field.read_chunk(size=2**16)
chunk = await field.read_chunk(size=2 ** 16)
class MultipartReader:
@ -524,49 +532,51 @@ class MultipartReader:
#: Body part reader class for non multipart/* content types.
part_reader_cls = BodyPartReader
def __init__(self, headers: Mapping[str, str],
content: StreamReader) -> None:
def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
self.headers = headers
self._boundary = ('--' + self._get_boundary()).encode()
self._boundary = ("--" + self._get_boundary()).encode()
self._content = content
self._last_part = None # type: Optional[Union['MultipartReader', BodyPartReader]] # noqa
self._last_part = (
None
) # type: Optional[Union['MultipartReader', BodyPartReader]]
self._at_eof = False
self._at_bof = True
self._unread = [] # type: List[bytes]
def __aiter__(self) -> 'MultipartReader':
return self
def __aiter__(
self,
) -> AsyncIterator["BodyPartReader"]:
return self # type: ignore[return-value]
async def __anext__(
self,
) -> Union['MultipartReader', BodyPartReader]:
) -> Optional[Union["MultipartReader", BodyPartReader]]:
part = await self.next()
if part is None:
raise StopAsyncIteration # NOQA
raise StopAsyncIteration
return part
@classmethod
def from_response(
cls,
response: 'ClientResponse',
response: "ClientResponse",
) -> MultipartResponseWrapper:
"""Constructs reader instance from HTTP response.
:param response: :class:`~aiohttp.client.ClientResponse` instance
"""
obj = cls.response_wrapper_cls(response, cls(response.headers,
response.content))
obj = cls.response_wrapper_cls(
response, cls(response.headers, response.content)
)
return obj
def at_eof(self) -> bool:
"""Returns True if the final boundary was reached or
False otherwise.
"""
"""Returns True if the final boundary was reached, false otherwise."""
return self._at_eof
async def next(
self,
) -> Optional[Union['MultipartReader', BodyPartReader]]:
) -> Optional[Union["MultipartReader", BodyPartReader]]:
"""Emits the next multipart body part."""
# So, if we're at BOF, we need to skip till the boundary.
if self._at_eof:
@ -592,24 +602,25 @@ class MultipartReader:
async def fetch_next_part(
self,
) -> Union['MultipartReader', BodyPartReader]:
) -> Union["MultipartReader", BodyPartReader]:
"""Returns the next body part reader."""
headers = await self._read_headers()
return self._get_part_reader(headers)
def _get_part_reader(
self,
headers: 'CIMultiDictProxy[str]',
) -> Union['MultipartReader', BodyPartReader]:
"""Dispatches the response by the `Content-Type` header, returning
suitable reader instance.
headers: "CIMultiDictProxy[str]",
) -> Union["MultipartReader", BodyPartReader]:
"""Dispatches the response by the `Content-Type` header.
Returns a suitable reader instance.
:param dict headers: Response headers
"""
ctype = headers.get(CONTENT_TYPE, '')
ctype = headers.get(CONTENT_TYPE, "")
mimetype = parse_mimetype(ctype)
if mimetype.type == 'multipart':
if mimetype.type == "multipart":
if self.multipart_reader_cls is None:
return type(self)(headers, self._content)
return self.multipart_reader_cls(headers, self._content)
@ -619,18 +630,16 @@ class MultipartReader:
def _get_boundary(self) -> str:
mimetype = parse_mimetype(self.headers[CONTENT_TYPE])
assert mimetype.type == 'multipart', (
'multipart/* content type expected'
)
assert mimetype.type == "multipart", "multipart/* content type expected"
if 'boundary' not in mimetype.parameters:
raise ValueError('boundary missed for Content-Type: %s'
% self.headers[CONTENT_TYPE])
if "boundary" not in mimetype.parameters:
raise ValueError(
"boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE]
)
boundary = mimetype.parameters['boundary']
boundary = mimetype.parameters["boundary"]
if len(boundary) > 70:
raise ValueError('boundary %r is too long (70 chars max)'
% boundary)
raise ValueError("boundary %r is too long (70 chars max)" % boundary)
return boundary
@ -642,13 +651,14 @@ class MultipartReader:
async def _read_until_first_boundary(self) -> None:
while True:
chunk = await self._readline()
if chunk == b'':
raise ValueError("Could not find starting boundary %r"
% (self._boundary))
if chunk == b"":
raise ValueError(
"Could not find starting boundary %r" % (self._boundary)
)
chunk = chunk.rstrip()
if chunk == self._boundary:
return
elif chunk == self._boundary + b'--':
elif chunk == self._boundary + b"--":
self._at_eof = True
return
@ -656,7 +666,7 @@ class MultipartReader:
chunk = (await self._readline()).rstrip()
if chunk == self._boundary:
pass
elif chunk == self._boundary + b'--':
elif chunk == self._boundary + b"--":
self._at_eof = True
epilogue = await self._readline()
next_line = await self._readline()
@ -665,7 +675,7 @@ class MultipartReader:
# parent multipart boundary, if the parent boundary is found then
# it should be marked as unread and handed to the parent for
# processing
if next_line[:2] == b'--':
if next_line[:2] == b"--":
self._unread.append(next_line)
# otherwise the request is likely missing an epilogue and both
# lines should be passed to the parent for processing
@ -673,11 +683,10 @@ class MultipartReader:
else:
self._unread.extend([next_line, epilogue])
else:
raise ValueError('Invalid boundary %r, expected %r'
% (chunk, self._boundary))
raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
async def _read_headers(self) -> 'CIMultiDictProxy[str]':
lines = [b'']
async def _read_headers(self) -> "CIMultiDictProxy[str]":
lines = [b""]
while True:
chunk = await self._content.readline()
chunk = chunk.strip()
@ -703,8 +712,7 @@ _Part = Tuple[Payload, str, str]
class MultipartWriter(Payload):
"""Multipart body writer."""
def __init__(self, subtype: str='mixed',
boundary: Optional[str]=None) -> None:
def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
boundary = boundary if boundary is not None else uuid.uuid4().hex
# The underlying Payload API demands a str (utf-8), not bytes,
# so we need to ensure we don't lose anything during conversion.
@ -712,24 +720,24 @@ class MultipartWriter(Payload):
# In both situations.
try:
self._boundary = boundary.encode('ascii')
self._boundary = boundary.encode("ascii")
except UnicodeEncodeError:
raise ValueError('boundary should contain ASCII only chars') \
from None
ctype = ('multipart/{}; boundary={}'
.format(subtype, self._boundary_value))
raise ValueError("boundary should contain ASCII only chars") from None
ctype = f"multipart/{subtype}; boundary={self._boundary_value}"
super().__init__(None, content_type=ctype)
self._parts = [] # type: List[_Part] # noqa
self._parts = [] # type: List[_Part]
def __enter__(self) -> 'MultipartWriter':
def __enter__(self) -> "MultipartWriter":
return self
def __exit__(self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> None:
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
pass
def __iter__(self) -> Iterator[_Part]:
@ -765,26 +773,22 @@ class MultipartWriter(Payload):
# VCHAR = %x21-7E
value = self._boundary
if re.match(self._valid_tchar_regex, value):
return value.decode('ascii') # cannot fail
return value.decode("ascii") # cannot fail
if re.search(self._invalid_qdtext_char_regex, value):
raise ValueError("boundary value contains invalid characters")
# escape %x5C and %x22
quoted_value_content = value.replace(b'\\', b'\\\\')
quoted_value_content = value.replace(b"\\", b"\\\\")
quoted_value_content = quoted_value_content.replace(b'"', b'\\"')
return '"' + quoted_value_content.decode('ascii') + '"'
return '"' + quoted_value_content.decode("ascii") + '"'
@property
def boundary(self) -> str:
return self._boundary.decode('ascii')
return self._boundary.decode("ascii")
def append(
self,
obj: Any,
headers: Optional[MultiMapping[str]]=None
) -> Payload:
def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload:
if headers is None:
headers = CIMultiDict()
@ -795,7 +799,7 @@ class MultipartWriter(Payload):
try:
payload = get_payload(obj, headers=headers)
except LookupError:
raise TypeError('Cannot create payload from %r' % obj)
raise TypeError("Cannot create payload from %r" % obj)
else:
return self.append_payload(payload)
@ -804,22 +808,23 @@ class MultipartWriter(Payload):
# compression
encoding = payload.headers.get(
CONTENT_ENCODING,
'',
"",
).lower() # type: Optional[str]
if encoding and encoding not in ('deflate', 'gzip', 'identity'):
raise RuntimeError('unknown content encoding: {}'.format(encoding))
if encoding == 'identity':
if encoding and encoding not in ("deflate", "gzip", "identity"):
raise RuntimeError(f"unknown content encoding: {encoding}")
if encoding == "identity":
encoding = None
# te encoding
te_encoding = payload.headers.get(
CONTENT_TRANSFER_ENCODING,
'',
"",
).lower() # type: Optional[str]
if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'):
raise RuntimeError('unknown content transfer encoding: {}'
''.format(te_encoding))
if te_encoding == 'binary':
if te_encoding not in ("", "base64", "quoted-printable", "binary"):
raise RuntimeError(
"unknown content transfer encoding: {}" "".format(te_encoding)
)
if te_encoding == "binary":
te_encoding = None
# size
@ -827,13 +832,11 @@ class MultipartWriter(Payload):
if size is not None and not (encoding or te_encoding):
payload.headers[CONTENT_LENGTH] = str(size)
self._parts.append((payload, encoding, te_encoding)) # type: ignore
self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type]
return payload
def append_json(
self,
obj: Any,
headers: Optional[MultiMapping[str]]=None
self, obj: Any, headers: Optional[MultiMapping[str]] = None
) -> Payload:
"""Helper to append JSON part."""
if headers is None:
@ -842,10 +845,9 @@ class MultipartWriter(Payload):
return self.append_payload(JsonPayload(obj, headers=headers))
def append_form(
self,
obj: Union[Sequence[Tuple[str, str]],
Mapping[str, str]],
headers: Optional[MultiMapping[str]]=None
self,
obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
headers: Optional[MultiMapping[str]] = None,
) -> Payload:
"""Helper to append form urlencoded part."""
assert isinstance(obj, (Sequence, Mapping))
@ -858,8 +860,10 @@ class MultipartWriter(Payload):
data = urlencode(obj, doseq=True)
return self.append_payload(
StringPayload(data, headers=headers,
content_type='application/x-www-form-urlencoded'))
StringPayload(
data, headers=headers, content_type="application/x-www-form-urlencoded"
)
)
@property
def size(self) -> Optional[int]:
@ -870,19 +874,21 @@ class MultipartWriter(Payload):
return None
total += int(
2 + len(self._boundary) + 2 + # b'--'+self._boundary+b'\r\n'
part.size + len(part._binary_headers) +
2 # b'\r\n'
2
+ len(self._boundary)
+ 2
+ part.size # b'--'+self._boundary+b'\r\n'
+ len(part._binary_headers)
+ 2 # b'\r\n'
)
total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n'
return total
async def write(self, writer: Any,
close_boundary: bool=True) -> None:
async def write(self, writer: Any, close_boundary: bool = True) -> None:
"""Write body."""
for part, encoding, te_encoding in self._parts:
await writer.write(b'--' + self._boundary + b'\r\n')
await writer.write(b"--" + self._boundary + b"\r\n")
await writer.write(part._binary_headers)
if encoding or te_encoding:
@ -891,19 +897,18 @@ class MultipartWriter(Payload):
w.enable_compression(encoding)
if te_encoding:
w.enable_encoding(te_encoding)
await part.write(w) # type: ignore
await part.write(w) # type: ignore[arg-type]
await w.write_eof()
else:
await part.write(writer)
await writer.write(b'\r\n')
await writer.write(b"\r\n")
if close_boundary:
await writer.write(b'--' + self._boundary + b'--\r\n')
await writer.write(b"--" + self._boundary + b"--\r\n")
class MultipartPayloadWriter:
def __init__(self, writer: Any) -> None:
self._writer = writer
self._encoding = None # type: Optional[str]
@ -911,16 +916,17 @@ class MultipartPayloadWriter:
self._encoding_buffer = None # type: Optional[bytearray]
def enable_encoding(self, encoding: str) -> None:
if encoding == 'base64':
if encoding == "base64":
self._encoding = encoding
self._encoding_buffer = bytearray()
elif encoding == 'quoted-printable':
self._encoding = 'quoted-printable'
elif encoding == "quoted-printable":
self._encoding = "quoted-printable"
def enable_compression(self, encoding: str='deflate') -> None:
zlib_mode = (16 + zlib.MAX_WBITS
if encoding == 'gzip' else -zlib.MAX_WBITS)
self._compress = zlib.compressobj(wbits=zlib_mode)
def enable_compression(
self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
) -> None:
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS
self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)
async def write_eof(self) -> None:
if self._compress is not None:
@ -929,10 +935,9 @@ class MultipartPayloadWriter:
self._compress = None
await self.write(chunk)
if self._encoding == 'base64':
if self._encoding == "base64":
if self._encoding_buffer:
await self._writer.write(base64.b64encode(
self._encoding_buffer))
await self._writer.write(base64.b64encode(self._encoding_buffer))
async def write(self, chunk: bytes) -> None:
if self._compress is not None:
@ -941,19 +946,18 @@ class MultipartPayloadWriter:
if not chunk:
return
if self._encoding == 'base64':
if self._encoding == "base64":
buf = self._encoding_buffer
assert buf is not None
buf.extend(chunk)
if buf:
div, mod = divmod(len(buf), 3)
enc_chunk, self._encoding_buffer = (
buf[:div * 3], buf[div * 3:])
enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
if enc_chunk:
b64chunk = base64.b64encode(enc_chunk)
await self._writer.write(b64chunk)
elif self._encoding == 'quoted-printable':
elif self._encoding == "quoted-printable":
await self._writer.write(binascii.b2a_qp(chunk))
else:
await self._writer.write(chunk)
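The divmod(len(buf), 3) split above matters because base64 encodes 3-byte groups; flushing a partial group early would emit '=' padding mid-stream. A minimal sketch of the same buffering strategy:

import base64

def b64_stream(chunks):
    """Yield base64 output incrementally, holding back partial 3-byte groups."""
    buf = bytearray()
    for chunk in chunks:
        buf.extend(chunk)
        div, _ = divmod(len(buf), 3)
        ready, buf = buf[: div * 3], buf[div * 3 :]
        if ready:
            yield base64.b64encode(ready)
    if buf:
        yield base64.b64encode(buf)  # final partial group, '=' padded

assert b"".join(b64_stream([b"he", b"ll", b"o"])) == base64.b64encode(b"hello")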


@ -15,7 +15,6 @@ from typing import (
Dict,
Iterable,
Optional,
Text,
TextIO,
Tuple,
Type,
@ -33,20 +32,29 @@ from .helpers import (
parse_mimetype,
sentinel,
)
from .streams import DEFAULT_LIMIT, StreamReader
from .typedefs import JSONEncoder, _CIMultiDict
from .streams import StreamReader
from .typedefs import Final, JSONEncoder, _CIMultiDict
__all__ = ('PAYLOAD_REGISTRY', 'get_payload', 'payload_type', 'Payload',
'BytesPayload', 'StringPayload',
'IOBasePayload', 'BytesIOPayload', 'BufferedReaderPayload',
'TextIOPayload', 'StringIOPayload', 'JsonPayload',
'AsyncIterablePayload')
TOO_LARGE_BYTES_BODY = 2 ** 20 # 1 MB
__all__ = (
"PAYLOAD_REGISTRY",
"get_payload",
"payload_type",
"Payload",
"BytesPayload",
"StringPayload",
"IOBasePayload",
"BytesIOPayload",
"BufferedReaderPayload",
"TextIOPayload",
"StringIOPayload",
"JsonPayload",
"AsyncIterablePayload",
)
TOO_LARGE_BYTES_BODY: Final[int] = 2 ** 20 # 1 MB
if TYPE_CHECKING: # pragma: no cover
from typing import List # noqa
from typing import List
class LookupError(Exception):
@ -54,33 +62,35 @@ class LookupError(Exception):
class Order(str, enum.Enum):
normal = 'normal'
try_first = 'try_first'
try_last = 'try_last'
normal = "normal"
try_first = "try_first"
try_last = "try_last"
def get_payload(data: Any, *args: Any, **kwargs: Any) -> 'Payload':
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
def register_payload(factory: Type['Payload'],
type: Any,
*,
order: Order=Order.normal) -> None:
def register_payload(
factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
PAYLOAD_REGISTRY.register(factory, type, order=order)
class payload_type:
def __init__(self, type: Any, *, order: Order=Order.normal) -> None:
def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
self.type = type
self.order = order
def __call__(self, factory: Type['Payload']) -> Type['Payload']:
def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
register_payload(factory, self.type, order=self.order)
return factory
PayloadType = Type["Payload"]
_PayloadRegistryItem = Tuple[PayloadType, Any]
class PayloadRegistry:
"""Payload registry.
@ -88,15 +98,17 @@ class PayloadRegistry:
"""
def __init__(self) -> None:
self._first = [] # type: List[Tuple[Type[Payload], Any]]
self._normal = [] # type: List[Tuple[Type[Payload], Any]]
self._last = [] # type: List[Tuple[Type[Payload], Any]]
self._first = [] # type: List[_PayloadRegistryItem]
self._normal = [] # type: List[_PayloadRegistryItem]
self._last = [] # type: List[_PayloadRegistryItem]
def get(self,
data: Any,
*args: Any,
_CHAIN: Any=chain,
**kwargs: Any) -> 'Payload':
def get(
self,
data: Any,
*args: Any,
_CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
**kwargs: Any,
) -> "Payload":
if isinstance(data, Payload):
return data
for factory, type in _CHAIN(self._first, self._normal, self._last):
@ -105,11 +117,9 @@ class PayloadRegistry:
raise LookupError()
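Registration is first-match within each order bucket, so try_first lets a specialized payload shadow a generic one. A hedged sketch of registering a custom type (the Celsius classes are hypothetical):

from aiohttp.abc import AbstractStreamWriter
from aiohttp.payload import Order, Payload, get_payload, payload_type

class Celsius:  # hypothetical application type
    def __init__(self, value: float) -> None:
        self.value = value

@payload_type(Celsius, order=Order.try_first)
class CelsiusPayload(Payload):
    async def write(self, writer: AbstractStreamWriter) -> None:
        await writer.write(f"{self._value.value:.1f}".encode("ascii"))

payload = get_payload(Celsius(21.5))  # dispatches to CelsiusPayload
assert isinstance(payload, CelsiusPayload)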
def register(self,
factory: Type['Payload'],
type: Any,
*,
order: Order=Order.normal) -> None:
def register(
self, factory: PayloadType, type: Any, *, order: Order = Order.normal
) -> None:
if order is Order.try_first:
self._first.append((factory, type))
elif order is Order.normal:
@ -117,27 +127,25 @@ class PayloadRegistry:
elif order is Order.try_last:
self._last.append((factory, type))
else:
raise ValueError("Unsupported order {!r}".format(order))
raise ValueError(f"Unsupported order {order!r}")
class Payload(ABC):
_default_content_type = 'application/octet-stream' # type: str
_default_content_type = "application/octet-stream" # type: str
_size = None # type: Optional[int]
def __init__(self,
value: Any,
headers: Optional[
Union[
_CIMultiDict,
Dict[str, str],
Iterable[Tuple[str, str]]
]
] = None,
content_type: Optional[str]=sentinel,
filename: Optional[str]=None,
encoding: Optional[str]=None,
**kwargs: Any) -> None:
def __init__(
self,
value: Any,
headers: Optional[
Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
] = None,
content_type: Optional[str] = sentinel,
filename: Optional[str] = None,
encoding: Optional[str] = None,
**kwargs: Any,
) -> None:
self._encoding = encoding
self._filename = filename
self._headers = CIMultiDict() # type: _CIMultiDict
@ -170,9 +178,12 @@ class Payload(ABC):
@property
def _binary_headers(self) -> bytes:
return ''.join(
[k + ': ' + v + '\r\n' for k, v in self.headers.items()]
).encode('utf-8') + b'\r\n'
return (
"".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
"utf-8"
)
+ b"\r\n"
)
@property
def encoding(self) -> Optional[str]:
@ -184,13 +195,17 @@ class Payload(ABC):
"""Content type"""
return self._headers[hdrs.CONTENT_TYPE]
def set_content_disposition(self,
disptype: str,
quote_fields: bool=True,
**params: Any) -> None:
def set_content_disposition(
self,
disptype: str,
quote_fields: bool = True,
_charset: str = "utf-8",
**params: Any,
) -> None:
"""Sets ``Content-Disposition`` header."""
self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
disptype, quote_fields=quote_fields, **params)
disptype, quote_fields=quote_fields, _charset=_charset, **params
)
@abstractmethod
async def write(self, writer: AbstractStreamWriter) -> None:
@ -201,55 +216,57 @@ class Payload(ABC):
class BytesPayload(Payload):
def __init__(self,
value: ByteString,
*args: Any,
**kwargs: Any) -> None:
def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
if not isinstance(value, (bytes, bytearray, memoryview)):
raise TypeError("value argument must be byte-ish, not {!r}"
.format(type(value)))
raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")
if 'content_type' not in kwargs:
kwargs['content_type'] = 'application/octet-stream'
if "content_type" not in kwargs:
kwargs["content_type"] = "application/octet-stream"
super().__init__(value, *args, **kwargs)
self._size = len(value)
if isinstance(value, memoryview):
self._size = value.nbytes
else:
self._size = len(value)
if self._size > TOO_LARGE_BYTES_BODY:
if PY_36:
kwargs = {'source': self}
kwargs = {"source": self}
else:
kwargs = {}
warnings.warn("Sending a large body directly with raw bytes might"
" lock the event loop. You should probably pass an "
"io.BytesIO object instead", ResourceWarning,
**kwargs)
warnings.warn(
"Sending a large body directly with raw bytes might"
" lock the event loop. You should probably pass an "
"io.BytesIO object instead",
ResourceWarning,
**kwargs,
)
async def write(self, writer: AbstractStreamWriter) -> None:
await writer.write(self._value)
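This warning is why large static bodies are better handed over as file-like objects: io.BytesIO dispatches to BytesIOPayload, which reads in chunks instead of pushing one huge bytes object at the transport. A hedged client-side sketch (the URL is a placeholder):

import io
import aiohttp

async def upload(session: aiohttp.ClientSession, blob: bytes) -> None:
    # Above ~1 MB, wrap the body so it is streamed in chunks rather than
    # written to the transport in a single call.
    data = io.BytesIO(blob) if len(blob) > 2 ** 20 else blob
    await session.post("http://example.invalid/upload", data=data)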
class StringPayload(BytesPayload):
def __init__(self,
value: Text,
*args: Any,
encoding: Optional[str]=None,
content_type: Optional[str]=None,
**kwargs: Any) -> None:
def __init__(
self,
value: str,
*args: Any,
encoding: Optional[str] = None,
content_type: Optional[str] = None,
**kwargs: Any,
) -> None:
if encoding is None:
if content_type is None:
real_encoding = 'utf-8'
content_type = 'text/plain; charset=utf-8'
real_encoding = "utf-8"
content_type = "text/plain; charset=utf-8"
else:
mimetype = parse_mimetype(content_type)
real_encoding = mimetype.parameters.get('charset', 'utf-8')
real_encoding = mimetype.parameters.get("charset", "utf-8")
else:
if content_type is None:
content_type = 'text/plain; charset=%s' % encoding
content_type = "text/plain; charset=%s" % encoding
real_encoding = encoding
super().__init__(
@ -262,66 +279,58 @@ class StringPayload(BytesPayload):
class StringIOPayload(StringPayload):
def __init__(self,
value: IO[str],
*args: Any,
**kwargs: Any) -> None:
def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
super().__init__(value.read(), *args, **kwargs)
class IOBasePayload(Payload):
_value: IO[Any]
def __init__(self,
value: IO[Any],
disposition: str='attachment',
*args: Any,
**kwargs: Any) -> None:
if 'filename' not in kwargs:
kwargs['filename'] = guess_filename(value)
def __init__(
self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
) -> None:
if "filename" not in kwargs:
kwargs["filename"] = guess_filename(value)
super().__init__(value, *args, **kwargs)
if self._filename is not None and disposition is not None:
if hdrs.CONTENT_DISPOSITION not in self.headers:
self.set_content_disposition(
disposition, filename=self._filename
)
self.set_content_disposition(disposition, filename=self._filename)
async def write(self, writer: AbstractStreamWriter) -> None:
loop = asyncio.get_event_loop()
try:
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
)
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
while chunk:
await writer.write(chunk)
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
)
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
finally:
await loop.run_in_executor(None, self._value.close)
class TextIOPayload(IOBasePayload):
_value: TextIO
def __init__(self,
value: TextIO,
*args: Any,
encoding: Optional[str]=None,
content_type: Optional[str]=None,
**kwargs: Any) -> None:
def __init__(
self,
value: TextIO,
*args: Any,
encoding: Optional[str] = None,
content_type: Optional[str] = None,
**kwargs: Any,
) -> None:
if encoding is None:
if content_type is None:
encoding = 'utf-8'
content_type = 'text/plain; charset=utf-8'
encoding = "utf-8"
content_type = "text/plain; charset=utf-8"
else:
mimetype = parse_mimetype(content_type)
encoding = mimetype.parameters.get('charset', 'utf-8')
encoding = mimetype.parameters.get("charset", "utf-8")
else:
if content_type is None:
content_type = 'text/plain; charset=%s' % encoding
content_type = "text/plain; charset=%s" % encoding
super().__init__(
value,
@ -341,20 +350,20 @@ class TextIOPayload(IOBasePayload):
async def write(self, writer: AbstractStreamWriter) -> None:
loop = asyncio.get_event_loop()
try:
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
)
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
while chunk:
await writer.write(chunk.encode(self._encoding))
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
data = (
chunk.encode(encoding=self._encoding)
if self._encoding
else chunk.encode()
)
await writer.write(data)
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
finally:
await loop.run_in_executor(None, self._value.close)
class BytesIOPayload(IOBasePayload):
@property
def size(self) -> int:
position = self._value.tell()
@ -364,7 +373,6 @@ class BytesIOPayload(IOBasePayload):
class BufferedReaderPayload(IOBasePayload):
@property
def size(self) -> Optional[int]:
try:
@ -376,22 +384,27 @@ class BufferedReaderPayload(IOBasePayload):
class JsonPayload(BytesPayload):
def __init__(self,
value: Any,
encoding: str='utf-8',
content_type: str='application/json',
dumps: JSONEncoder=json.dumps,
*args: Any,
**kwargs: Any) -> None:
def __init__(
self,
value: Any,
encoding: str = "utf-8",
content_type: str = "application/json",
dumps: JSONEncoder = json.dumps,
*args: Any,
**kwargs: Any,
) -> None:
super().__init__(
dumps(value).encode(encoding),
content_type=content_type, encoding=encoding, *args, **kwargs)
content_type=content_type,
encoding=encoding,
*args,
**kwargs,
)
if TYPE_CHECKING: # pragma: no cover
from typing import AsyncIterator, AsyncIterable
from typing import AsyncIterable, AsyncIterator
_AsyncIterator = AsyncIterator[bytes]
_AsyncIterable = AsyncIterable[bytes]
@ -406,17 +419,16 @@ class AsyncIterablePayload(Payload):
_iter = None # type: Optional[_AsyncIterator]
def __init__(self,
value: _AsyncIterable,
*args: Any,
**kwargs: Any) -> None:
def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
if not isinstance(value, AsyncIterable):
raise TypeError("value argument must support "
"collections.abc.AsyncIterablebe interface, "
"got {!r}".format(type(value)))
raise TypeError(
"value argument must support "
"collections.abc.AsyncIterablebe interface, "
"got {!r}".format(type(value))
)
if 'content_type' not in kwargs:
kwargs['content_type'] = 'application/octet-stream'
if "content_type" not in kwargs:
kwargs["content_type"] = "application/octet-stream"
super().__init__(value, *args, **kwargs)
@ -435,7 +447,6 @@ class AsyncIterablePayload(Payload):
class StreamReaderPayload(AsyncIterablePayload):
def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
super().__init__(value.iter_any(), *args, **kwargs)
@ -446,11 +457,9 @@ PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(
BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last for giving a chance to more specialized async iterables like
# multipart.BodyPartReaderPayload to override the default
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable,
order=Order.try_last)
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)


@ -1,4 +1,5 @@
""" Payload implemenation for coroutines as data provider.
"""
Payload implemenation for coroutines as data provider.
As a simple case, you can upload data from file::
@ -21,36 +22,38 @@ Then you can use `file_sender` like this:
"""
import asyncio
import types
import warnings
from typing import Any, Awaitable, Callable, Dict, Tuple
from .abc import AbstractStreamWriter
from .payload import Payload, payload_type
__all__ = ('streamer',)
__all__ = ("streamer",)
class _stream_wrapper:
def __init__(self,
coro: Callable[..., Awaitable[None]],
args: Tuple[Any, ...],
kwargs: Dict[str, Any]) -> None:
self.coro = asyncio.coroutine(coro)
def __init__(
self,
coro: Callable[..., Awaitable[None]],
args: Tuple[Any, ...],
kwargs: Dict[str, Any],
) -> None:
self.coro = types.coroutine(coro)
self.args = args
self.kwargs = kwargs
async def __call__(self, writer: AbstractStreamWriter) -> None:
await self.coro(writer, *self.args, **self.kwargs)
await self.coro(writer, *self.args, **self.kwargs) # type: ignore[operator]
class streamer:
def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
warnings.warn("@streamer is deprecated, use async generators instead",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"@streamer is deprecated, use async generators instead",
DeprecationWarning,
stacklevel=2,
)
self.coro = coro
def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
@ -59,14 +62,12 @@ class streamer:
@payload_type(_stream_wrapper)
class StreamWrapperPayload(Payload):
async def write(self, writer: AbstractStreamWriter) -> None:
await self._value(writer)
@payload_type(streamer)
class StreamPayload(StreamWrapperPayload):
def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
super().__init__(value(), *args, **kwargs)
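Since the deprecation warning above points to async generators, the replacement for a @streamer-decorated coroutine looks roughly like this (the URL and file name are placeholders):

import aiohttp


async def file_sender(file_name: str):
    # A plain async generator; aiohttp wraps it in AsyncIterablePayload.
    with open(file_name, "rb") as f:
        chunk = f.read(2 ** 16)
        while chunk:
            yield chunk
            chunk = f.read(2 ** 16)


async def upload() -> None:
    async with aiohttp.ClientSession() as session:
        await session.post("http://httpbin.org/post", data=file_sender("big.bin"))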


@ -1 +1 @@
Marker
Marker


@ -2,6 +2,7 @@ import asyncio
import contextlib
import warnings
from collections.abc import Callable
from typing import Any, Awaitable, Callable, Dict, Generator, Optional, Union
import pytest
@ -16,8 +17,8 @@ from .test_utils import (
loop_context,
setup_test_loop,
teardown_test_loop,
unused_port as _unused_port,
)
from .test_utils import unused_port as _unused_port
try:
import uvloop
@ -29,21 +30,33 @@ try:
except ImportError: # pragma: no cover
tokio = None
def pytest_addoption(parser): # type: ignore
parser.addoption(
'--aiohttp-fast', action='store_true', default=False,
help='run tests faster by disabling extra checks')
parser.addoption(
'--aiohttp-loop', action='store', default='pyloop',
help='run tests with specific loop: pyloop, uvloop, tokio or all')
parser.addoption(
'--aiohttp-enable-loop-debug', action='store_true', default=False,
help='enable event loop debug mode')
AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]
def pytest_fixture_setup(fixturedef): # type: ignore
"""
def pytest_addoption(parser): # type: ignore[no-untyped-def]
parser.addoption(
"--aiohttp-fast",
action="store_true",
default=False,
help="run tests faster by disabling extra checks",
)
parser.addoption(
"--aiohttp-loop",
action="store",
default="pyloop",
help="run tests with specific loop: pyloop, uvloop, tokio or all",
)
parser.addoption(
"--aiohttp-enable-loop-debug",
action="store_true",
default=False,
help="enable event loop debug mode",
)
def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def]
"""Set up pytest fixture.
Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
"""
func = fixturedef.func
@ -59,35 +72,35 @@ def pytest_fixture_setup(fixturedef): # type: ignore
return
strip_request = False
if 'request' not in fixturedef.argnames:
fixturedef.argnames += ('request',)
if "request" not in fixturedef.argnames:
fixturedef.argnames += ("request",)
strip_request = True
def wrapper(*args, **kwargs): # type: ignore
request = kwargs['request']
def wrapper(*args, **kwargs): # type: ignore[no-untyped-def]
request = kwargs["request"]
if strip_request:
del kwargs['request']
del kwargs["request"]
# if neither the fixture nor the test use the 'loop' fixture,
# 'getfixturevalue' will fail because the test is not parameterized
# (this can be removed someday if 'loop' is no longer parameterized)
if 'loop' not in request.fixturenames:
if "loop" not in request.fixturenames:
raise Exception(
"Asynchronous fixtures must depend on the 'loop' fixture or "
"be used in tests depending from it."
)
_loop = request.getfixturevalue('loop')
_loop = request.getfixturevalue("loop")
if is_async_gen:
# for async generators, we need to advance the generator once,
# then advance it again in a finalizer
gen = func(*args, **kwargs)
def finalizer(): # type: ignore
def finalizer(): # type: ignore[no-untyped-def]
try:
return _loop.run_until_complete(gen.__anext__())
except StopAsyncIteration: # NOQA
except StopAsyncIteration:
pass
request.addfinalizer(finalizer)
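To illustrate the setup/finalizer dance above, a sketch of an async-generator fixture this plugin would drive (FakeDB and connect_db are stand-ins, not real APIs):

import asyncio

import pytest


class FakeDB:
    async def close(self) -> None:
        pass


async def connect_db() -> FakeDB:
    await asyncio.sleep(0)  # stand-in for real connection setup
    return FakeDB()


@pytest.fixture
async def database(loop):  # must depend on the 'loop' fixture, per the check above
    db = await connect_db()
    yield db            # advanced once at setup...
    await db.close()    # ...and run to completion by the finalizer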
@ -99,42 +112,46 @@ def pytest_fixture_setup(fixturedef): # type: ignore
@pytest.fixture
def fast(request): # type: ignore
def fast(request): # type: ignore[no-untyped-def]
"""--fast config option"""
return request.config.getoption('--aiohttp-fast')
return request.config.getoption("--aiohttp-fast")
@pytest.fixture
def loop_debug(request): # type: ignore
def loop_debug(request): # type: ignore[no-untyped-def]
"""--enable-loop-debug config option"""
return request.config.getoption('--aiohttp-enable-loop-debug')
return request.config.getoption("--aiohttp-enable-loop-debug")
@contextlib.contextmanager
def _runtime_warning_context(): # type: ignore
"""
Context manager which checks for RuntimeWarnings, specifically to
def _runtime_warning_context(): # type: ignore[no-untyped-def]
"""Context manager which checks for RuntimeWarnings.
This exists specifically to
avoid "coroutine 'X' was never awaited" warnings being missed.
If RuntimeWarnings occur in the context a RuntimeError is raised.
"""
with warnings.catch_warnings(record=True) as _warnings:
yield
rw = ['{w.filename}:{w.lineno}:{w.message}'.format(w=w)
for w in _warnings # type: ignore
if w.category == RuntimeWarning]
rw = [
"{w.filename}:{w.lineno}:{w.message}".format(w=w)
for w in _warnings
if w.category == RuntimeWarning
]
if rw:
raise RuntimeError('{} Runtime Warning{},\n{}'.format(
len(rw),
'' if len(rw) == 1 else 's',
'\n'.join(rw)
))
raise RuntimeError(
"{} Runtime Warning{},\n{}".format(
len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
)
)
@contextlib.contextmanager
def _passthrough_loop_context(loop, fast=False): # type: ignore
"""
setups and tears down a loop unless one is passed in via the loop
def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
"""Passthrough loop context.
Sets up and tears down a loop unless one is passed in via the loop
argument when it's passed straight through.
"""
if loop:
@ -147,66 +164,66 @@ def _passthrough_loop_context(loop, fast=False): # type: ignore
teardown_test_loop(loop, fast=fast)
def pytest_pycollect_makeitem(collector, name, obj): # type: ignore
"""
Fix pytest collecting for coroutines.
"""
def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def]
"""Fix pytest collecting for coroutines."""
if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
return list(collector._genfunctions(name, obj))
def pytest_pyfunc_call(pyfuncitem): # type: ignore
"""
Run coroutines in an event loop instead of a normal function call.
"""
def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def]
"""Run coroutines in an event loop instead of a normal function call."""
fast = pyfuncitem.config.getoption("--aiohttp-fast")
if asyncio.iscoroutinefunction(pyfuncitem.function):
existing_loop = pyfuncitem.funcargs.get('proactor_loop')\
or pyfuncitem.funcargs.get('loop', None)
existing_loop = pyfuncitem.funcargs.get(
"proactor_loop"
) or pyfuncitem.funcargs.get("loop", None)
with _runtime_warning_context():
with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
testargs = {arg: pyfuncitem.funcargs[arg]
for arg in pyfuncitem._fixtureinfo.argnames}
testargs = {
arg: pyfuncitem.funcargs[arg]
for arg in pyfuncitem._fixtureinfo.argnames
}
_loop.run_until_complete(pyfuncitem.obj(**testargs))
return True
def pytest_generate_tests(metafunc): # type: ignore
if 'loop_factory' not in metafunc.fixturenames:
def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def]
if "loop_factory" not in metafunc.fixturenames:
return
loops = metafunc.config.option.aiohttp_loop
avail_factories = {'pyloop': asyncio.DefaultEventLoopPolicy}
avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}
if uvloop is not None: # pragma: no cover
avail_factories['uvloop'] = uvloop.EventLoopPolicy
avail_factories["uvloop"] = uvloop.EventLoopPolicy
if tokio is not None: # pragma: no cover
avail_factories['tokio'] = tokio.EventLoopPolicy
avail_factories["tokio"] = tokio.EventLoopPolicy
if loops == 'all':
loops = 'pyloop,uvloop?,tokio?'
if loops == "all":
loops = "pyloop,uvloop?,tokio?"
factories = {} # type: ignore
for name in loops.split(','):
required = not name.endswith('?')
name = name.strip(' ?')
factories = {} # type: ignore[var-annotated]
for name in loops.split(","):
required = not name.endswith("?")
name = name.strip(" ?")
if name not in avail_factories: # pragma: no cover
if required:
raise ValueError(
"Unknown loop '%s', available loops: %s" % (
name, list(factories.keys())))
"Unknown loop '%s', available loops: %s"
% (name, list(factories.keys()))
)
else:
continue
factories[name] = avail_factories[name]
metafunc.parametrize("loop_factory",
list(factories.values()),
ids=list(factories.keys()))
metafunc.parametrize(
"loop_factory", list(factories.values()), ids=list(factories.keys())
)
@pytest.fixture
def loop(loop_factory, fast, loop_debug): # type: ignore
def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def]
"""Return an instance of the event loop."""
policy = loop_factory()
asyncio.set_event_loop_policy(policy)
@ -218,12 +235,12 @@ def loop(loop_factory, fast, loop_debug): # type: ignore
@pytest.fixture
def proactor_loop(): # type: ignore
def proactor_loop(): # type: ignore[no-untyped-def]
if not PY_37:
policy = asyncio.get_event_loop_policy()
policy._loop_factory = asyncio.ProactorEventLoop # type: ignore
policy._loop_factory = asyncio.ProactorEventLoop # type: ignore[attr-defined]
else:
policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore
policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined]
asyncio.set_event_loop_policy(policy)
with loop_context(policy.new_event_loop) as _loop:
@ -232,27 +249,30 @@ def proactor_loop(): # type: ignore
@pytest.fixture
def unused_port(aiohttp_unused_port): # type: ignore # pragma: no cover
warnings.warn("Deprecated, use aiohttp_unused_port fixture instead",
DeprecationWarning)
def unused_port(aiohttp_unused_port): # type: ignore[no-untyped-def] # pragma: no cover
warnings.warn(
"Deprecated, use aiohttp_unused_port fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_unused_port
@pytest.fixture
def aiohttp_unused_port(): # type: ignore
def aiohttp_unused_port(): # type: ignore[no-untyped-def]
"""Return a port that is unused on the current host."""
return _unused_port
@pytest.fixture
def aiohttp_server(loop): # type: ignore
def aiohttp_server(loop): # type: ignore[no-untyped-def]
"""Factory to create a TestServer instance, given an app.
aiohttp_server(app, **kwargs)
"""
servers = []
async def go(app, *, port=None, **kwargs): # type: ignore
async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def]
server = TestServer(app, port=port)
await server.start_server(loop=loop, **kwargs)
servers.append(server)
@ -260,7 +280,7 @@ def aiohttp_server(loop): # type: ignore
yield go
async def finalize(): # type: ignore
async def finalize() -> None:
while servers:
await servers.pop().close()
@ -268,21 +288,24 @@ def aiohttp_server(loop): # type: ignore
@pytest.fixture
def test_server(aiohttp_server): # type: ignore # pragma: no cover
warnings.warn("Deprecated, use aiohttp_server fixture instead",
DeprecationWarning)
def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover
warnings.warn(
"Deprecated, use aiohttp_server fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_server
@pytest.fixture
def aiohttp_raw_server(loop): # type: ignore
def aiohttp_raw_server(loop): # type: ignore[no-untyped-def]
"""Factory to create a RawTestServer instance, given a web handler.
aiohttp_raw_server(handler, **kwargs)
"""
servers = []
async def go(handler, *, port=None, **kwargs): # type: ignore
async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def]
server = RawTestServer(handler, port=port)
await server.start_server(loop=loop, **kwargs)
servers.append(server)
@ -290,7 +313,7 @@ def aiohttp_raw_server(loop): # type: ignore
yield go
async def finalize(): # type: ignore
async def finalize() -> None:
while servers:
await servers.pop().close()
@ -298,14 +321,21 @@ def aiohttp_raw_server(loop): # type: ignore
@pytest.fixture
def raw_test_server(aiohttp_raw_server): # type: ignore # pragma: no cover
warnings.warn("Deprecated, use aiohttp_raw_server fixture instead",
DeprecationWarning)
def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover
aiohttp_raw_server,
):
warnings.warn(
"Deprecated, use aiohttp_raw_server fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_raw_server
@pytest.fixture
def aiohttp_client(loop): # type: ignore
def aiohttp_client(
loop: asyncio.AbstractEventLoop,
) -> Generator[AiohttpClient, None, None]:
"""Factory to create a TestClient instance.
aiohttp_client(app, **kwargs)
@ -314,10 +344,16 @@ def aiohttp_client(loop): # type: ignore
"""
clients = []
async def go(__param, *args, server_kwargs=None, **kwargs): # type: ignore
async def go(
__param: Union[Application, BaseTestServer],
*args: Any,
server_kwargs: Optional[Dict[str, Any]] = None,
**kwargs: Any
) -> TestClient:
if (isinstance(__param, Callable) and # type: ignore
not isinstance(__param, (Application, BaseTestServer))):
if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type]
__param, (Application, BaseTestServer)
):
__param = __param(loop, *args, **kwargs)
kwargs = {}
else:
@ -338,7 +374,7 @@ def aiohttp_client(loop): # type: ignore
yield go
async def finalize(): # type: ignore
async def finalize() -> None:
while clients:
await clients.pop().close()
@ -346,7 +382,10 @@ def aiohttp_client(loop): # type: ignore
@pytest.fixture
def test_client(aiohttp_client): # type: ignore # pragma: no cover
warnings.warn("Deprecated, use aiohttp_client fixture instead",
DeprecationWarning)
def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover
warnings.warn(
"Deprecated, use aiohttp_client fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_client
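Taken together, these fixtures support plain async def tests, collected by pytest_pycollect_makeitem above. A minimal sketch:

from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.Response(text="Hello, world")


async def test_hello(aiohttp_client) -> None:
    app = web.Application()
    app.router.add_get("/", hello)
    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200
    assert "Hello, world" in await resp.text()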


@ -1,14 +1,15 @@
import asyncio
import socket
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Type, Union
from .abc import AbstractResolver
from .helpers import get_running_loop
__all__ = ('ThreadedResolver', 'AsyncResolver', 'DefaultResolver')
__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
try:
import aiodns
# aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
except ImportError: # pragma: no cover
aiodns = None
@ -17,25 +18,56 @@ aiodns_default = False
class ThreadedResolver(AbstractResolver):
"""Use Executor for synchronous getaddrinfo() calls, which defaults to
concurrent.futures.ThreadPoolExecutor.
"""Threaded resolver.
Uses an Executor for synchronous getaddrinfo() calls.
concurrent.futures.ThreadPoolExecutor is used by default.
"""
def __init__(self, loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
self._loop = get_running_loop(loop)
async def resolve(self, host: str, port: int=0,
family: int=socket.AF_INET) -> List[Dict[str, Any]]:
async def resolve(
self, hostname: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
infos = await self._loop.getaddrinfo(
host, port, type=socket.SOCK_STREAM, family=family)
hostname,
port,
type=socket.SOCK_STREAM,
family=family,
flags=socket.AI_ADDRCONFIG,
)
hosts = []
for family, _, proto, _, address in infos:
if family == socket.AF_INET6:
if len(address) < 3:
# IPv6 is not supported by Python build,
# or IPv6 is not enabled in the host
continue
if address[3]: # type: ignore[misc]
# This is essential for link-local IPv6 addresses.
# LL IPv6 is a VERY rare case. Strictly speaking, we should use
# getnameinfo() unconditionally, but skipping it for the common case
# is a worthwhile performance optimization.
host, _port = socket.getnameinfo(
address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
)
port = int(_port)
else:
host, port = address[:2]
else: # IPv4
assert family == socket.AF_INET
host, port = address # type: ignore[misc]
hosts.append(
{'hostname': host,
'host': address[0], 'port': address[1],
'family': family, 'proto': proto,
'flags': socket.AI_NUMERICHOST})
{
"hostname": hostname,
"host": host,
"port": port,
"family": family,
"proto": proto,
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
}
)
return hosts
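The dicts built above are what callers receive from resolve(). A small sketch (example.com is a placeholder host):

import socket

from aiohttp.resolver import ThreadedResolver


async def show_addresses() -> None:
    resolver = ThreadedResolver()  # must be created while a loop is running
    for entry in await resolver.resolve("example.com", 443, family=socket.AF_INET):
        print(entry["host"], entry["port"], entry["family"], entry["flags"])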
@ -46,20 +78,25 @@ class ThreadedResolver(AbstractResolver):
class AsyncResolver(AbstractResolver):
"""Use the `aiodns` package to make asynchronous DNS lookups"""
def __init__(self, loop: Optional[asyncio.AbstractEventLoop]=None,
*args: Any, **kwargs: Any) -> None:
def __init__(
self,
loop: Optional[asyncio.AbstractEventLoop] = None,
*args: Any,
**kwargs: Any
) -> None:
if aiodns is None:
raise RuntimeError("Resolver requires aiodns library")
self._loop = get_running_loop(loop)
self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)
if not hasattr(self._resolver, 'gethostbyname'):
if not hasattr(self._resolver, "gethostbyname"):
# aiodns 1.1 is not available, fallback to DNSResolver.query
self.resolve = self._resolve_with_query # type: ignore
async def resolve(self, host: str, port: int=0,
family: int=socket.AF_INET) -> List[Dict[str, Any]]:
async def resolve(
self, host: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
try:
resp = await self._resolver.gethostbyname(host, family)
except aiodns.error.DNSError as exc:
@ -68,10 +105,15 @@ class AsyncResolver(AbstractResolver):
hosts = []
for address in resp.addresses:
hosts.append(
{'hostname': host,
'host': address, 'port': port,
'family': family, 'proto': 0,
'flags': socket.AI_NUMERICHOST})
{
"hostname": host,
"host": address,
"port": port,
"family": family,
"proto": 0,
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
}
)
if not hosts:
raise OSError("DNS lookup failed")
@ -79,12 +121,12 @@ class AsyncResolver(AbstractResolver):
return hosts
async def _resolve_with_query(
self, host: str, port: int=0,
family: int=socket.AF_INET) -> List[Dict[str, Any]]:
self, host: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
if family == socket.AF_INET6:
qtype = 'AAAA'
qtype = "AAAA"
else:
qtype = 'A'
qtype = "A"
try:
resp = await self._resolver.query(host, qtype)
@ -95,10 +137,15 @@ class AsyncResolver(AbstractResolver):
hosts = []
for rr in resp:
hosts.append(
{'hostname': host,
'host': rr.host, 'port': port,
'family': family, 'proto': 0,
'flags': socket.AI_NUMERICHOST})
{
"hostname": host,
"host": rr.host,
"port": port,
"family": family,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
)
if not hosts:
raise OSError("DNS lookup failed")
@ -106,7 +153,8 @@ class AsyncResolver(AbstractResolver):
return hosts
async def close(self) -> None:
return self._resolver.cancel()
self._resolver.cancel()
DefaultResolver = AsyncResolver if aiodns_default else ThreadedResolver
_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
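DefaultResolver is picked automatically, but either implementation can be handed to a connector explicitly. A sketch using AsyncResolver (requires aiodns; the nameserver IPs are illustrative):

import aiohttp
from aiohttp.resolver import AsyncResolver


async def fetch() -> bytes:
    resolver = AsyncResolver(nameservers=["8.8.8.8", "8.8.4.4"])
    connector = aiohttp.TCPConnector(resolver=resolver)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get("https://example.com") as resp:
            return await resp.read()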


@ -1,34 +0,0 @@
from aiohttp.frozenlist import FrozenList
__all__ = ('Signal',)
class Signal(FrozenList):
"""Coroutine-based signal implementation.
To connect a callback to a signal, use any list method.
Signals are fired using the send() coroutine, which takes named
arguments.
"""
__slots__ = ('_owner',)
def __init__(self, owner):
super().__init__()
self._owner = owner
def __repr__(self):
return '<Signal owner={}, frozen={}, {!r}>'.format(self._owner,
self.frozen,
list(self))
async def send(self, *args, **kwargs):
"""
Sends data to all registered receivers.
"""
if not self.frozen:
raise RuntimeError("Cannot send non-frozen signal.")
for receiver in self:
await receiver(*args, **kwargs) # type: ignore
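This module is removed in favor of the external aiosignal package (imported as such elsewhere in this diff), which keeps the same freeze/send contract. A sketch of equivalent usage with aiosignal:

import asyncio

from aiosignal import Signal


async def on_event(value: int) -> None:
    print("received", value)


async def main() -> None:
    signal = Signal(owner=None)  # owner is only used in the repr
    signal.append(on_event)      # connect with any list method
    signal.freeze()              # send() requires a frozen signal
    await signal.send(value=42)


asyncio.run(main())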


@ -1,17 +0,0 @@
from typing import Any, Generic, TypeVar
from aiohttp.frozenlist import FrozenList
__all__ = ('Signal',)
_T = TypeVar('_T')
class Signal(FrozenList[_T], Generic[_T]):
def __init__(self, owner: Any) -> None: ...
def __repr__(self) -> str: ...
async def send(self, *args: Any, **kwargs: Any) -> None: ...


@ -1,25 +1,22 @@
import asyncio
import collections
import warnings
from typing import List # noqa
from typing import Awaitable, Callable, Generic, Optional, Tuple, TypeVar
from typing import Awaitable, Callable, Deque, Generic, List, Optional, Tuple, TypeVar
from .base_protocol import BaseProtocol
from .helpers import BaseTimerContext, set_exception, set_result
from .log import internal_logger
try: # pragma: no cover
from typing import Deque # noqa
except ImportError:
from typing_extensions import Deque # noqa
from .typedefs import Final
__all__ = (
'EMPTY_PAYLOAD', 'EofStream', 'StreamReader', 'DataQueue',
'FlowControlDataQueue')
"EMPTY_PAYLOAD",
"EofStream",
"StreamReader",
"DataQueue",
"FlowControlDataQueue",
)
DEFAULT_LIMIT = 2 ** 16
_T = TypeVar('_T')
_T = TypeVar("_T")
class EofStream(Exception):
@ -27,66 +24,65 @@ class EofStream(Exception):
class AsyncStreamIterator(Generic[_T]):
def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
self.read_func = read_func
def __aiter__(self) -> 'AsyncStreamIterator[_T]':
def __aiter__(self) -> "AsyncStreamIterator[_T]":
return self
async def __anext__(self) -> _T:
try:
rv = await self.read_func()
except EofStream:
raise StopAsyncIteration # NOQA
if rv == b'':
raise StopAsyncIteration # NOQA
raise StopAsyncIteration
if rv == b"":
raise StopAsyncIteration
return rv
class ChunkTupleAsyncStreamIterator:
def __init__(self, stream: 'StreamReader') -> None:
def __init__(self, stream: "StreamReader") -> None:
self._stream = stream
def __aiter__(self) -> 'ChunkTupleAsyncStreamIterator':
def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
return self
async def __anext__(self) -> Tuple[bytes, bool]:
rv = await self._stream.readchunk()
if rv == (b'', False):
raise StopAsyncIteration # NOQA
if rv == (b"", False):
raise StopAsyncIteration
return rv
class AsyncStreamReaderMixin:
def __aiter__(self) -> AsyncStreamIterator[bytes]:
return AsyncStreamIterator(self.readline) # type: ignore
return AsyncStreamIterator(self.readline) # type: ignore[attr-defined]
def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
"""Returns an asynchronous iterator that yields chunks of size n.
Python-3.5 available for Python 3.5+ only
"""
return AsyncStreamIterator(lambda: self.read(n)) # type: ignore
return AsyncStreamIterator(
lambda: self.read(n) # type: ignore[attr-defined,no-any-return]
)
def iter_any(self) -> AsyncStreamIterator[bytes]:
"""Returns an asynchronous iterator that yields all the available
data as soon as it is received
"""Yield all available data as soon as it is received.
Python-3.5 available for Python 3.5+ only
"""
return AsyncStreamIterator(self.readany) # type: ignore
return AsyncStreamIterator(self.readany) # type: ignore[attr-defined]
def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
"""Returns an asynchronous iterator that yields chunks of data
as they are received by the server. The yielded objects are tuples
"""Yield chunks of data as they are received by the server.
The yielded objects are tuples
of (bytes, bool) as returned by the StreamReader.readchunk method.
Python-3.5 available for Python 3.5+ only
"""
return ChunkTupleAsyncStreamIterator(self) # type: ignore
return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type]
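The iterator helpers above map directly onto response streaming. A sketch of consuming a body chunk by chunk:

import aiohttp


async def download(url: str) -> bytes:
    buf = bytearray()
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            # resp.content is a StreamReader; each item is (bytes, bool)
            async for data, end_of_http_chunk in resp.content.iter_chunks():
                buf.extend(data)
    return bytes(buf)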
class StreamReader(AsyncStreamReaderMixin):
@ -105,10 +101,14 @@ class StreamReader(AsyncStreamReaderMixin):
total_bytes = 0
def __init__(self, protocol: BaseProtocol,
*, limit: int=DEFAULT_LIMIT,
timer: Optional[BaseTimerContext]=None,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
def __init__(
self,
protocol: BaseProtocol,
limit: int,
*,
timer: Optional[BaseTimerContext] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
self._protocol = protocol
self._low_water = limit
self._high_water = limit * 2
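Note the constructor change: limit is now a required positional parameter rather than a keyword defaulting to DEFAULT_LIMIT. A sketch of the adjusted call (protocol and loop come from the caller):

import asyncio

from aiohttp.base_protocol import BaseProtocol
from aiohttp.streams import StreamReader


def make_reader(
    protocol: BaseProtocol, loop: asyncio.AbstractEventLoop
) -> StreamReader:
    # Was StreamReader(protocol); the buffer limit must now be explicit.
    return StreamReader(protocol, 2 ** 16, loop=loop)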
@ -130,16 +130,19 @@ class StreamReader(AsyncStreamReaderMixin):
def __repr__(self) -> str:
info = [self.__class__.__name__]
if self._size:
info.append('%d bytes' % self._size)
info.append("%d bytes" % self._size)
if self._eof:
info.append('eof')
if self._low_water != DEFAULT_LIMIT:
info.append('low=%d high=%d' % (self._low_water, self._high_water))
info.append("eof")
if self._low_water != 2 ** 16: # default limit
info.append("low=%d high=%d" % (self._low_water, self._high_water))
if self._waiter:
info.append('w=%r' % self._waiter)
info.append("w=%r" % self._waiter)
if self._exception:
info.append('e=%r' % self._exception)
return '<%s>' % ' '.join(info)
info.append("e=%r" % self._exception)
return "<%s>" % " ".join(info)
def get_read_buffer_limits(self) -> Tuple[int, int]:
return (self._low_water, self._high_water)
def exception(self) -> Optional[BaseException]:
return self._exception
@ -163,7 +166,7 @@ class StreamReader(AsyncStreamReaderMixin):
try:
callback()
except Exception:
internal_logger.exception('Exception in eof callback')
internal_logger.exception("Exception in eof callback")
else:
self._eof_callbacks.append(callback)
@ -184,7 +187,7 @@ class StreamReader(AsyncStreamReaderMixin):
try:
cb()
except Exception:
internal_logger.exception('Exception in eof callback')
internal_logger.exception("Exception in eof callback")
self._eof_callbacks.clear()
@ -208,17 +211,18 @@ class StreamReader(AsyncStreamReaderMixin):
self._eof_waiter = None
def unread_data(self, data: bytes) -> None:
""" rollback reading some data from stream, inserting it to buffer head.
"""
warnings.warn("unread_data() is deprecated "
"and will be removed in future releases (#3260)",
DeprecationWarning,
stacklevel=2)
"""rollback reading some data from stream, inserting it to buffer head."""
warnings.warn(
"unread_data() is deprecated "
"and will be removed in future releases (#3260)",
DeprecationWarning,
stacklevel=2,
)
if not data:
return
if self._buffer_offset:
self._buffer[0] = self._buffer[0][self._buffer_offset:]
self._buffer[0] = self._buffer[0][self._buffer_offset :]
self._buffer_offset = 0
self._size += len(data)
self._cursor -= len(data)
@ -226,8 +230,8 @@ class StreamReader(AsyncStreamReaderMixin):
self._eof_counter = 0
# TODO: size is ignored, remove the param later
def feed_data(self, data: bytes, size: int=0) -> None:
assert not self._eof, 'feed_data after feed_eof'
def feed_data(self, data: bytes, size: int = 0) -> None:
assert not self._eof, "feed_data after feed_eof"
if not data:
return
@ -241,21 +245,23 @@ class StreamReader(AsyncStreamReaderMixin):
self._waiter = None
set_result(waiter, None)
if (self._size > self._high_water and
not self._protocol._reading_paused):
if self._size > self._high_water and not self._protocol._reading_paused:
self._protocol.pause_reading()
def begin_http_chunk_receiving(self) -> None:
if self._http_chunk_splits is None:
if self.total_bytes:
raise RuntimeError("Called begin_http_chunk_receiving when"
"some data was already fed")
raise RuntimeError(
"Called begin_http_chunk_receiving when" "some data was already fed"
)
self._http_chunk_splits = []
def end_http_chunk_receiving(self) -> None:
if self._http_chunk_splits is None:
raise RuntimeError("Called end_chunk_receiving without calling "
"begin_chunk_receiving first")
raise RuntimeError(
"Called end_chunk_receiving without calling "
"begin_chunk_receiving first"
)
# self._http_chunk_splits contains logical byte offsets from start of
# the body transfer. Each offset is the offset of the end of a chunk.
@ -286,8 +292,10 @@ class StreamReader(AsyncStreamReaderMixin):
# would have an unexpected behaviour. It would not be possible to know
# which coroutine would get the next data.
if self._waiter is not None:
raise RuntimeError('%s() called while another coroutine is '
'already waiting for incoming data' % func_name)
raise RuntimeError(
"%s() called while another coroutine is "
"already waiting for incoming data" % func_name
)
waiter = self._waiter = self._loop.create_future()
try:
@ -300,36 +308,43 @@ class StreamReader(AsyncStreamReaderMixin):
self._waiter = None
async def readline(self) -> bytes:
return await self.readuntil()
async def readuntil(self, separator: bytes = b"\n") -> bytes:
seplen = len(separator)
if seplen == 0:
raise ValueError("Separator should be at least one-byte string")
if self._exception is not None:
raise self._exception
line = []
line_size = 0
chunk = b""
chunk_size = 0
not_enough = True
while not_enough:
while self._buffer and not_enough:
offset = self._buffer_offset
ichar = self._buffer[0].find(b'\n', offset) + 1
# Read from current offset to found b'\n' or to the end.
ichar = self._buffer[0].find(separator, offset) + 1
# Read from current offset to found separator or to the end.
data = self._read_nowait_chunk(ichar - offset if ichar else -1)
line.append(data)
line_size += len(data)
chunk += data
chunk_size += len(data)
if ichar:
not_enough = False
if line_size > self._high_water:
raise ValueError('Line is too long')
if chunk_size > self._high_water:
raise ValueError("Chunk too big")
if self._eof:
break
if not_enough:
await self._wait('readline')
await self._wait("readuntil")
return b''.join(line)
return chunk
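Per the implementation above, readuntil() generalizes readline() to any separator and returns the chunk including the separator when one is found. A sketch against a response body:

import aiohttp


async def read_first_record(url: str) -> bytes:
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            # reads up to and including the first CRLF
            return await resp.content.readuntil(b"\r\n")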
async def read(self, n: int=-1) -> bytes:
async def read(self, n: int = -1) -> bytes:
if self._exception is not None:
raise self._exception
@ -339,14 +354,16 @@ class StreamReader(AsyncStreamReaderMixin):
# lets keep this code one major release.
if __debug__:
if self._eof and not self._buffer:
self._eof_counter = getattr(self, '_eof_counter', 0) + 1
self._eof_counter = getattr(self, "_eof_counter", 0) + 1
if self._eof_counter > 5:
internal_logger.warning(
'Multiple access to StreamReader in eof state, '
'might be infinite loop.', stack_info=True)
"Multiple access to StreamReader in eof state, "
"might be infinite loop.",
stack_info=True,
)
if not n:
return b''
return b""
if n < 0:
# This used to just loop creating a new waiter hoping to
@ -359,13 +376,13 @@ class StreamReader(AsyncStreamReaderMixin):
if not block:
break
blocks.append(block)
return b''.join(blocks)
return b"".join(blocks)
# TODO: should be `if` instead of `while`
# because waiter maybe triggered on chunk end,
# without feeding any data
while not self._buffer and not self._eof:
await self._wait('read')
await self._wait("read")
return self._read_nowait(n)
@ -377,12 +394,14 @@ class StreamReader(AsyncStreamReaderMixin):
# because waiter maybe triggered on chunk end,
# without feeding any data
while not self._buffer and not self._eof:
await self._wait('readany')
await self._wait("readany")
return self._read_nowait(-1)
async def readchunk(self) -> Tuple[bytes, bool]:
"""Returns a tuple of (data, end_of_http_chunk). When chunked transfer
"""Returns a tuple of (data, end_of_http_chunk).
When chunked transfer
encoding is used, end_of_http_chunk is a boolean indicating if the end
of the data corresponds to the end of an HTTP chunk, otherwise it is
always False.
@ -396,9 +415,11 @@ class StreamReader(AsyncStreamReaderMixin):
if pos == self._cursor:
return (b"", True)
if pos > self._cursor:
return (self._read_nowait(pos-self._cursor), True)
internal_logger.warning('Skipping HTTP chunk end due to data '
'consumption beyond chunk boundary')
return (self._read_nowait(pos - self._cursor), True)
internal_logger.warning(
"Skipping HTTP chunk end due to data "
"consumption beyond chunk boundary"
)
if self._buffer:
return (self._read_nowait_chunk(-1), False)
@ -407,9 +428,9 @@ class StreamReader(AsyncStreamReaderMixin):
if self._eof:
# Special case for signifying EOF.
# (b'', True) is not a final return value actually.
return (b'', False)
return (b"", False)
await self._wait('readchunk')
await self._wait("readchunk")
async def readexactly(self, n: int) -> bytes:
if self._exception is not None:
@ -419,15 +440,14 @@ class StreamReader(AsyncStreamReaderMixin):
while n > 0:
block = await self.read(n)
if not block:
partial = b''.join(blocks)
raise asyncio.IncompleteReadError(
partial, len(partial) + n)
partial = b"".join(blocks)
raise asyncio.IncompleteReadError(partial, len(partial) + n)
blocks.append(block)
n -= len(block)
return b''.join(blocks)
return b"".join(blocks)
def read_nowait(self, n: int=-1) -> bytes:
def read_nowait(self, n: int = -1) -> bytes:
# default was changed to be consistent with .read(-1)
#
# I believe the most users don't know about the method and
@ -437,7 +457,8 @@ class StreamReader(AsyncStreamReaderMixin):
if self._waiter and not self._waiter.done():
raise RuntimeError(
'Called while some coroutine is waiting for incoming data.')
"Called while some coroutine is waiting for incoming data."
)
return self._read_nowait(n)
@ -445,7 +466,7 @@ class StreamReader(AsyncStreamReaderMixin):
first_buffer = self._buffer[0]
offset = self._buffer_offset
if n != -1 and len(first_buffer) - offset > n:
data = first_buffer[offset:offset + n]
data = first_buffer[offset : offset + n]
self._buffer_offset += n
elif offset:
@ -469,7 +490,7 @@ class StreamReader(AsyncStreamReaderMixin):
return data
def _read_nowait(self, n: int) -> bytes:
""" Read not more than n bytes, or whole buffer is n == -1 """
"""Read not more than n bytes, or whole buffer if n == -1"""
chunks = []
while self._buffer:
@ -480,10 +501,12 @@ class StreamReader(AsyncStreamReaderMixin):
if n == 0:
break
return b''.join(chunks) if chunks else b''
return b"".join(chunks) if chunks else b""
class EmptyStreamReader(AsyncStreamReaderMixin):
class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init]
def __init__(self) -> None:
pass
def exception(self) -> Optional[BaseException]:
return None
@ -495,7 +518,7 @@ class EmptyStreamReader(AsyncStreamReaderMixin):
try:
callback()
except Exception:
internal_logger.exception('Exception in eof callback')
internal_logger.exception("Exception in eof callback")
def feed_eof(self) -> None:
pass
@ -509,29 +532,31 @@ class EmptyStreamReader(AsyncStreamReaderMixin):
async def wait_eof(self) -> None:
return
def feed_data(self, data: bytes, n: int=0) -> None:
def feed_data(self, data: bytes, n: int = 0) -> None:
pass
async def readline(self) -> bytes:
return b''
return b""
async def read(self, n: int=-1) -> bytes:
return b''
async def read(self, n: int = -1) -> bytes:
return b""
# TODO add async def readuntil
async def readany(self) -> bytes:
return b''
return b""
async def readchunk(self) -> Tuple[bytes, bool]:
return (b'', True)
return (b"", True)
async def readexactly(self, n: int) -> bytes:
raise asyncio.IncompleteReadError(b'', n)
raise asyncio.IncompleteReadError(b"", n)
def read_nowait(self) -> bytes:
return b''
def read_nowait(self, n: int = -1) -> bytes:
return b""
EMPTY_PAYLOAD = EmptyStreamReader()
EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()
class DataQueue(Generic[_T]):
@ -566,7 +591,7 @@ class DataQueue(Generic[_T]):
self._waiter = None
set_exception(waiter, exc)
def feed_data(self, data: _T, size: int=0) -> None:
def feed_data(self, data: _T, size: int = 0) -> None:
self._size += size
self._buffer.append((data, size))
@ -610,17 +635,18 @@ class DataQueue(Generic[_T]):
class FlowControlDataQueue(DataQueue[_T]):
"""FlowControlDataQueue resumes and pauses an underlying stream.
It is a destination for parsed data."""
It is a destination for parsed data.
"""
def __init__(self, protocol: BaseProtocol, *,
limit: int=DEFAULT_LIMIT,
loop: asyncio.AbstractEventLoop) -> None:
def __init__(
self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
) -> None:
super().__init__(loop=loop)
self._protocol = protocol
self._limit = limit * 2
def feed_data(self, data: _T, size: int=0) -> None:
def feed_data(self, data: _T, size: int = 0) -> None:
super().feed_data(data, size)
if self._size > self._limit and not self._protocol._reading_paused:


@ -5,30 +5,25 @@ import socket
from contextlib import suppress
from typing import Optional # noqa
__all__ = ('tcp_keepalive', 'tcp_nodelay', 'tcp_cork')
__all__ = ("tcp_keepalive", "tcp_nodelay")
if hasattr(socket, 'TCP_CORK'): # pragma: no cover
CORK = socket.TCP_CORK # type: Optional[int]
elif hasattr(socket, 'TCP_NOPUSH'): # pragma: no cover
CORK = socket.TCP_NOPUSH # type: ignore
else: # pragma: no cover
CORK = None
if hasattr(socket, "SO_KEEPALIVE"):
if hasattr(socket, 'SO_KEEPALIVE'):
def tcp_keepalive(transport: asyncio.Transport) -> None:
sock = transport.get_extra_info('socket')
sock = transport.get_extra_info("socket")
if sock is not None:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
else:
def tcp_keepalive(
transport: asyncio.Transport) -> None: # pragma: no cover
def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover
pass
def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
sock = transport.get_extra_info('socket')
sock = transport.get_extra_info("socket")
if sock is None:
return
@ -40,24 +35,4 @@ def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
# socket may be closed already, on windows OSError get raised
with suppress(OSError):
sock.setsockopt(
socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
def tcp_cork(transport: asyncio.Transport, value: bool) -> None:
sock = transport.get_extra_info('socket')
if CORK is None:
return
if sock is None:
return
if sock.family not in (socket.AF_INET, socket.AF_INET6):
return
value = bool(value)
with suppress(OSError):
sock.setsockopt(
socket.IPPROTO_TCP, CORK, value)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
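tcp_nodelay() is an internal helper, but its use is straightforward wherever a transport is available. A sketch inside a protocol callback:

import asyncio

from aiohttp.tcp_helpers import tcp_nodelay


class LowLatencyProtocol(asyncio.Protocol):
    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        # Disable Nagle's algorithm so small writes are sent immediately.
        tcp_nodelay(transport, True)  # type: ignore[arg-type]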


@ -2,15 +2,16 @@
import asyncio
import contextlib
import functools
import gc
import inspect
import ipaddress
import os
import socket
import sys
import unittest
import warnings
from abc import ABC, abstractmethod
from types import TracebackType
from typing import ( # noqa
from typing import (
TYPE_CHECKING,
Any,
Callable,
@ -19,26 +20,23 @@ from typing import ( # noqa
Optional,
Type,
Union,
cast,
)
from unittest import mock
from aiosignal import Signal
from multidict import CIMultiDict, CIMultiDictProxy
from yarl import URL
import aiohttp
from aiohttp.client import (
ClientResponse,
_RequestContextManager,
_WSRequestContextManager,
)
from aiohttp.client import _RequestContextManager, _WSRequestContextManager
from . import ClientSession, hdrs
from .abc import AbstractCookieJar
from .client_reqrep import ClientResponse # noqa
from .client_ws import ClientWebSocketResponse # noqa
from .helpers import sentinel
from .client_reqrep import ClientResponse
from .client_ws import ClientWebSocketResponse
from .helpers import PY_38, sentinel
from .http import HttpVersion, RawRequestMessage
from .signals import Signal
from .web import (
Application,
AppRunner,
@ -56,13 +54,29 @@ if TYPE_CHECKING: # pragma: no cover
else:
SSLContext = None
if PY_38:
from unittest import IsolatedAsyncioTestCase as TestCase
else:
from asynctest import TestCase # type: ignore[no-redef]
def get_unused_port_socket(host: str) -> socket.socket:
return get_port_socket(host, 0)
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
def get_port_socket(host: str, port: int) -> socket.socket:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def get_unused_port_socket(
host: str, family: socket.AddressFamily = socket.AF_INET
) -> socket.socket:
return get_port_socket(host, 0, family)
def get_port_socket(
host: str, port: int, family: socket.AddressFamily
) -> socket.socket:
s = socket.socket(family, socket.SOCK_STREAM)
if REUSE_ADDRESS:
# Windows has different semantics for SO_REUSEADDR,
# so don't set it. Ref:
# https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((host, port))
return s
@ -70,21 +84,26 @@ def get_port_socket(host: str, port: int) -> socket.socket:
def unused_port() -> int:
"""Return a port that is unused on the current host."""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind(('127.0.0.1', 0))
return s.getsockname()[1]
s.bind(("127.0.0.1", 0))
return cast(int, s.getsockname()[1])
class BaseTestServer(ABC):
__test__ = False
def __init__(self,
*,
scheme: Union[str, object]=sentinel,
loop: Optional[asyncio.AbstractEventLoop]=None,
host: str='127.0.0.1',
port: Optional[int]=None,
skip_url_asserts: bool=False,
**kwargs: Any) -> None:
def __init__(
self,
*,
scheme: Union[str, object] = sentinel,
loop: Optional[asyncio.AbstractEventLoop] = None,
host: str = "127.0.0.1",
port: Optional[int] = None,
skip_url_asserts: bool = False,
socket_factory: Callable[
[str, int, socket.AddressFamily], socket.socket
] = get_port_socket,
**kwargs: Any,
) -> None:
self._loop = loop
self.runner = None # type: Optional[BaseRunner]
self._root = None # type: Optional[URL]
@ -93,19 +112,25 @@ class BaseTestServer(ABC):
self._closed = False
self.scheme = scheme
self.skip_url_asserts = skip_url_asserts
self.socket_factory = socket_factory
async def start_server(self,
loop: Optional[asyncio.AbstractEventLoop]=None,
**kwargs: Any) -> None:
async def start_server(
self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
) -> None:
if self.runner:
return
self._loop = loop
self._ssl = kwargs.pop('ssl', None)
self._ssl = kwargs.pop("ssl", None)
self.runner = await self._make_runner(**kwargs)
await self.runner.setup()
if not self.port:
self.port = 0
_sock = get_port_socket(self.host, self.port)
try:
version = ipaddress.ip_address(self.host).version
except ValueError:
version = 4
family = socket.AF_INET6 if version == 6 else socket.AF_INET
_sock = self.socket_factory(self.host, self.port, family)
self.host, self.port = _sock.getsockname()[:2]
site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
await site.start()
@ -116,13 +141,11 @@ class BaseTestServer(ABC):
self.port = sockets[0].getsockname()[1]
if self.scheme is sentinel:
if self._ssl:
scheme = 'https'
scheme = "https"
else:
scheme = 'http'
scheme = "http"
self.scheme = scheme
self._root = URL('{}://{}:{}'.format(self.scheme,
self.host,
self.port))
self._root = URL(f"{self.scheme}://{self.host}:{self.port}")
@abstractmethod # pragma: no cover
async def _make_runner(self, **kwargs: Any) -> BaseRunner:
@ -176,31 +199,38 @@ class BaseTestServer(ABC):
def __enter__(self) -> None:
raise TypeError("Use async with instead")
def __exit__(self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType]) -> None:
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
# __exit__ should exist in pair with __enter__ but never executed
pass # pragma: no cover
async def __aenter__(self) -> 'BaseTestServer':
async def __aenter__(self) -> "BaseTestServer":
await self.start_server(loop=self._loop)
return self
async def __aexit__(self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType]) -> None:
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
await self.close()
class TestServer(BaseTestServer):
def __init__(self, app: Application, *,
scheme: Union[str, object]=sentinel,
host: str='127.0.0.1',
port: Optional[int]=None,
**kwargs: Any):
def __init__(
self,
app: Application,
*,
scheme: Union[str, object] = sentinel,
host: str = "127.0.0.1",
port: Optional[int] = None,
**kwargs: Any,
):
self.app = app
super().__init__(scheme=scheme, host=host, port=port, **kwargs)
@ -209,20 +239,20 @@ class TestServer(BaseTestServer):
class RawTestServer(BaseTestServer):
def __init__(self, handler: _RequestHandler, *,
scheme: Union[str, object]=sentinel,
host: str='127.0.0.1',
port: Optional[int]=None,
**kwargs: Any) -> None:
def __init__(
self,
handler: _RequestHandler,
*,
scheme: Union[str, object] = sentinel,
host: str = "127.0.0.1",
port: Optional[int] = None,
**kwargs: Any,
) -> None:
self._handler = handler
super().__init__(scheme=scheme, host=host, port=port, **kwargs)
async def _make_runner(self,
debug: bool=True,
**kwargs: Any) -> ServerRunner:
srv = Server(
self._handler, loop=self._loop, debug=debug, **kwargs)
async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
return ServerRunner(srv, debug=debug, **kwargs)
@ -233,22 +263,26 @@ class TestClient:
To write functional tests for aiohttp based servers.
"""
__test__ = False
def __init__(self, server: BaseTestServer, *,
cookie_jar: Optional[AbstractCookieJar]=None,
loop: Optional[asyncio.AbstractEventLoop]=None,
**kwargs: Any) -> None:
def __init__(
self,
server: BaseTestServer,
*,
cookie_jar: Optional[AbstractCookieJar] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
**kwargs: Any,
) -> None:
if not isinstance(server, BaseTestServer):
raise TypeError("server must be TestServer "
"instance, found type: %r" % type(server))
raise TypeError(
"server must be TestServer " "instance, found type: %r" % type(server)
)
self._server = server
self._loop = loop
if cookie_jar is None:
cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
self._session = ClientSession(loop=loop,
cookie_jar=cookie_jar,
**kwargs)
self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
self._closed = False
self._responses = [] # type: List[ClientResponse]
self._websockets = [] # type: List[ClientWebSocketResponse]
@ -269,8 +303,8 @@ class TestClient:
return self._server
@property
def app(self) -> Application:
return getattr(self._server, "app", None)
def app(self) -> Optional[Application]:
return cast(Optional[Application], getattr(self._server, "app", None))
@property
def session(self) -> ClientSession:
@ -286,17 +320,13 @@ class TestClient:
def make_url(self, path: str) -> URL:
return self._server.make_url(path)
async def _request(self, method: str, path: str,
**kwargs: Any) -> ClientResponse:
resp = await self._session.request(
method, self.make_url(path), **kwargs
)
async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse:
resp = await self._session.request(method, self.make_url(path), **kwargs)
# save it to close later
self._responses.append(resp)
return resp
def request(self, method: str, path: str,
**kwargs: Any) -> _RequestContextManager:
def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager:
"""Routes a request to tested http server.
The interface is identical to aiohttp.ClientSession.request,
@ -304,51 +334,35 @@ class TestClient:
test server.
"""
return _RequestContextManager(
self._request(method, path, **kwargs)
)
return _RequestContextManager(self._request(method, path, **kwargs))
def get(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP GET request."""
return _RequestContextManager(
self._request(hdrs.METH_GET, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
def post(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP POST request."""
return _RequestContextManager(
self._request(hdrs.METH_POST, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
def options(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP OPTIONS request."""
return _RequestContextManager(
self._request(hdrs.METH_OPTIONS, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs))
def head(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP HEAD request."""
return _RequestContextManager(
self._request(hdrs.METH_HEAD, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
def put(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PUT request."""
return _RequestContextManager(
self._request(hdrs.METH_PUT, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
def patch(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PATCH request."""
return _RequestContextManager(
self._request(hdrs.METH_PATCH, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs))
def delete(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PATCH request."""
return _RequestContextManager(
self._request(hdrs.METH_DELETE, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs))
def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager:
"""Initiate websocket connection.
@ -356,14 +370,10 @@ class TestClient:
The api corresponds to aiohttp.ClientSession.ws_connect.
"""
return _WSRequestContextManager(
self._ws_connect(path, **kwargs)
)
return _WSRequestContextManager(self._ws_connect(path, **kwargs))
async def _ws_connect(self, path: str,
**kwargs: Any) -> ClientWebSocketResponse:
ws = await self._session.ws_connect(
self.make_url(path), **kwargs)
async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse:
ws = await self._session.ws_connect(self.make_url(path), **kwargs)
self._websockets.append(ws)
return ws
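TestServer and TestClient can also be driven by hand, outside the pytest fixtures. A minimal sketch:

from aiohttp import web
from aiohttp.test_utils import TestClient, TestServer


async def ok(request: web.Request) -> web.Response:
    return web.Response(text="ok")


async def run_smoke_test() -> None:
    app = web.Application()
    app.router.add_get("/", ok)
    client = TestClient(TestServer(app))
    await client.start_server()
    try:
        resp = await client.get("/")
        assert resp.status == 200
    finally:
        await client.close()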
@ -391,27 +401,30 @@ class TestClient:
def __enter__(self) -> None:
raise TypeError("Use async with instead")
def __exit__(self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType]) -> None:
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
) -> None:
# __exit__ should exist in pair with __enter__ but never executed
pass # pragma: no cover
async def __aenter__(self) -> 'TestClient':
async def __aenter__(self) -> "TestClient":
await self.start_server()
return self
async def __aexit__(self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType]) -> None:
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
) -> None:
await self.close()
class AioHTTPTestCase(unittest.TestCase):
"""A base class to allow for unittest web applications using
aiohttp.
class AioHTTPTestCase(TestCase):
"""A base class to allow for unittest web applications using aiohttp.
Provides the following:
@ -426,44 +439,41 @@ class AioHTTPTestCase(unittest.TestCase):
"""
async def get_application(self) -> Application:
"""
"""Get application.
This method should be overridden
to return the aiohttp.web.Application
object to test.
"""
return self.get_app()
def get_app(self) -> Application:
"""Obsolete method used to constructing web application.
Use .get_application() coroutine instead
Use .get_application() coroutine instead.
"""
raise RuntimeError("Did you forget to define get_application()?")
def setUp(self) -> None:
self.loop = setup_test_loop()
self.app = self.loop.run_until_complete(self.get_application())
self.server = self.loop.run_until_complete(self.get_server(self.app))
self.client = self.loop.run_until_complete(
self.get_client(self.server))
self.loop.run_until_complete(self.client.start_server())
try:
self.loop = asyncio.get_running_loop()
except (AttributeError, RuntimeError): # AttributeError->py36
self.loop = asyncio.get_event_loop_policy().get_event_loop()
self.loop.run_until_complete(self.setUpAsync())
async def setUpAsync(self) -> None:
pass
self.app = await self.get_application()
self.server = await self.get_server(self.app)
self.client = await self.get_client(self.server)
await self.client.start_server()
def tearDown(self) -> None:
self.loop.run_until_complete(self.tearDownAsync())
self.loop.run_until_complete(self.client.close())
teardown_test_loop(self.loop)
async def tearDownAsync(self) -> None:
pass
await self.client.close()
async def get_server(self, app: Application) -> TestServer:
"""Return a TestServer instance."""
@ -475,27 +485,26 @@ class AioHTTPTestCase(unittest.TestCase):
def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
"""A decorator dedicated to use with asynchronous methods of an
AioHTTPTestCase.
Handles executing an asynchronous function, using
the self.loop of the AioHTTPTestCase.
"""
"""
A decorator for use with asynchronous AioHTTPTestCase test methods.
@functools.wraps(func, *args, **kwargs)
def new_func(self: Any, *inner_args: Any, **inner_kwargs: Any) -> Any:
return self.loop.run_until_complete(
func(self, *inner_args, **inner_kwargs))
return new_func
In 3.8+, this does nothing.
"""
warnings.warn(
"Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
DeprecationWarning,
stacklevel=2,
)
return func
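With the TestCase base above (IsolatedAsyncioTestCase on 3.8+), test methods can be plain coroutines and no decorator is needed. A sketch:

from aiohttp import web
from aiohttp.test_utils import AioHTTPTestCase


class MyAppTestCase(AioHTTPTestCase):
    async def get_application(self) -> web.Application:
        async def hello(request: web.Request) -> web.Response:
            return web.Response(text="Hello, world")

        app = web.Application()
        app.router.add_get("/", hello)
        return app

    async def test_example(self) -> None:
        async with self.client.request("GET", "/") as resp:
            assert resp.status == 200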
_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
@contextlib.contextmanager
def loop_context(loop_factory: _LOOP_FACTORY=asyncio.new_event_loop,
fast: bool=False) -> Iterator[asyncio.AbstractEventLoop]:
def loop_context(
loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
"""A contextmanager that creates an event_loop, for test purposes.
Handles the creation and cleanup of a test loop.
@ -506,10 +515,9 @@ def loop_context(loop_factory: _LOOP_FACTORY=asyncio.new_event_loop,
def setup_test_loop(
loop_factory: _LOOP_FACTORY=asyncio.new_event_loop
loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
"""Create and return an asyncio.BaseEventLoop
instance.
"""Create and return an asyncio.BaseEventLoop instance.
The caller should also call teardown_test_loop,
once they are done with the loop.
@ -517,26 +525,31 @@ def setup_test_loop(
loop = loop_factory()
try:
module = loop.__class__.__module__
skip_watcher = 'uvloop' in module
skip_watcher = "uvloop" in module
except AttributeError: # pragma: no cover
# Just in case
skip_watcher = True
asyncio.set_event_loop(loop)
if sys.platform != "win32" and not skip_watcher:
policy = asyncio.get_event_loop_policy()
watcher = asyncio.SafeChildWatcher() # type: ignore
watcher: asyncio.AbstractChildWatcher
try: # Python >= 3.8
# Refs:
# * https://github.com/pytest-dev/pytest-xdist/issues/620
# * https://stackoverflow.com/a/58614689/595220
# * https://bugs.python.org/issue35621
# * https://github.com/python/cpython/pull/14344
watcher = asyncio.ThreadedChildWatcher()
except AttributeError: # Python < 3.8
watcher = asyncio.SafeChildWatcher()
watcher.attach_loop(loop)
with contextlib.suppress(NotImplementedError):
policy.set_child_watcher(watcher)
return loop
def teardown_test_loop(loop: asyncio.AbstractEventLoop,
fast: bool=False) -> None:
"""Teardown and cleanup an event_loop created
by setup_test_loop.
"""
def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
"""Teardown and cleanup an event_loop created by setup_test_loop."""
closed = loop.is_closed()
if not closed:
loop.call_soon(loop.stop)
@ -567,11 +580,11 @@ def _create_app_mock() -> mock.MagicMock:
return app
def _create_transport(sslcontext: Optional[SSLContext]=None) -> mock.Mock:
def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
transport = mock.Mock()
def get_extra_info(key: str) -> Optional[SSLContext]:
if key == 'sslcontext':
if key == "sslcontext":
return sslcontext
else:
return None
@ -580,26 +593,28 @@ def _create_transport(sslcontext: Optional[SSLContext]=None) -> mock.Mock:
return transport
def make_mocked_request(method: str, path: str,
headers: Any=None, *,
match_info: Any=sentinel,
version: HttpVersion=HttpVersion(1, 1),
closing: bool=False,
app: Any=None,
writer: Any=sentinel,
protocol: Any=sentinel,
transport: Any=sentinel,
payload: Any=sentinel,
sslcontext: Optional[SSLContext]=None,
client_max_size: int=1024**2,
loop: Any=...) -> Any:
def make_mocked_request(
method: str,
path: str,
headers: Any = None,
*,
match_info: Any = sentinel,
version: HttpVersion = HttpVersion(1, 1),
closing: bool = False,
app: Any = None,
writer: Any = sentinel,
protocol: Any = sentinel,
transport: Any = sentinel,
payload: Any = sentinel,
sslcontext: Optional[SSLContext] = None,
client_max_size: int = 1024 ** 2,
loop: Any = ...,
) -> Request:
"""Creates mocked web.Request testing purposes.
Useful in unit tests, when spinning full web server is overkill or
specific conditions and errors are hard to trigger.
"""
task = mock.Mock()
if loop is ...:
loop = mock.Mock()
@ -611,16 +626,26 @@ def make_mocked_request(method: str, path: str,
if headers:
headers = CIMultiDictProxy(CIMultiDict(headers))
raw_hdrs = tuple(
(k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items())
(k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
)
else:
headers = CIMultiDictProxy(CIMultiDict())
raw_hdrs = ()
chunked = 'chunked' in headers.get(hdrs.TRANSFER_ENCODING, '').lower()
chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()
message = RawRequestMessage(
method, path, version, headers,
raw_hdrs, closing, False, False, chunked, URL(path))
method,
path,
version,
headers,
raw_hdrs,
closing,
None,
False,
chunked,
URL(path),
)
if app is None:
app = _create_app_mock()
@ -645,21 +670,24 @@ def make_mocked_request(method: str, path: str,
if payload is sentinel:
payload = mock.Mock()
req = Request(message, payload,
protocol, writer, task, loop,
client_max_size=client_max_size)
req = Request(
message, payload, protocol, writer, task, loop, client_max_size=client_max_size
)
match_info = UrlMappingMatchInfo(
{} if match_info is sentinel else match_info, mock.Mock())
{} if match_info is sentinel else match_info, mock.Mock()
)
match_info.add_app(app)
req._match_info = match_info
return req
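A sketch of the intended use, exercising a handler without starting a server:

from aiohttp.test_utils import make_mocked_request


def test_mocked_request() -> None:
    req = make_mocked_request("GET", "/", headers={"token": "x"})
    assert req.method == "GET"
    assert req.headers["token"] == "x"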
def make_mocked_coro(return_value: Any=sentinel,
raise_exception: Any=sentinel) -> Any:
def make_mocked_coro(
return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
"""Creates a coroutine mock."""
async def mock_coro(*args: Any, **kwargs: Any) -> Any:
if raise_exception is not sentinel:
raise raise_exception
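The two helpers changed above are public test utilities. A minimal sketch of how they are typically used together (the header name and return value here are arbitrary):

import asyncio
from aiohttp.test_utils import make_mocked_coro, make_mocked_request

def test_mocks():
    # Build a Request without running a server.
    req = make_mocked_request("GET", "/", headers={"token": "x"})
    assert req.method == "GET"
    assert req.headers["token"] == "x"

    # An awaitable mock: calling it yields a coroutine resolving to 42.
    coro = make_mocked_coro(return_value=42)
    loop = asyncio.new_event_loop()
    try:
        assert loop.run_until_complete(coro("arg")) == 42
        coro.assert_called_once_with("arg")
    finally:
        loop.close()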

@@ -1,84 +1,112 @@
from types import SimpleNamespace
from typing import TYPE_CHECKING, Awaitable, Callable, Type, Union
from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar
import attr
from multidict import CIMultiDict # noqa
from aiosignal import Signal
from multidict import CIMultiDict
from yarl import URL
from .client_reqrep import ClientResponse
from .signals import Signal
if TYPE_CHECKING: # pragma: no cover
from .client import ClientSession # noqa
from .client import ClientSession
from .typedefs import Protocol
_SignalArgs = Union[
'TraceRequestStartParams',
'TraceRequestEndParams',
'TraceRequestExceptionParams',
'TraceConnectionQueuedStartParams',
'TraceConnectionQueuedEndParams',
'TraceConnectionCreateStartParams',
'TraceConnectionCreateEndParams',
'TraceConnectionReuseconnParams',
'TraceDnsResolveHostStartParams',
'TraceDnsResolveHostEndParams',
'TraceDnsCacheHitParams',
'TraceDnsCacheMissParams',
'TraceRequestRedirectParams',
'TraceRequestChunkSentParams',
'TraceResponseChunkReceivedParams',
]
_Signal = Signal[Callable[[ClientSession, SimpleNamespace, _SignalArgs],
Awaitable[None]]]
else:
_Signal = Signal
_ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)
class _SignalCallback(Protocol[_ParamT_contra]):
def __call__(
self,
__client_session: ClientSession,
__trace_config_ctx: SimpleNamespace,
__params: _ParamT_contra,
) -> Awaitable[None]:
...
__all__ = (
'TraceConfig', 'TraceRequestStartParams', 'TraceRequestEndParams',
'TraceRequestExceptionParams', 'TraceConnectionQueuedStartParams',
'TraceConnectionQueuedEndParams', 'TraceConnectionCreateStartParams',
'TraceConnectionCreateEndParams', 'TraceConnectionReuseconnParams',
'TraceDnsResolveHostStartParams', 'TraceDnsResolveHostEndParams',
'TraceDnsCacheHitParams', 'TraceDnsCacheMissParams',
'TraceRequestRedirectParams',
'TraceRequestChunkSentParams', 'TraceResponseChunkReceivedParams',
"TraceConfig",
"TraceRequestStartParams",
"TraceRequestEndParams",
"TraceRequestExceptionParams",
"TraceConnectionQueuedStartParams",
"TraceConnectionQueuedEndParams",
"TraceConnectionCreateStartParams",
"TraceConnectionCreateEndParams",
"TraceConnectionReuseconnParams",
"TraceDnsResolveHostStartParams",
"TraceDnsResolveHostEndParams",
"TraceDnsCacheHitParams",
"TraceDnsCacheMissParams",
"TraceRequestRedirectParams",
"TraceRequestChunkSentParams",
"TraceResponseChunkReceivedParams",
"TraceRequestHeadersSentParams",
)
class TraceConfig:
"""First-class used to trace requests launched via ClientSession
objects."""
"""First-class used to trace requests launched via ClientSession objects."""
def __init__(
self,
trace_config_ctx_factory: Type[SimpleNamespace]=SimpleNamespace
self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
) -> None:
self._on_request_start = Signal(self) # type: _Signal
self._on_request_chunk_sent = Signal(self) # type: _Signal
self._on_response_chunk_received = Signal(self) # type: _Signal
self._on_request_end = Signal(self) # type: _Signal
self._on_request_exception = Signal(self) # type: _Signal
self._on_request_redirect = Signal(self) # type: _Signal
self._on_connection_queued_start = Signal(self) # type: _Signal
self._on_connection_queued_end = Signal(self) # type: _Signal
self._on_connection_create_start = Signal(self) # type: _Signal
self._on_connection_create_end = Signal(self) # type: _Signal
self._on_connection_reuseconn = Signal(self) # type: _Signal
self._on_dns_resolvehost_start = Signal(self) # type: _Signal
self._on_dns_resolvehost_end = Signal(self) # type: _Signal
self._on_dns_cache_hit = Signal(self) # type: _Signal
self._on_dns_cache_miss = Signal(self) # type: _Signal
self._on_request_start = Signal(
self
) # type: Signal[_SignalCallback[TraceRequestStartParams]]
self._on_request_chunk_sent = Signal(
self
) # type: Signal[_SignalCallback[TraceRequestChunkSentParams]]
self._on_response_chunk_received = Signal(
self
) # type: Signal[_SignalCallback[TraceResponseChunkReceivedParams]]
self._on_request_end = Signal(
self
) # type: Signal[_SignalCallback[TraceRequestEndParams]]
self._on_request_exception = Signal(
self
) # type: Signal[_SignalCallback[TraceRequestExceptionParams]]
self._on_request_redirect = Signal(
self
) # type: Signal[_SignalCallback[TraceRequestRedirectParams]]
self._on_connection_queued_start = Signal(
self
) # type: Signal[_SignalCallback[TraceConnectionQueuedStartParams]]
self._on_connection_queued_end = Signal(
self
) # type: Signal[_SignalCallback[TraceConnectionQueuedEndParams]]
self._on_connection_create_start = Signal(
self
) # type: Signal[_SignalCallback[TraceConnectionCreateStartParams]]
self._on_connection_create_end = Signal(
self
) # type: Signal[_SignalCallback[TraceConnectionCreateEndParams]]
self._on_connection_reuseconn = Signal(
self
) # type: Signal[_SignalCallback[TraceConnectionReuseconnParams]]
self._on_dns_resolvehost_start = Signal(
self
) # type: Signal[_SignalCallback[TraceDnsResolveHostStartParams]]
self._on_dns_resolvehost_end = Signal(
self
) # type: Signal[_SignalCallback[TraceDnsResolveHostEndParams]]
self._on_dns_cache_hit = Signal(
self
) # type: Signal[_SignalCallback[TraceDnsCacheHitParams]]
self._on_dns_cache_miss = Signal(
self
) # type: Signal[_SignalCallback[TraceDnsCacheMissParams]]
self._on_request_headers_sent = Signal(
self
) # type: Signal[_SignalCallback[TraceRequestHeadersSentParams]]
self._trace_config_ctx_factory = trace_config_ctx_factory # type: Type[SimpleNamespace] # noqa
self._trace_config_ctx_factory = trace_config_ctx_factory
def trace_config_ctx(
self,
trace_request_ctx: SimpleNamespace=None
) -> SimpleNamespace: # noqa
""" Return a new trace_config_ctx instance """
return self._trace_config_ctx_factory(
trace_request_ctx=trace_request_ctx)
self, trace_request_ctx: Optional[SimpleNamespace] = None
) -> SimpleNamespace:
"""Return a new trace_config_ctx instance"""
return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)
def freeze(self) -> None:
self._on_request_start.freeze()
@@ -96,292 +124,349 @@ class TraceConfig:
self._on_dns_resolvehost_end.freeze()
self._on_dns_cache_hit.freeze()
self._on_dns_cache_miss.freeze()
self._on_request_headers_sent.freeze()
@property
def on_request_start(self) -> _Signal:
def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
return self._on_request_start
@property
def on_request_chunk_sent(self) -> _Signal:
def on_request_chunk_sent(
self,
) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
return self._on_request_chunk_sent
@property
def on_response_chunk_received(self) -> _Signal:
def on_response_chunk_received(
self,
) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
return self._on_response_chunk_received
@property
def on_request_end(self) -> _Signal:
def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
return self._on_request_end
@property
def on_request_exception(self) -> _Signal:
def on_request_exception(
self,
) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
return self._on_request_exception
@property
def on_request_redirect(self) -> _Signal:
def on_request_redirect(
self,
) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
return self._on_request_redirect
@property
def on_connection_queued_start(self) -> _Signal:
def on_connection_queued_start(
self,
) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
return self._on_connection_queued_start
@property
def on_connection_queued_end(self) -> _Signal:
def on_connection_queued_end(
self,
) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
return self._on_connection_queued_end
@property
def on_connection_create_start(self) -> _Signal:
def on_connection_create_start(
self,
) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
return self._on_connection_create_start
@property
def on_connection_create_end(self) -> _Signal:
def on_connection_create_end(
self,
) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
return self._on_connection_create_end
@property
def on_connection_reuseconn(self) -> _Signal:
def on_connection_reuseconn(
self,
) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
return self._on_connection_reuseconn
@property
def on_dns_resolvehost_start(self) -> _Signal:
def on_dns_resolvehost_start(
self,
) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
return self._on_dns_resolvehost_start
@property
def on_dns_resolvehost_end(self) -> _Signal:
def on_dns_resolvehost_end(
self,
) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
return self._on_dns_resolvehost_end
@property
def on_dns_cache_hit(self) -> _Signal:
def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
return self._on_dns_cache_hit
@property
def on_dns_cache_miss(self) -> _Signal:
def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
return self._on_dns_cache_miss
@property
def on_request_headers_sent(
self,
) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
return self._on_request_headers_sent
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestStartParams:
""" Parameters sent by the `on_request_start` signal"""
method = attr.ib(type=str)
url = attr.ib(type=URL)
headers = attr.ib(type='CIMultiDict[str]')
"""Parameters sent by the `on_request_start` signal"""
method: str
url: URL
headers: "CIMultiDict[str]"
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestChunkSentParams:
""" Parameters sent by the `on_request_chunk_sent` signal"""
chunk = attr.ib(type=bytes)
"""Parameters sent by the `on_request_chunk_sent` signal"""
method: str
url: URL
chunk: bytes
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceResponseChunkReceivedParams:
""" Parameters sent by the `on_response_chunk_received` signal"""
chunk = attr.ib(type=bytes)
"""Parameters sent by the `on_response_chunk_received` signal"""
method: str
url: URL
chunk: bytes
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestEndParams:
""" Parameters sent by the `on_request_end` signal"""
method = attr.ib(type=str)
url = attr.ib(type=URL)
headers = attr.ib(type='CIMultiDict[str]')
response = attr.ib(type=ClientResponse)
"""Parameters sent by the `on_request_end` signal"""
method: str
url: URL
headers: "CIMultiDict[str]"
response: ClientResponse
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestExceptionParams:
""" Parameters sent by the `on_request_exception` signal"""
method = attr.ib(type=str)
url = attr.ib(type=URL)
headers = attr.ib(type='CIMultiDict[str]')
exception = attr.ib(type=BaseException)
"""Parameters sent by the `on_request_exception` signal"""
method: str
url: URL
headers: "CIMultiDict[str]"
exception: BaseException
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestRedirectParams:
""" Parameters sent by the `on_request_redirect` signal"""
method = attr.ib(type=str)
url = attr.ib(type=URL)
headers = attr.ib(type='CIMultiDict[str]')
response = attr.ib(type=ClientResponse)
"""Parameters sent by the `on_request_redirect` signal"""
method: str
url: URL
headers: "CIMultiDict[str]"
response: ClientResponse
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedStartParams:
""" Parameters sent by the `on_connection_queued_start` signal"""
"""Parameters sent by the `on_connection_queued_start` signal"""
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedEndParams:
""" Parameters sent by the `on_connection_queued_end` signal"""
"""Parameters sent by the `on_connection_queued_end` signal"""
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateStartParams:
""" Parameters sent by the `on_connection_create_start` signal"""
"""Parameters sent by the `on_connection_create_start` signal"""
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateEndParams:
""" Parameters sent by the `on_connection_create_end` signal"""
"""Parameters sent by the `on_connection_create_end` signal"""
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionReuseconnParams:
""" Parameters sent by the `on_connection_reuseconn` signal"""
"""Parameters sent by the `on_connection_reuseconn` signal"""
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostStartParams:
""" Parameters sent by the `on_dns_resolvehost_start` signal"""
host = attr.ib(type=str)
"""Parameters sent by the `on_dns_resolvehost_start` signal"""
host: str
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostEndParams:
""" Parameters sent by the `on_dns_resolvehost_end` signal"""
host = attr.ib(type=str)
"""Parameters sent by the `on_dns_resolvehost_end` signal"""
host: str
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheHitParams:
""" Parameters sent by the `on_dns_cache_hit` signal"""
host = attr.ib(type=str)
"""Parameters sent by the `on_dns_cache_hit` signal"""
host: str
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheMissParams:
""" Parameters sent by the `on_dns_cache_miss` signal"""
host = attr.ib(type=str)
"""Parameters sent by the `on_dns_cache_miss` signal"""
host: str
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestHeadersSentParams:
"""Parameters sent by the `on_request_headers_sent` signal"""
method: str
url: URL
headers: "CIMultiDict[str]"
class Trace:
""" Internal class used to keep together the main dependencies used
at the moment of send a signal."""
"""Internal dependency holder class.
def __init__(self,
session: 'ClientSession',
trace_config: TraceConfig,
trace_config_ctx: SimpleNamespace) -> None:
Used to keep together the main dependencies used
at the moment of sending a signal.
"""
def __init__(
self,
session: "ClientSession",
trace_config: TraceConfig,
trace_config_ctx: SimpleNamespace,
) -> None:
self._trace_config = trace_config
self._trace_config_ctx = trace_config_ctx
self._session = session
async def send_request_start(self,
method: str,
url: URL,
headers: 'CIMultiDict[str]') -> None:
async def send_request_start(
self, method: str, url: URL, headers: "CIMultiDict[str]"
) -> None:
return await self._trace_config.on_request_start.send(
self._session,
self._trace_config_ctx,
TraceRequestStartParams(method, url, headers)
TraceRequestStartParams(method, url, headers),
)
async def send_request_chunk_sent(self, chunk: bytes) -> None:
async def send_request_chunk_sent(
self, method: str, url: URL, chunk: bytes
) -> None:
return await self._trace_config.on_request_chunk_sent.send(
self._session,
self._trace_config_ctx,
TraceRequestChunkSentParams(chunk)
TraceRequestChunkSentParams(method, url, chunk),
)
async def send_response_chunk_received(self, chunk: bytes) -> None:
async def send_response_chunk_received(
self, method: str, url: URL, chunk: bytes
) -> None:
return await self._trace_config.on_response_chunk_received.send(
self._session,
self._trace_config_ctx,
TraceResponseChunkReceivedParams(chunk)
TraceResponseChunkReceivedParams(method, url, chunk),
)
async def send_request_end(self,
method: str,
url: URL,
headers: 'CIMultiDict[str]',
response: ClientResponse) -> None:
async def send_request_end(
self,
method: str,
url: URL,
headers: "CIMultiDict[str]",
response: ClientResponse,
) -> None:
return await self._trace_config.on_request_end.send(
self._session,
self._trace_config_ctx,
TraceRequestEndParams(method, url, headers, response)
TraceRequestEndParams(method, url, headers, response),
)
async def send_request_exception(self,
method: str,
url: URL,
headers: 'CIMultiDict[str]',
exception: BaseException) -> None:
async def send_request_exception(
self,
method: str,
url: URL,
headers: "CIMultiDict[str]",
exception: BaseException,
) -> None:
return await self._trace_config.on_request_exception.send(
self._session,
self._trace_config_ctx,
TraceRequestExceptionParams(method, url, headers, exception)
TraceRequestExceptionParams(method, url, headers, exception),
)
async def send_request_redirect(self,
method: str,
url: URL,
headers: 'CIMultiDict[str]',
response: ClientResponse) -> None:
async def send_request_redirect(
self,
method: str,
url: URL,
headers: "CIMultiDict[str]",
response: ClientResponse,
) -> None:
return await self._trace_config._on_request_redirect.send(
self._session,
self._trace_config_ctx,
TraceRequestRedirectParams(method, url, headers, response)
TraceRequestRedirectParams(method, url, headers, response),
)
async def send_connection_queued_start(self) -> None:
return await self._trace_config.on_connection_queued_start.send(
self._session,
self._trace_config_ctx,
TraceConnectionQueuedStartParams()
self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams()
)
async def send_connection_queued_end(self) -> None:
return await self._trace_config.on_connection_queued_end.send(
self._session,
self._trace_config_ctx,
TraceConnectionQueuedEndParams()
self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams()
)
async def send_connection_create_start(self) -> None:
return await self._trace_config.on_connection_create_start.send(
self._session,
self._trace_config_ctx,
TraceConnectionCreateStartParams()
self._session, self._trace_config_ctx, TraceConnectionCreateStartParams()
)
async def send_connection_create_end(self) -> None:
return await self._trace_config.on_connection_create_end.send(
self._session,
self._trace_config_ctx,
TraceConnectionCreateEndParams()
self._session, self._trace_config_ctx, TraceConnectionCreateEndParams()
)
async def send_connection_reuseconn(self) -> None:
return await self._trace_config.on_connection_reuseconn.send(
self._session,
self._trace_config_ctx,
TraceConnectionReuseconnParams()
self._session, self._trace_config_ctx, TraceConnectionReuseconnParams()
)
async def send_dns_resolvehost_start(self, host: str) -> None:
return await self._trace_config.on_dns_resolvehost_start.send(
self._session,
self._trace_config_ctx,
TraceDnsResolveHostStartParams(host)
self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host)
)
async def send_dns_resolvehost_end(self, host: str) -> None:
return await self._trace_config.on_dns_resolvehost_end.send(
self._session,
self._trace_config_ctx,
TraceDnsResolveHostEndParams(host)
self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host)
)
async def send_dns_cache_hit(self, host: str) -> None:
return await self._trace_config.on_dns_cache_hit.send(
self._session,
self._trace_config_ctx,
TraceDnsCacheHitParams(host)
self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host)
)
async def send_dns_cache_miss(self, host: str) -> None:
return await self._trace_config.on_dns_cache_miss.send(
self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
)
async def send_request_headers(
self, method: str, url: URL, headers: "CIMultiDict[str]"
) -> None:
return await self._trace_config._on_request_headers_sent.send(
self._session,
self._trace_config_ctx,
TraceDnsCacheMissParams(host)
TraceRequestHeadersSentParams(method, url, headers),
)
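Taken together, the tracing changes above add per-request context (method, url) to the chunk signals and introduce on_request_headers_sent. A minimal client-side sketch of wiring a callback into these signals (the URL is a placeholder):

import asyncio
import aiohttp

async def on_request_start(session, trace_config_ctx, params):
    # params is a TraceRequestStartParams with .method, .url, .headers
    print("request start:", params.method, params.url)

async def main():
    trace_config = aiohttp.TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    async with aiohttp.ClientSession(trace_configs=[trace_config]) as session:
        async with session.get("https://example.com/") as resp:
            await resp.text()

asyncio.run(main())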

@@ -1,10 +1,10 @@
import json
import os # noqa
import pathlib # noqa
import os
import sys
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Iterable,
Mapping,
@@ -12,15 +12,19 @@ from typing import (
Union,
)
from multidict import (
CIMultiDict,
CIMultiDictProxy,
MultiDict,
MultiDictProxy,
istr,
)
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
from yarl import URL
# These are for other modules to use (to avoid repeating the conditional import).
if sys.version_info >= (3, 8):
from typing import Final as Final, Protocol as Protocol, TypedDict as TypedDict
else:
from typing_extensions import ( # noqa: F401
Final,
Protocol as Protocol,
TypedDict as TypedDict,
)
DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads
@@ -29,7 +33,9 @@ if TYPE_CHECKING: # pragma: no cover
_CIMultiDictProxy = CIMultiDictProxy[str]
_MultiDict = MultiDict[str]
_MultiDictProxy = MultiDictProxy[str]
from http.cookies import BaseCookie # noqa
from http.cookies import BaseCookie, Morsel
from .web import Request, StreamResponse
else:
_CIMultiDict = CIMultiDict
_CIMultiDictProxy = CIMultiDictProxy
@@ -39,15 +45,20 @@ else:
Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict,
_CIMultiDictProxy]
LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy]
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]
LooseCookies = Union[Iterable[Tuple[str, 'BaseCookie[str]']],
Mapping[str, 'BaseCookie[str]'], 'BaseCookie[str]']
LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
LooseCookiesMappings,
LooseCookiesIterables,
"BaseCookie[str]",
]
if sys.version_info >= (3, 6):
PathLike = Union[str, 'os.PathLike[str]']
else:
PathLike = Union[str, pathlib.PurePath]
Handler = Callable[["Request"], Awaitable["StreamResponse"]]
PathLike = Union[str, "os.PathLike[str]"]

@@ -5,302 +5,318 @@ import sys
from argparse import ArgumentParser
from collections.abc import Iterable
from importlib import import_module
from typing import Any, Awaitable, Callable, List, Optional, Type, Union, cast
from typing import (
Any,
Awaitable,
Callable,
Iterable as TypingIterable,
List,
Optional,
Set,
Type,
Union,
cast,
)
from .abc import AbstractAccessLogger
from .helpers import all_tasks
from .log import access_logger
from .web_app import Application as Application
from .web_app import CleanupError as CleanupError
from .web_exceptions import HTTPAccepted as HTTPAccepted
from .web_exceptions import HTTPBadGateway as HTTPBadGateway
from .web_exceptions import HTTPBadRequest as HTTPBadRequest
from .web_exceptions import HTTPClientError as HTTPClientError
from .web_exceptions import HTTPConflict as HTTPConflict
from .web_exceptions import HTTPCreated as HTTPCreated
from .web_exceptions import HTTPError as HTTPError
from .web_exceptions import HTTPException as HTTPException
from .web_exceptions import HTTPExpectationFailed as HTTPExpectationFailed
from .web_exceptions import HTTPFailedDependency as HTTPFailedDependency
from .web_exceptions import HTTPForbidden as HTTPForbidden
from .web_exceptions import HTTPFound as HTTPFound
from .web_exceptions import HTTPGatewayTimeout as HTTPGatewayTimeout
from .web_exceptions import HTTPGone as HTTPGone
from .web_exceptions import HTTPInsufficientStorage as HTTPInsufficientStorage
from .web_exceptions import HTTPInternalServerError as HTTPInternalServerError
from .web_exceptions import HTTPLengthRequired as HTTPLengthRequired
from .web_exceptions import HTTPMethodNotAllowed as HTTPMethodNotAllowed
from .web_exceptions import HTTPMisdirectedRequest as HTTPMisdirectedRequest
from .web_exceptions import HTTPMovedPermanently as HTTPMovedPermanently
from .web_exceptions import HTTPMultipleChoices as HTTPMultipleChoices
from .web_app import Application as Application, CleanupError as CleanupError
from .web_exceptions import (
HTTPAccepted as HTTPAccepted,
HTTPBadGateway as HTTPBadGateway,
HTTPBadRequest as HTTPBadRequest,
HTTPClientError as HTTPClientError,
HTTPConflict as HTTPConflict,
HTTPCreated as HTTPCreated,
HTTPError as HTTPError,
HTTPException as HTTPException,
HTTPExpectationFailed as HTTPExpectationFailed,
HTTPFailedDependency as HTTPFailedDependency,
HTTPForbidden as HTTPForbidden,
HTTPFound as HTTPFound,
HTTPGatewayTimeout as HTTPGatewayTimeout,
HTTPGone as HTTPGone,
HTTPInsufficientStorage as HTTPInsufficientStorage,
HTTPInternalServerError as HTTPInternalServerError,
HTTPLengthRequired as HTTPLengthRequired,
HTTPMethodNotAllowed as HTTPMethodNotAllowed,
HTTPMisdirectedRequest as HTTPMisdirectedRequest,
HTTPMovedPermanently as HTTPMovedPermanently,
HTTPMultipleChoices as HTTPMultipleChoices,
HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
)
from .web_exceptions import HTTPNoContent as HTTPNoContent
from .web_exceptions import (
HTTPNoContent as HTTPNoContent,
HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
)
from .web_exceptions import HTTPNotAcceptable as HTTPNotAcceptable
from .web_exceptions import HTTPNotExtended as HTTPNotExtended
from .web_exceptions import HTTPNotFound as HTTPNotFound
from .web_exceptions import HTTPNotImplemented as HTTPNotImplemented
from .web_exceptions import HTTPNotModified as HTTPNotModified
from .web_exceptions import HTTPOk as HTTPOk
from .web_exceptions import HTTPPartialContent as HTTPPartialContent
from .web_exceptions import HTTPPaymentRequired as HTTPPaymentRequired
from .web_exceptions import HTTPPermanentRedirect as HTTPPermanentRedirect
from .web_exceptions import HTTPPreconditionFailed as HTTPPreconditionFailed
from .web_exceptions import (
HTTPNotAcceptable as HTTPNotAcceptable,
HTTPNotExtended as HTTPNotExtended,
HTTPNotFound as HTTPNotFound,
HTTPNotImplemented as HTTPNotImplemented,
HTTPNotModified as HTTPNotModified,
HTTPOk as HTTPOk,
HTTPPartialContent as HTTPPartialContent,
HTTPPaymentRequired as HTTPPaymentRequired,
HTTPPermanentRedirect as HTTPPermanentRedirect,
HTTPPreconditionFailed as HTTPPreconditionFailed,
HTTPPreconditionRequired as HTTPPreconditionRequired,
)
from .web_exceptions import (
HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
)
from .web_exceptions import HTTPRedirection as HTTPRedirection
from .web_exceptions import (
HTTPRedirection as HTTPRedirection,
HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
)
from .web_exceptions import (
HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
)
from .web_exceptions import (
HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
)
from .web_exceptions import HTTPRequestTimeout as HTTPRequestTimeout
from .web_exceptions import HTTPRequestURITooLong as HTTPRequestURITooLong
from .web_exceptions import HTTPResetContent as HTTPResetContent
from .web_exceptions import HTTPSeeOther as HTTPSeeOther
from .web_exceptions import HTTPServerError as HTTPServerError
from .web_exceptions import HTTPServiceUnavailable as HTTPServiceUnavailable
from .web_exceptions import HTTPSuccessful as HTTPSuccessful
from .web_exceptions import HTTPTemporaryRedirect as HTTPTemporaryRedirect
from .web_exceptions import HTTPTooManyRequests as HTTPTooManyRequests
from .web_exceptions import HTTPUnauthorized as HTTPUnauthorized
from .web_exceptions import (
HTTPRequestTimeout as HTTPRequestTimeout,
HTTPRequestURITooLong as HTTPRequestURITooLong,
HTTPResetContent as HTTPResetContent,
HTTPSeeOther as HTTPSeeOther,
HTTPServerError as HTTPServerError,
HTTPServiceUnavailable as HTTPServiceUnavailable,
HTTPSuccessful as HTTPSuccessful,
HTTPTemporaryRedirect as HTTPTemporaryRedirect,
HTTPTooManyRequests as HTTPTooManyRequests,
HTTPUnauthorized as HTTPUnauthorized,
HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
)
from .web_exceptions import HTTPUnprocessableEntity as HTTPUnprocessableEntity
from .web_exceptions import (
HTTPUnprocessableEntity as HTTPUnprocessableEntity,
HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
)
from .web_exceptions import HTTPUpgradeRequired as HTTPUpgradeRequired
from .web_exceptions import HTTPUseProxy as HTTPUseProxy
from .web_exceptions import (
HTTPUpgradeRequired as HTTPUpgradeRequired,
HTTPUseProxy as HTTPUseProxy,
HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
HTTPVersionNotSupported as HTTPVersionNotSupported,
)
from .web_exceptions import HTTPVersionNotSupported as HTTPVersionNotSupported
from .web_fileresponse import FileResponse as FileResponse
from .web_log import AccessLogger
from .web_middlewares import middleware as middleware
from .web_middlewares import (
middleware as middleware,
normalize_path_middleware as normalize_path_middleware,
)
from .web_protocol import PayloadAccessError as PayloadAccessError
from .web_protocol import RequestHandler as RequestHandler
from .web_protocol import RequestPayloadError as RequestPayloadError
from .web_request import BaseRequest as BaseRequest
from .web_request import FileField as FileField
from .web_request import Request as Request
from .web_response import ContentCoding as ContentCoding
from .web_response import Response as Response
from .web_response import StreamResponse as StreamResponse
from .web_response import json_response as json_response
from .web_routedef import AbstractRouteDef as AbstractRouteDef
from .web_routedef import RouteDef as RouteDef
from .web_routedef import RouteTableDef as RouteTableDef
from .web_routedef import StaticDef as StaticDef
from .web_routedef import delete as delete
from .web_routedef import get as get
from .web_routedef import head as head
from .web_routedef import options as options
from .web_routedef import patch as patch
from .web_routedef import post as post
from .web_routedef import put as put
from .web_routedef import route as route
from .web_routedef import static as static
from .web_routedef import view as view
from .web_runner import AppRunner as AppRunner
from .web_runner import BaseRunner as BaseRunner
from .web_runner import BaseSite as BaseSite
from .web_runner import GracefulExit as GracefulExit
from .web_runner import NamedPipeSite as NamedPipeSite
from .web_runner import ServerRunner as ServerRunner
from .web_runner import SockSite as SockSite
from .web_runner import TCPSite as TCPSite
from .web_runner import UnixSite as UnixSite
from .web_protocol import (
PayloadAccessError as PayloadAccessError,
RequestHandler as RequestHandler,
RequestPayloadError as RequestPayloadError,
)
from .web_request import (
BaseRequest as BaseRequest,
FileField as FileField,
Request as Request,
)
from .web_response import (
ContentCoding as ContentCoding,
Response as Response,
StreamResponse as StreamResponse,
json_response as json_response,
)
from .web_routedef import (
AbstractRouteDef as AbstractRouteDef,
RouteDef as RouteDef,
RouteTableDef as RouteTableDef,
StaticDef as StaticDef,
delete as delete,
get as get,
head as head,
options as options,
patch as patch,
post as post,
put as put,
route as route,
static as static,
view as view,
)
from .web_runner import (
AppRunner as AppRunner,
BaseRunner as BaseRunner,
BaseSite as BaseSite,
GracefulExit as GracefulExit,
NamedPipeSite as NamedPipeSite,
ServerRunner as ServerRunner,
SockSite as SockSite,
TCPSite as TCPSite,
UnixSite as UnixSite,
)
from .web_server import Server as Server
from .web_urldispatcher import AbstractResource as AbstractResource
from .web_urldispatcher import AbstractRoute as AbstractRoute
from .web_urldispatcher import DynamicResource as DynamicResource
from .web_urldispatcher import PlainResource as PlainResource
from .web_urldispatcher import Resource as Resource
from .web_urldispatcher import ResourceRoute as ResourceRoute
from .web_urldispatcher import StaticResource as StaticResource
from .web_urldispatcher import UrlDispatcher as UrlDispatcher
from .web_urldispatcher import UrlMappingMatchInfo as UrlMappingMatchInfo
from .web_urldispatcher import View as View
from .web_ws import WebSocketReady as WebSocketReady
from .web_ws import WebSocketResponse as WebSocketResponse
from .web_ws import WSMsgType as WSMsgType
from .web_urldispatcher import (
AbstractResource as AbstractResource,
AbstractRoute as AbstractRoute,
DynamicResource as DynamicResource,
PlainResource as PlainResource,
Resource as Resource,
ResourceRoute as ResourceRoute,
StaticResource as StaticResource,
UrlDispatcher as UrlDispatcher,
UrlMappingMatchInfo as UrlMappingMatchInfo,
View as View,
)
from .web_ws import (
WebSocketReady as WebSocketReady,
WebSocketResponse as WebSocketResponse,
WSMsgType as WSMsgType,
)
__all__ = (
# web_app
'Application',
'CleanupError',
"Application",
"CleanupError",
# web_exceptions
'HTTPAccepted',
'HTTPBadGateway',
'HTTPBadRequest',
'HTTPClientError',
'HTTPConflict',
'HTTPCreated',
'HTTPError',
'HTTPException',
'HTTPExpectationFailed',
'HTTPFailedDependency',
'HTTPForbidden',
'HTTPFound',
'HTTPGatewayTimeout',
'HTTPGone',
'HTTPInsufficientStorage',
'HTTPInternalServerError',
'HTTPLengthRequired',
'HTTPMethodNotAllowed',
'HTTPMisdirectedRequest',
'HTTPMovedPermanently',
'HTTPMultipleChoices',
'HTTPNetworkAuthenticationRequired',
'HTTPNoContent',
'HTTPNonAuthoritativeInformation',
'HTTPNotAcceptable',
'HTTPNotExtended',
'HTTPNotFound',
'HTTPNotImplemented',
'HTTPNotModified',
'HTTPOk',
'HTTPPartialContent',
'HTTPPaymentRequired',
'HTTPPermanentRedirect',
'HTTPPreconditionFailed',
'HTTPPreconditionRequired',
'HTTPProxyAuthenticationRequired',
'HTTPRedirection',
'HTTPRequestEntityTooLarge',
'HTTPRequestHeaderFieldsTooLarge',
'HTTPRequestRangeNotSatisfiable',
'HTTPRequestTimeout',
'HTTPRequestURITooLong',
'HTTPResetContent',
'HTTPSeeOther',
'HTTPServerError',
'HTTPServiceUnavailable',
'HTTPSuccessful',
'HTTPTemporaryRedirect',
'HTTPTooManyRequests',
'HTTPUnauthorized',
'HTTPUnavailableForLegalReasons',
'HTTPUnprocessableEntity',
'HTTPUnsupportedMediaType',
'HTTPUpgradeRequired',
'HTTPUseProxy',
'HTTPVariantAlsoNegotiates',
'HTTPVersionNotSupported',
"HTTPAccepted",
"HTTPBadGateway",
"HTTPBadRequest",
"HTTPClientError",
"HTTPConflict",
"HTTPCreated",
"HTTPError",
"HTTPException",
"HTTPExpectationFailed",
"HTTPFailedDependency",
"HTTPForbidden",
"HTTPFound",
"HTTPGatewayTimeout",
"HTTPGone",
"HTTPInsufficientStorage",
"HTTPInternalServerError",
"HTTPLengthRequired",
"HTTPMethodNotAllowed",
"HTTPMisdirectedRequest",
"HTTPMovedPermanently",
"HTTPMultipleChoices",
"HTTPNetworkAuthenticationRequired",
"HTTPNoContent",
"HTTPNonAuthoritativeInformation",
"HTTPNotAcceptable",
"HTTPNotExtended",
"HTTPNotFound",
"HTTPNotImplemented",
"HTTPNotModified",
"HTTPOk",
"HTTPPartialContent",
"HTTPPaymentRequired",
"HTTPPermanentRedirect",
"HTTPPreconditionFailed",
"HTTPPreconditionRequired",
"HTTPProxyAuthenticationRequired",
"HTTPRedirection",
"HTTPRequestEntityTooLarge",
"HTTPRequestHeaderFieldsTooLarge",
"HTTPRequestRangeNotSatisfiable",
"HTTPRequestTimeout",
"HTTPRequestURITooLong",
"HTTPResetContent",
"HTTPSeeOther",
"HTTPServerError",
"HTTPServiceUnavailable",
"HTTPSuccessful",
"HTTPTemporaryRedirect",
"HTTPTooManyRequests",
"HTTPUnauthorized",
"HTTPUnavailableForLegalReasons",
"HTTPUnprocessableEntity",
"HTTPUnsupportedMediaType",
"HTTPUpgradeRequired",
"HTTPUseProxy",
"HTTPVariantAlsoNegotiates",
"HTTPVersionNotSupported",
# web_fileresponse
'FileResponse',
"FileResponse",
# web_middlewares
'middleware',
'normalize_path_middleware',
"middleware",
"normalize_path_middleware",
# web_protocol
'PayloadAccessError',
'RequestHandler',
'RequestPayloadError',
"PayloadAccessError",
"RequestHandler",
"RequestPayloadError",
# web_request
'BaseRequest',
'FileField',
'Request',
"BaseRequest",
"FileField",
"Request",
# web_response
'ContentCoding',
'Response',
'StreamResponse',
'json_response',
"ContentCoding",
"Response",
"StreamResponse",
"json_response",
# web_routedef
'AbstractRouteDef',
'RouteDef',
'RouteTableDef',
'StaticDef',
'delete',
'get',
'head',
'options',
'patch',
'post',
'put',
'route',
'static',
'view',
"AbstractRouteDef",
"RouteDef",
"RouteTableDef",
"StaticDef",
"delete",
"get",
"head",
"options",
"patch",
"post",
"put",
"route",
"static",
"view",
# web_runner
'AppRunner',
'BaseRunner',
'BaseSite',
'GracefulExit',
'ServerRunner',
'SockSite',
'TCPSite',
'UnixSite',
'NamedPipeSite',
"AppRunner",
"BaseRunner",
"BaseSite",
"GracefulExit",
"ServerRunner",
"SockSite",
"TCPSite",
"UnixSite",
"NamedPipeSite",
# web_server
'Server',
"Server",
# web_urldispatcher
'AbstractResource',
'AbstractRoute',
'DynamicResource',
'PlainResource',
'Resource',
'ResourceRoute',
'StaticResource',
'UrlDispatcher',
'UrlMappingMatchInfo',
'View',
"AbstractResource",
"AbstractRoute",
"DynamicResource",
"PlainResource",
"Resource",
"ResourceRoute",
"StaticResource",
"UrlDispatcher",
"UrlMappingMatchInfo",
"View",
# web_ws
'WebSocketReady',
'WebSocketResponse',
'WSMsgType',
"WebSocketReady",
"WebSocketResponse",
"WSMsgType",
# web
'run_app',
"run_app",
)
try:
from ssl import SSLContext
except ImportError: # pragma: no cover
SSLContext = Any # type: ignore
SSLContext = Any # type: ignore[misc,assignment]
HostSequence = TypingIterable[str]
async def _run_app(app: Union[Application, Awaitable[Application]], *,
host: Optional[str]=None,
port: Optional[int]=None,
path: Optional[str]=None,
sock: Optional[socket.socket]=None,
shutdown_timeout: float=60.0,
ssl_context: Optional[SSLContext]=None,
print: Callable[..., None]=print,
backlog: int=128,
access_log_class: Type[AbstractAccessLogger]=AccessLogger,
access_log_format: str=AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger]=access_logger,
handle_signals: bool=True,
reuse_address: Optional[bool]=None,
reuse_port: Optional[bool]=None) -> None:
async def _run_app(
app: Union[Application, Awaitable[Application]],
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
path: Optional[str] = None,
sock: Optional[socket.socket] = None,
shutdown_timeout: float = 60.0,
keepalive_timeout: float = 75.0,
ssl_context: Optional[SSLContext] = None,
print: Callable[..., None] = print,
backlog: int = 128,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log_format: str = AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger] = access_logger,
handle_signals: bool = True,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
) -> None:
# An internal function that actually does all the dirty work of running an application
if asyncio.iscoroutine(app):
app = await app # type: ignore
app = await app # type: ignore[misc]
app = cast(Application, app)
runner = AppRunner(app, handle_signals=handle_signals,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log)
runner = AppRunner(
app,
handle_signals=handle_signals,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log,
keepalive_timeout=keepalive_timeout,
)
await runner.setup()
@@ -309,187 +325,255 @@ async def _run_app(app: Union[Application, Awaitable[Application]], *,
try:
if host is not None:
if isinstance(host, (str, bytes, bytearray, memoryview)):
sites.append(TCPSite(runner, host, port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port))
sites.append(
TCPSite(
runner,
host,
port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
else:
for h in host:
sites.append(TCPSite(runner, h, port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port))
sites.append(
TCPSite(
runner,
h,
port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
elif path is None and sock is None or port is not None:
sites.append(TCPSite(runner, port=port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context, backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port))
sites.append(
TCPSite(
runner,
port=port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
if path is not None:
if isinstance(path, (str, bytes, bytearray, memoryview)):
sites.append(UnixSite(runner, path,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog))
sites.append(
UnixSite(
runner,
path,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
else:
for p in path:
sites.append(UnixSite(runner, p,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog))
sites.append(
UnixSite(
runner,
p,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
if sock is not None:
if not isinstance(sock, Iterable):
sites.append(SockSite(runner, sock,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog))
sites.append(
SockSite(
runner,
sock,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
else:
for s in sock:
sites.append(SockSite(runner, s,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog))
sites.append(
SockSite(
runner,
s,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
for site in sites:
await site.start()
if print: # pragma: no branch
names = sorted(str(s.name) for s in runner.sites)
print("======== Running on {} ========\n"
"(Press CTRL+C to quit)".format(', '.join(names)))
print(
"======== Running on {} ========\n"
"(Press CTRL+C to quit)".format(", ".join(names))
)
# Sleep forever in 1-hour intervals; on Windows before Python 3.8,
# wake up every second so Ctrl+C is handled smoothly.
if sys.platform == "win32" and sys.version_info < (3, 8):
delay = 1
else:
delay = 3600
while True:
await asyncio.sleep(3600) # sleep forever by 1 hour intervals
await asyncio.sleep(delay)
finally:
await runner.cleanup()
def _cancel_all_tasks(loop: asyncio.AbstractEventLoop) -> None:
to_cancel = all_tasks(loop)
def _cancel_tasks(
to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
) -> None:
if not to_cancel:
return
for task in to_cancel:
task.cancel()
loop.run_until_complete(
asyncio.gather(*to_cancel, loop=loop, return_exceptions=True))
loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
for task in to_cancel:
if task.cancelled():
continue
if task.exception() is not None:
loop.call_exception_handler({
'message': 'unhandled exception during asyncio.run() shutdown',
'exception': task.exception(),
'task': task,
})
loop.call_exception_handler(
{
"message": "unhandled exception during asyncio.run() shutdown",
"exception": task.exception(),
"task": task,
}
)
def run_app(app: Union[Application, Awaitable[Application]], *,
host: Optional[str]=None,
port: Optional[int]=None,
path: Optional[str]=None,
sock: Optional[socket.socket]=None,
shutdown_timeout: float=60.0,
ssl_context: Optional[SSLContext]=None,
print: Callable[..., None]=print,
backlog: int=128,
access_log_class: Type[AbstractAccessLogger]=AccessLogger,
access_log_format: str=AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger]=access_logger,
handle_signals: bool=True,
reuse_address: Optional[bool]=None,
reuse_port: Optional[bool]=None) -> None:
def run_app(
app: Union[Application, Awaitable[Application]],
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
path: Optional[str] = None,
sock: Optional[socket.socket] = None,
shutdown_timeout: float = 60.0,
keepalive_timeout: float = 75.0,
ssl_context: Optional[SSLContext] = None,
print: Callable[..., None] = print,
backlog: int = 128,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log_format: str = AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger] = access_logger,
handle_signals: bool = True,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
"""Run an app locally"""
loop = asyncio.get_event_loop()
if loop is None:
loop = asyncio.new_event_loop()
# Configure if and only if in debugging mode and using the default logger
if loop.get_debug() and access_log and access_log.name == 'aiohttp.access':
if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
if access_log.level == logging.NOTSET:
access_log.setLevel(logging.DEBUG)
if not access_log.hasHandlers():
access_log.addHandler(logging.StreamHandler())
main_task = loop.create_task(
_run_app(
app,
host=host,
port=port,
path=path,
sock=sock,
shutdown_timeout=shutdown_timeout,
keepalive_timeout=keepalive_timeout,
ssl_context=ssl_context,
print=print,
backlog=backlog,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log,
handle_signals=handle_signals,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
try:
loop.run_until_complete(_run_app(app,
host=host,
port=port,
path=path,
sock=sock,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
print=print,
backlog=backlog,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log,
handle_signals=handle_signals,
reuse_address=reuse_address,
reuse_port=reuse_port))
asyncio.set_event_loop(loop)
loop.run_until_complete(main_task)
except (GracefulExit, KeyboardInterrupt): # pragma: no cover
pass
finally:
_cancel_all_tasks(loop)
if sys.version_info >= (3, 6): # don't use PY_36 to pass mypy
loop.run_until_complete(loop.shutdown_asyncgens())
_cancel_tasks({main_task}, loop)
_cancel_tasks(all_tasks(loop), loop)
loop.run_until_complete(loop.shutdown_asyncgens())
loop.close()
def main(argv: List[str]) -> None:
arg_parser = ArgumentParser(
description="aiohttp.web Application server",
prog="aiohttp.web"
description="aiohttp.web Application server", prog="aiohttp.web"
)
arg_parser.add_argument(
"entry_func",
help=("Callable returning the `aiohttp.web.Application` instance to "
"run. Should be specified in the 'module:function' syntax."),
metavar="entry-func"
help=(
"Callable returning the `aiohttp.web.Application` instance to "
"run. Should be specified in the 'module:function' syntax."
),
metavar="entry-func",
)
arg_parser.add_argument(
"-H", "--hostname",
"-H",
"--hostname",
help="TCP/IP hostname to serve on (default: %(default)r)",
default="localhost"
default="localhost",
)
arg_parser.add_argument(
"-P", "--port",
"-P",
"--port",
help="TCP/IP port to serve on (default: %(default)r)",
type=int,
default="8080"
default="8080",
)
arg_parser.add_argument(
"-U", "--path",
"-U",
"--path",
help="Unix file system path to serve on. Specifying a path will cause "
"hostname and port arguments to be ignored.",
"hostname and port arguments to be ignored.",
)
args, extra_argv = arg_parser.parse_known_args(argv)
# Import logic
mod_str, _, func_str = args.entry_func.partition(":")
if not func_str or not mod_str:
arg_parser.error(
"'entry-func' not in 'module:function' syntax"
)
arg_parser.error("'entry-func' not in 'module:function' syntax")
if mod_str.startswith("."):
arg_parser.error("relative module names not supported")
try:
module = import_module(mod_str)
except ImportError as ex:
arg_parser.error("unable to import %s: %s" % (mod_str, ex))
arg_parser.error(f"unable to import {mod_str}: {ex}")
try:
func = getattr(module, func_str)
except AttributeError:
arg_parser.error("module %r has no attribute %r" % (mod_str, func_str))
arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")
# Compatibility logic
if args.path is not None and not hasattr(socket, 'AF_UNIX'):
arg_parser.error("file system paths not supported by your operating"
" environment")
if args.path is not None and not hasattr(socket, "AF_UNIX"):
arg_parser.error(
"file system paths not supported by your operating" " environment"
)
logging.basicConfig(level=logging.DEBUG)
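For reference, the user-visible effect of the _run_app/run_app changes above is the new keepalive_timeout keyword (default 75.0) passed through to AppRunner. A minimal sketch of the updated entry point:

from aiohttp import web

async def handle(request: web.Request) -> web.Response:
    return web.Response(text="ok")

app = web.Application()
app.add_routes([web.get("/", handle)])

# keepalive_timeout is the keyword introduced in this diff; 75.0 is its default.
web.run_app(app, host="127.0.0.1", port=8080, keepalive_timeout=75.0)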

@@ -1,8 +1,8 @@
import asyncio
import logging
import warnings
from functools import partial
from typing import ( # noqa
from functools import partial, update_wrapper
from typing import (
TYPE_CHECKING,
Any,
AsyncIterator,
@@ -22,6 +22,9 @@ from typing import ( # noqa
cast,
)
from aiosignal import Signal
from frozenlist import FrozenList
from . import hdrs
from .abc import (
AbstractAccessLogger,
@@ -29,11 +32,9 @@ from .abc import (
AbstractRouter,
AbstractStreamWriter,
)
from .frozenlist import FrozenList
from .helpers import DEBUG
from .http_parser import RawRequestMessage
from .log import web_logger
from .signals import Signal
from .streams import StreamReader
from .web_log import AccessLogger
from .web_middlewares import _fix_request_current_app
@@ -44,6 +45,7 @@ from .web_routedef import AbstractRouteDef
from .web_server import Server
from .web_urldispatcher import (
AbstractResource,
AbstractRoute,
Domain,
MaskDomain,
MatchedSubAppResource,
@@ -51,26 +53,25 @@ from .web_urldispatcher import (
UrlDispatcher,
)
__all__ = ('Application', 'CleanupError')
__all__ = ("Application", "CleanupError")
if TYPE_CHECKING: # pragma: no cover
_AppSignal = Signal[Callable[['Application'], Awaitable[None]]]
_RespPrepareSignal = Signal[Callable[[Request, StreamResponse],
Awaitable[None]]]
_Handler = Callable[[Request], Awaitable[StreamResponse]]
_Middleware = Union[Callable[[Request, _Handler],
Awaitable[StreamResponse]],
Callable[['Application', _Handler], # old-style
Awaitable[_Handler]]]
from .typedefs import Handler
_AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
_RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
_Middleware = Union[
Callable[[Request, Handler], Awaitable[StreamResponse]],
Callable[["Application", Handler], Awaitable[Handler]], # old-style
]
_Middlewares = FrozenList[_Middleware]
_MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]]
_Subapps = List['Application']
_Subapps = List["Application"]
else:
# No type checker mode, skip types
_AppSignal = Signal
_RespPrepareSignal = Signal
_Handler = Callable
_Middleware = Callable
_Middlewares = FrozenList
_MiddlewaresHandlers = Optional[Sequence]
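The reworked _Middleware alias above favors the new-style signature (Request, Handler) -> StreamResponse. A sketch of a conforming middleware, using the Handler alias this diff moves into .typedefs (the header name and token value are made up):

from aiohttp import web
from aiohttp.typedefs import Handler

@web.middleware
async def auth_middleware(request: web.Request, handler: Handler) -> web.StreamResponse:
    if request.headers.get("X-Token") != "secret":
        raise web.HTTPUnauthorized()
    return await handler(request)

app = web.Application(middlewares=[auth_middleware])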
@@ -78,37 +79,57 @@ else:
class Application(MutableMapping[str, Any]):
ATTRS = frozenset([
'logger', '_debug', '_router', '_loop', '_handler_args',
'_middlewares', '_middlewares_handlers', '_run_middlewares',
'_state', '_frozen', '_pre_frozen', '_subapps',
'_on_response_prepare', '_on_startup', '_on_shutdown',
'_on_cleanup', '_client_max_size', '_cleanup_ctx'])
ATTRS = frozenset(
[
"logger",
"_debug",
"_router",
"_loop",
"_handler_args",
"_middlewares",
"_middlewares_handlers",
"_run_middlewares",
"_state",
"_frozen",
"_pre_frozen",
"_subapps",
"_on_response_prepare",
"_on_startup",
"_on_shutdown",
"_on_cleanup",
"_client_max_size",
"_cleanup_ctx",
]
)
def __init__(self, *,
logger: logging.Logger=web_logger,
router: Optional[UrlDispatcher]=None,
middlewares: Iterable[_Middleware]=(),
handler_args: Mapping[str, Any]=None,
client_max_size: int=1024**2,
loop: Optional[asyncio.AbstractEventLoop]=None,
debug: Any=... # mypy doesn't support ellipsis
) -> None:
def __init__(
self,
*,
logger: logging.Logger = web_logger,
router: Optional[UrlDispatcher] = None,
middlewares: Iterable[_Middleware] = (),
handler_args: Optional[Mapping[str, Any]] = None,
client_max_size: int = 1024 ** 2,
loop: Optional[asyncio.AbstractEventLoop] = None,
debug: Any = ..., # mypy doesn't support ellipsis
) -> None:
if router is None:
router = UrlDispatcher()
else:
warnings.warn("router argument is deprecated", DeprecationWarning,
stacklevel=2)
warnings.warn(
"router argument is deprecated", DeprecationWarning, stacklevel=2
)
assert isinstance(router, AbstractRouter), router
if loop is not None:
warnings.warn("loop argument is deprecated", DeprecationWarning,
stacklevel=2)
warnings.warn(
"loop argument is deprecated", DeprecationWarning, stacklevel=2
)
if debug is not ...:
warnings.warn("debug argument is deprecated",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"debug argument is deprecated", DeprecationWarning, stacklevel=2
)
self._debug = debug
self._router = router # type: UrlDispatcher
self._loop = loop
@@ -136,19 +157,24 @@ class Application(MutableMapping[str, Any]):
self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
self._client_max_size = client_max_size
def __init_subclass__(cls: Type['Application']) -> None:
warnings.warn("Inheritance class {} from web.Application "
"is discouraged".format(cls.__name__),
DeprecationWarning,
stacklevel=2)
def __init_subclass__(cls: Type["Application"]) -> None:
warnings.warn(
"Inheritance class {} from web.Application "
"is discouraged".format(cls.__name__),
DeprecationWarning,
stacklevel=2,
)
if DEBUG: # pragma: no cover
def __setattr__(self, name: str, val: Any) -> None:
if name not in self.ATTRS:
warnings.warn("Setting custom web.Application.{} attribute "
"is discouraged".format(name),
DeprecationWarning,
stacklevel=2)
warnings.warn(
"Setting custom web.Application.{} attribute "
"is discouraged".format(name),
DeprecationWarning,
stacklevel=2,
)
super().__setattr__(name, val)
# MutableMapping API
@@ -161,10 +187,11 @@ class Application(MutableMapping[str, Any]):
def _check_frozen(self) -> None:
if self._frozen:
warnings.warn("Changing state of started or joined "
"application is deprecated",
DeprecationWarning,
stacklevel=3)
warnings.warn(
"Changing state of started or joined " "application is deprecated",
DeprecationWarning,
stacklevel=3,
)
def __setitem__(self, key: str, value: Any) -> None:
self._check_frozen()
@@ -186,9 +213,7 @@ class Application(MutableMapping[str, Any]):
# Technically the loop can be None
# but we mask it by explicit type cast
# to provide more convenient type annotation
warnings.warn("loop property is deprecated",
DeprecationWarning,
stacklevel=2)
warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2)
return cast(asyncio.AbstractEventLoop, self._loop)
def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
@@ -196,7 +221,8 @@ class Application(MutableMapping[str, Any]):
loop = asyncio.get_event_loop()
if self._loop is not None and self._loop is not loop:
raise RuntimeError(
"web.Application instance initialized with different loop")
"web.Application instance initialized with different loop"
)
self._loop = loop
@@ -235,8 +261,7 @@ class Application(MutableMapping[str, Any]):
for subapp in self._subapps:
subapp.pre_freeze()
self._run_middlewares = (self._run_middlewares or
subapp._run_middlewares)
self._run_middlewares = self._run_middlewares or subapp._run_middlewares
@property
def frozen(self) -> bool:
@@ -253,41 +278,37 @@ class Application(MutableMapping[str, Any]):
@property
def debug(self) -> bool:
warnings.warn("debug property is deprecated",
DeprecationWarning,
stacklevel=2)
return self._debug
def _reg_subapp_signals(self, subapp: 'Application') -> None:
warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2)
return self._debug # type: ignore[no-any-return]
def _reg_subapp_signals(self, subapp: "Application") -> None:
def reg_handler(signame: str) -> None:
subsig = getattr(subapp, signame)
async def handler(app: 'Application') -> None:
async def handler(app: "Application") -> None:
await subsig.send(subapp)
appsig = getattr(self, signame)
appsig.append(handler)
reg_handler('on_startup')
reg_handler('on_shutdown')
reg_handler('on_cleanup')
reg_handler("on_startup")
reg_handler("on_shutdown")
reg_handler("on_cleanup")
def add_subapp(self, prefix: str,
subapp: 'Application') -> AbstractResource:
def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource:
if not isinstance(prefix, str):
raise TypeError("Prefix must be str")
prefix = prefix.rstrip('/')
prefix = prefix.rstrip("/")
if not prefix:
raise ValueError("Prefix cannot be empty")
factory = partial(PrefixedSubAppResource, prefix, subapp)
return self._add_subapp(factory, subapp)
def _add_subapp(self,
resource_factory: Callable[[], AbstractResource],
subapp: 'Application') -> AbstractResource:
def _add_subapp(
self, resource_factory: Callable[[], AbstractResource], subapp: "Application"
) -> AbstractResource:
if self.frozen:
raise RuntimeError(
"Cannot add sub application to frozen application")
raise RuntimeError("Cannot add sub application to frozen application")
if subapp.frozen:
raise RuntimeError("Cannot add frozen application")
resource = resource_factory()
@@ -299,19 +320,18 @@ class Application(MutableMapping[str, Any]):
subapp._set_loop(self._loop)
return resource
def add_domain(self, domain: str,
subapp: 'Application') -> AbstractResource:
def add_domain(self, domain: str, subapp: "Application") -> AbstractResource:
if not isinstance(domain, str):
raise TypeError("Domain must be str")
elif '*' in domain:
elif "*" in domain:
rule = MaskDomain(domain) # type: Domain
else:
rule = Domain(domain)
factory = partial(MatchedSubAppResource, rule, subapp)
return self._add_subapp(factory, subapp)
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> None:
self.router.add_routes(routes)
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
return self.router.add_routes(routes)
@property
def on_response_prepare(self) -> _RespPrepareSignal:
@@ -330,7 +350,7 @@ class Application(MutableMapping[str, Any]):
return self._on_cleanup
@property
def cleanup_ctx(self) -> 'CleanupContext':
def cleanup_ctx(self) -> "CleanupContext":
return self._cleanup_ctx
@property
@@ -341,45 +361,53 @@ class Application(MutableMapping[str, Any]):
def middlewares(self) -> _Middlewares:
return self._middlewares
def _make_handler(self, *,
loop: Optional[asyncio.AbstractEventLoop]=None,
access_log_class: Type[
AbstractAccessLogger]=AccessLogger,
**kwargs: Any) -> Server:
def _make_handler(
self,
*,
loop: Optional[asyncio.AbstractEventLoop] = None,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
**kwargs: Any,
) -> Server:
if not issubclass(access_log_class, AbstractAccessLogger):
raise TypeError(
'access_log_class must be subclass of '
'aiohttp.abc.AbstractAccessLogger, got {}'.format(
access_log_class))
"access_log_class must be subclass of "
"aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class)
)
self._set_loop(loop)
self.freeze()
kwargs['debug'] = self._debug
kwargs['access_log_class'] = access_log_class
kwargs["debug"] = self._debug
kwargs["access_log_class"] = access_log_class
if self._handler_args:
for k, v in self._handler_args.items():
kwargs[k] = v
return Server(self._handle, # type: ignore
request_factory=self._make_request,
loop=self._loop, **kwargs)
return Server(
self._handle, # type: ignore[arg-type]
request_factory=self._make_request,
loop=self._loop,
**kwargs,
)
def make_handler(self, *,
loop: Optional[asyncio.AbstractEventLoop]=None,
access_log_class: Type[
AbstractAccessLogger]=AccessLogger,
**kwargs: Any) -> Server:
def make_handler(
self,
*,
loop: Optional[asyncio.AbstractEventLoop] = None,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
**kwargs: Any,
) -> Server:
warnings.warn("Application.make_handler(...) is deprecated, "
"use AppRunner API instead",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"Application.make_handler(...) is deprecated, " "use AppRunner API instead",
DeprecationWarning,
stacklevel=2,
)
return self._make_handler(loop=loop,
access_log_class=access_log_class,
**kwargs)
return self._make_handler(
loop=loop, access_log_class=access_log_class, **kwargs
)
async def startup(self) -> None:
"""Causes on_startup signal
@@ -400,27 +428,41 @@ class Application(MutableMapping[str, Any]):
Should be called after shutdown()
"""
await self.on_cleanup.send(self)
if self.on_cleanup.frozen:
await self.on_cleanup.send(self)
else:
# If an exception occurs in startup, ensure cleanup contexts are completed.
await self._cleanup_ctx._on_cleanup(self)
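For context, a minimal sketch of the cleanup-context machinery this change protects. Code before the yield runs on startup; code after runs on cleanup, and with the branch above the teardown half still runs when startup fails before on_cleanup is frozen. The Engine class here is a hypothetical stand-in, not part of aiohttp:

import asyncio

from aiohttp import web

class Engine:
    # Stand-in for a real connection pool / database engine.
    async def close(self) -> None:
        await asyncio.sleep(0)

async def database_engine(app: web.Application):
    # Startup half: create the resource and stash it on the app.
    app["engine"] = Engine()
    yield
    # Cleanup half: release the resource on shutdown.
    await app["engine"].close()

app = web.Application()
app.cleanup_ctx.append(database_engine)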
def _make_request(self, message: RawRequestMessage,
payload: StreamReader,
protocol: RequestHandler,
writer: AbstractStreamWriter,
task: 'asyncio.Task[None]',
_cls: Type[Request]=Request) -> Request:
def _make_request(
self,
message: RawRequestMessage,
payload: StreamReader,
protocol: RequestHandler,
writer: AbstractStreamWriter,
task: "asyncio.Task[None]",
_cls: Type[Request] = Request,
) -> Request:
return _cls(
message, payload, protocol, writer, task,
message,
payload,
protocol,
writer,
task,
self._loop,
client_max_size=self._client_max_size)
client_max_size=self._client_max_size,
)
def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]:
for m in reversed(self._middlewares):
if getattr(m, '__middleware_version__', None) == 1:
if getattr(m, "__middleware_version__", None) == 1:
yield m, True
else:
warnings.warn('old-style middleware "{!r}" deprecated, '
'see #2252'.format(m),
DeprecationWarning, stacklevel=2)
warnings.warn(
'old-style middleware "{!r}" deprecated, ' "see #2252".format(m),
DeprecationWarning,
stacklevel=2,
)
yield m, False
yield _fix_request_current_app(self), True
@@ -431,14 +473,16 @@ class Application(MutableMapping[str, Any]):
match_info = await self._router.resolve(request)
if debug: # pragma: no cover
if not isinstance(match_info, AbstractMatchInfo):
raise TypeError("match_info should be AbstractMatchInfo "
"instance, not {!r}".format(match_info))
raise TypeError(
"match_info should be AbstractMatchInfo "
"instance, not {!r}".format(match_info)
)
match_info.add_app(self)
match_info.freeze()
resp = None
request._match_info = match_info # type: ignore
request._match_info = match_info
expect = request.headers.get(hdrs.EXPECT)
if expect:
resp = await match_info.expect_handler(request)
@@ -449,22 +493,24 @@ class Application(MutableMapping[str, Any]):
if self._run_middlewares:
for app in match_info.apps[::-1]:
for m, new_style in app._middlewares_handlers: # type: ignore # noqa
for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] # noqa
if new_style:
handler = partial(m, handler=handler)
handler = update_wrapper(
partial(m, handler=handler), handler
)
else:
handler = await m(app, handler) # type: ignore
handler = await m(app, handler) # type: ignore[arg-type]
resp = await handler(request)
return resp
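For reference, a new-style middleware this dispatch loop would wrap: the @web.middleware decorator sets __middleware_version__ = 1, so it takes the partial()/update_wrapper branch above. The X-Elapsed header name is purely illustrative:

import time

from aiohttp import web

@web.middleware
async def timing_middleware(request: web.Request, handler) -> web.StreamResponse:
    start = time.monotonic()
    resp = await handler(request)
    # Record how long the downstream handler chain took.
    resp.headers["X-Elapsed"] = f"{time.monotonic() - start:.6f}"
    return resp

app = web.Application(middlewares=[timing_middleware])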
def __call__(self) -> 'Application':
def __call__(self) -> "Application":
"""gunicorn compatibility"""
return self
def __repr__(self) -> str:
return "<Application 0x{:x}>".format(id(self))
return f"<Application 0x{id(self):x}>"
def __bool__(self) -> bool:
return True
@@ -473,18 +519,16 @@ class Application(MutableMapping[str, Any]):
class CleanupError(RuntimeError):
@property
def exceptions(self) -> List[BaseException]:
return self.args[1]
return cast(List[BaseException], self.args[1])
if TYPE_CHECKING: # pragma: no cover
_CleanupContextBase = FrozenList[Callable[[Application],
AsyncIterator[None]]]
_CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
else:
_CleanupContextBase = FrozenList
class CleanupContext(_CleanupContextBase):
def __init__(self) -> None:
super().__init__()
self._exits = [] # type: List[AsyncIterator[None]]
@@ -505,8 +549,7 @@ class CleanupContext(_CleanupContextBase):
except Exception as exc:
errors.append(exc)
else:
errors.append(RuntimeError("{!r} has more than one 'yield'"
.format(it)))
errors.append(RuntimeError(f"{it!r} has more than one 'yield'"))
if errors:
if len(errors) == 1:
raise errors[0]

aiohttp/web_exceptions.py
@@ -7,63 +7,63 @@ from .typedefs import LooseHeaders, StrOrURL
from .web_response import Response
__all__ = (
'HTTPException',
'HTTPError',
'HTTPRedirection',
'HTTPSuccessful',
'HTTPOk',
'HTTPCreated',
'HTTPAccepted',
'HTTPNonAuthoritativeInformation',
'HTTPNoContent',
'HTTPResetContent',
'HTTPPartialContent',
'HTTPMultipleChoices',
'HTTPMovedPermanently',
'HTTPFound',
'HTTPSeeOther',
'HTTPNotModified',
'HTTPUseProxy',
'HTTPTemporaryRedirect',
'HTTPPermanentRedirect',
'HTTPClientError',
'HTTPBadRequest',
'HTTPUnauthorized',
'HTTPPaymentRequired',
'HTTPForbidden',
'HTTPNotFound',
'HTTPMethodNotAllowed',
'HTTPNotAcceptable',
'HTTPProxyAuthenticationRequired',
'HTTPRequestTimeout',
'HTTPConflict',
'HTTPGone',
'HTTPLengthRequired',
'HTTPPreconditionFailed',
'HTTPRequestEntityTooLarge',
'HTTPRequestURITooLong',
'HTTPUnsupportedMediaType',
'HTTPRequestRangeNotSatisfiable',
'HTTPExpectationFailed',
'HTTPMisdirectedRequest',
'HTTPUnprocessableEntity',
'HTTPFailedDependency',
'HTTPUpgradeRequired',
'HTTPPreconditionRequired',
'HTTPTooManyRequests',
'HTTPRequestHeaderFieldsTooLarge',
'HTTPUnavailableForLegalReasons',
'HTTPServerError',
'HTTPInternalServerError',
'HTTPNotImplemented',
'HTTPBadGateway',
'HTTPServiceUnavailable',
'HTTPGatewayTimeout',
'HTTPVersionNotSupported',
'HTTPVariantAlsoNegotiates',
'HTTPInsufficientStorage',
'HTTPNotExtended',
'HTTPNetworkAuthenticationRequired',
"HTTPException",
"HTTPError",
"HTTPRedirection",
"HTTPSuccessful",
"HTTPOk",
"HTTPCreated",
"HTTPAccepted",
"HTTPNonAuthoritativeInformation",
"HTTPNoContent",
"HTTPResetContent",
"HTTPPartialContent",
"HTTPMultipleChoices",
"HTTPMovedPermanently",
"HTTPFound",
"HTTPSeeOther",
"HTTPNotModified",
"HTTPUseProxy",
"HTTPTemporaryRedirect",
"HTTPPermanentRedirect",
"HTTPClientError",
"HTTPBadRequest",
"HTTPUnauthorized",
"HTTPPaymentRequired",
"HTTPForbidden",
"HTTPNotFound",
"HTTPMethodNotAllowed",
"HTTPNotAcceptable",
"HTTPProxyAuthenticationRequired",
"HTTPRequestTimeout",
"HTTPConflict",
"HTTPGone",
"HTTPLengthRequired",
"HTTPPreconditionFailed",
"HTTPRequestEntityTooLarge",
"HTTPRequestURITooLong",
"HTTPUnsupportedMediaType",
"HTTPRequestRangeNotSatisfiable",
"HTTPExpectationFailed",
"HTTPMisdirectedRequest",
"HTTPUnprocessableEntity",
"HTTPFailedDependency",
"HTTPUpgradeRequired",
"HTTPPreconditionRequired",
"HTTPTooManyRequests",
"HTTPRequestHeaderFieldsTooLarge",
"HTTPUnavailableForLegalReasons",
"HTTPServerError",
"HTTPInternalServerError",
"HTTPNotImplemented",
"HTTPBadGateway",
"HTTPServiceUnavailable",
"HTTPGatewayTimeout",
"HTTPVersionNotSupported",
"HTTPVariantAlsoNegotiates",
"HTTPInsufficientStorage",
"HTTPNotExtended",
"HTTPNetworkAuthenticationRequired",
)
@@ -71,6 +71,7 @@ __all__ = (
# HTTP Exceptions
############################################################
class HTTPException(Response, Exception):
# You should set in subclasses:
@@ -81,22 +82,32 @@ class HTTPException(Response, Exception):
__http_exception__ = True
def __init__(self, *,
headers: Optional[LooseHeaders]=None,
reason: Optional[str]=None,
body: Any=None,
text: Optional[str]=None,
content_type: Optional[str]=None) -> None:
def __init__(
self,
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
if body is not None:
warnings.warn(
"body argument is deprecated for http web exceptions",
DeprecationWarning)
Response.__init__(self, status=self.status_code,
headers=headers, reason=reason,
body=body, text=text, content_type=content_type)
DeprecationWarning,
)
Response.__init__(
self,
status=self.status_code,
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
Exception.__init__(self, self.reason)
if self.body is None and not self.empty_body:
self.text = "{}: {}".format(self.status, self.reason)
self.text = f"{self.status}: {self.reason}"
def __bool__(self) -> bool:
return True
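Because HTTPException subclasses both Response and Exception, handlers raise these classes directly; the f-string above supplies the default "404: Not Found"-style body when no text is given. A minimal sketch:

from aiohttp import web

async def get_record(request: web.Request) -> web.Response:
    # Body defaults to "404: Not Found" if text is omitted.
    raise web.HTTPNotFound(text="no such record")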
@@ -150,20 +161,26 @@ class HTTPPartialContent(HTTPSuccessful):
class _HTTPMove(HTTPRedirection):
def __init__(self,
location: StrOrURL,
*,
headers: Optional[LooseHeaders]=None,
reason: Optional[str]=None,
body: Any=None,
text: Optional[str]=None,
content_type: Optional[str]=None) -> None:
def __init__(
self,
location: StrOrURL,
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
if not location:
raise ValueError("HTTP redirects need a location to redirect to.")
super().__init__(headers=headers, reason=reason,
body=body, text=text, content_type=content_type)
self.headers['Location'] = str(URL(location))
super().__init__(
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
self.headers["Location"] = str(URL(location))
self.location = location
@@ -236,19 +253,26 @@ class HTTPNotFound(HTTPClientError):
class HTTPMethodNotAllowed(HTTPClientError):
status_code = 405
def __init__(self,
method: str,
allowed_methods: Iterable[str],
*,
headers: Optional[LooseHeaders]=None,
reason: Optional[str]=None,
body: Any=None,
text: Optional[str]=None,
content_type: Optional[str]=None) -> None:
allow = ','.join(sorted(allowed_methods))
super().__init__(headers=headers, reason=reason,
body=body, text=text, content_type=content_type)
self.headers['Allow'] = allow
def __init__(
self,
method: str,
allowed_methods: Iterable[str],
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
allow = ",".join(sorted(allowed_methods))
super().__init__(
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
self.headers["Allow"] = allow
self.allowed_methods = set(allowed_methods) # type: Set[str]
self.method = method.upper()
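Usage sketch: the constructor above builds the Allow header from the sorted method names, so a handler rejecting a verb might do:

from aiohttp import web

async def read_only(request: web.Request) -> web.Response:
    if request.method not in ("GET", "HEAD"):
        # Sends "Allow: GET,HEAD" alongside the 405 status.
        raise web.HTTPMethodNotAllowed(request.method, allowed_methods=("GET", "HEAD"))
    return web.Response(text="ok")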
@@ -284,14 +308,11 @@ class HTTPPreconditionFailed(HTTPClientError):
class HTTPRequestEntityTooLarge(HTTPClientError):
status_code = 413
def __init__(self,
max_size: float,
actual_size: float,
**kwargs: Any) -> None:
def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None:
kwargs.setdefault(
'text',
'Maximum request body size {} exceeded, '
'actual body size {}'.format(max_size, actual_size)
"text",
"Maximum request body size {} exceeded, "
"actual body size {}".format(max_size, actual_size),
)
super().__init__(**kwargs)
@@ -343,17 +364,24 @@ class HTTPRequestHeaderFieldsTooLarge(HTTPClientError):
class HTTPUnavailableForLegalReasons(HTTPClientError):
status_code = 451
def __init__(self,
link: str,
*,
headers: Optional[LooseHeaders]=None,
reason: Optional[str]=None,
body: Any=None,
text: Optional[str]=None,
content_type: Optional[str]=None) -> None:
super().__init__(headers=headers, reason=reason,
body=body, text=text, content_type=content_type)
self.headers['Link'] = '<%s>; rel="blocked-by"' % link
def __init__(
self,
link: str,
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
super().__init__(
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
self.headers["Link"] = '<%s>; rel="blocked-by"' % link
self.link = link

aiohttp/web_fileresponse.py
@@ -2,26 +2,25 @@ import asyncio
import mimetypes
import os
import pathlib
from functools import partial
import sys
from typing import ( # noqa
IO,
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Iterator,
List,
Optional,
Tuple,
Union,
cast,
)
from . import hdrs
from .abc import AbstractStreamWriter
from .base_protocol import BaseProtocol
from .helpers import set_exception, set_result
from .http_writer import StreamWriter
from .log import server_logger
from .typedefs import LooseHeaders
from .helpers import ETAG_ANY, ETag
from .typedefs import Final, LooseHeaders
from .web_exceptions import (
HTTPNotModified,
HTTPPartialContent,
@@ -30,105 +29,29 @@ from .web_exceptions import (
)
from .web_response import StreamResponse
__all__ = ('FileResponse',)
__all__ = ("FileResponse",)
if TYPE_CHECKING: # pragma: no cover
from .web_request import BaseRequest # noqa
from .web_request import BaseRequest
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
NOSENDFILE = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
class SendfileStreamWriter(StreamWriter):
def __init__(self,
protocol: BaseProtocol,
loop: asyncio.AbstractEventLoop,
fobj: IO[Any],
count: int,
on_chunk_sent: _T_OnChunkSent=None) -> None:
super().__init__(protocol, loop, on_chunk_sent)
self._sendfile_buffer = [] # type: List[bytes]
self._fobj = fobj
self._count = count
self._offset = fobj.tell()
self._in_fd = fobj.fileno()
def _write(self, chunk: bytes) -> None:
# we overwrite StreamWriter._write, so nothing can be appended to
# _buffer, and nothing is written to the transport directly by the
# parent class
self.output_size += len(chunk)
self._sendfile_buffer.append(chunk)
def _sendfile_cb(self, fut: 'asyncio.Future[None]', out_fd: int) -> None:
if fut.cancelled():
return
try:
if self._do_sendfile(out_fd):
set_result(fut, None)
except Exception as exc:
set_exception(fut, exc)
def _do_sendfile(self, out_fd: int) -> bool:
try:
n = os.sendfile(out_fd,
self._in_fd,
self._offset,
self._count)
if n == 0: # in_fd EOF reached
n = self._count
except (BlockingIOError, InterruptedError):
n = 0
self.output_size += n
self._offset += n
self._count -= n
assert self._count >= 0
return self._count == 0
def _done_fut(self, out_fd: int, fut: 'asyncio.Future[None]') -> None:
self.loop.remove_writer(out_fd)
async def sendfile(self) -> None:
assert self.transport is not None
out_socket = self.transport.get_extra_info('socket').dup()
out_socket.setblocking(False)
out_fd = out_socket.fileno()
loop = self.loop
data = b''.join(self._sendfile_buffer)
try:
await loop.sock_sendall(out_socket, data)
if not self._do_sendfile(out_fd):
fut = loop.create_future()
fut.add_done_callback(partial(self._done_fut, out_fd))
loop.add_writer(out_fd, self._sendfile_cb, fut, out_fd)
await fut
except asyncio.CancelledError:
raise
except Exception:
server_logger.debug('Socket error')
self.transport.close()
finally:
out_socket.close()
await super().write_eof()
async def write_eof(self, chunk: bytes=b'') -> None:
pass
NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
class FileResponse(StreamResponse):
"""A response object can be used to send files."""
def __init__(self, path: Union[str, pathlib.Path],
chunk_size: int=256*1024,
status: int=200,
reason: Optional[str]=None,
headers: Optional[LooseHeaders]=None) -> None:
def __init__(
self,
path: Union[str, pathlib.Path],
chunk_size: int = 256 * 1024,
status: int = 200,
reason: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
) -> None:
super().__init__(status=status, reason=reason, headers=headers)
if isinstance(path, str):
@@ -137,111 +60,122 @@ class FileResponse(StreamResponse):
self._path = path
self._chunk_size = chunk_size
async def _sendfile_system(self, request: 'BaseRequest',
fobj: IO[Any],
count: int) -> AbstractStreamWriter:
# Write count bytes of fobj to resp using
# the os.sendfile system call.
#
# For details check
# https://github.com/KeepSafe/aiohttp/issues/1177
# See https://github.com/KeepSafe/aiohttp/issues/958 for details
#
# request should be an aiohttp.web.Request instance.
# fobj should be an open file object.
# count should be an integer > 0.
transport = request.transport
assert transport is not None
if (transport.get_extra_info("sslcontext") or
transport.get_extra_info("socket") is None or
self.compression):
writer = await self._sendfile_fallback(request, fobj, count)
else:
writer = SendfileStreamWriter(
request.protocol,
request._loop,
fobj,
count
)
request._payload_writer = writer
await super().prepare(request)
await writer.sendfile()
return writer
async def _sendfile_fallback(self, request: 'BaseRequest',
fobj: IO[Any],
count: int) -> AbstractStreamWriter:
# Mimic the _sendfile_system() method, but without using the
# os.sendfile() system call. This should be used on systems
# that don't support the os.sendfile().
async def _sendfile_fallback(
self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
) -> AbstractStreamWriter:
# To keep memory usage low, fobj is transferred in chunks
# controlled by the constructor's chunk_size argument.
writer = await super().prepare(request)
assert writer is not None
chunk_size = self._chunk_size
loop = asyncio.get_event_loop()
await loop.run_in_executor(None, fobj.seek, offset)
chunk = await loop.run_in_executor(None, fobj.read, chunk_size)
while chunk:
await writer.write(chunk)
count = count - chunk_size
if count <= 0:
break
chunk = await loop.run_in_executor(
None, fobj.read, min(chunk_size, count)
)
chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))
await writer.drain()
return writer
if hasattr(os, "sendfile") and not NOSENDFILE: # pragma: no cover
_sendfile = _sendfile_system
else: # pragma: no cover
_sendfile = _sendfile_fallback
async def _sendfile(
self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
) -> AbstractStreamWriter:
writer = await super().prepare(request)
assert writer is not None
async def prepare(
self,
request: 'BaseRequest'
) -> Optional[AbstractStreamWriter]:
if NOSENDFILE or sys.version_info < (3, 7) or self.compression:
return await self._sendfile_fallback(writer, fobj, offset, count)
loop = request._loop
transport = request.transport
assert transport is not None
try:
await loop.sendfile(transport, fobj, offset, count)
except NotImplementedError:
return await self._sendfile_fallback(writer, fobj, offset, count)
await super().write_eof()
return writer
@staticmethod
def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool:
if len(etags) == 1 and etags[0].value == ETAG_ANY:
return True
return any(etag.value == etag_value for etag in etags if not etag.is_weak)
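Behaviour sketch of the comparison above (RFC 7232 strong comparison: weak validators never match strongly, and a lone "*" matches anything), assuming the ETag helper exposes the value/is_weak fields used in the imports above:

from aiohttp.helpers import ETAG_ANY, ETag
from aiohttp.web import FileResponse

etags = (ETag(value="abc", is_weak=True), ETag(value="xyz", is_weak=False))
FileResponse._strong_etag_match("abc", etags)  # False: the matching entry is weak
FileResponse._strong_etag_match("xyz", etags)  # True: strong match
FileResponse._strong_etag_match("xyz", (ETag(value=ETAG_ANY, is_weak=False),))  # True: If-Match: *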
async def _not_modified(
self, request: "BaseRequest", etag_value: str, last_modified: float
) -> Optional[AbstractStreamWriter]:
self.set_status(HTTPNotModified.status_code)
self._length_check = False
self.etag = etag_value # type: ignore[assignment]
self.last_modified = last_modified # type: ignore[assignment]
# Delete any Content-Length headers provided by user. HTTP 304
# should always have empty response body
return await super().prepare(request)
async def _precondition_failed(
self, request: "BaseRequest"
) -> Optional[AbstractStreamWriter]:
self.set_status(HTTPPreconditionFailed.status_code)
self.content_length = 0
return await super().prepare(request)
async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
filepath = self._path
gzip = False
if 'gzip' in request.headers.get(hdrs.ACCEPT_ENCODING, ''):
gzip_path = filepath.with_name(filepath.name + '.gz')
if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""):
gzip_path = filepath.with_name(filepath.name + ".gz")
if gzip_path.is_file():
filepath = gzip_path
gzip = True
loop = asyncio.get_event_loop()
st = await loop.run_in_executor(None, filepath.stat)
st: os.stat_result = await loop.run_in_executor(None, filepath.stat)
modsince = request.if_modified_since
if modsince is not None and st.st_mtime <= modsince.timestamp():
self.set_status(HTTPNotModified.status_code)
self._length_check = False
# Delete any Content-Length headers provided by user. HTTP 304
# should always have empty response body
return await super().prepare(request)
etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
last_modified = st.st_mtime
# https://tools.ietf.org/html/rfc7232#section-6
ifmatch = request.if_match
if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch):
return await self._precondition_failed(request)
unmodsince = request.if_unmodified_since
if unmodsince is not None and st.st_mtime > unmodsince.timestamp():
self.set_status(HTTPPreconditionFailed.status_code)
return await super().prepare(request)
if (
unmodsince is not None
and ifmatch is None
and st.st_mtime > unmodsince.timestamp()
):
return await self._precondition_failed(request)
ifnonematch = request.if_none_match
if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch):
return await self._not_modified(request, etag_value, last_modified)
modsince = request.if_modified_since
if (
modsince is not None
and ifnonematch is None
and st.st_mtime <= modsince.timestamp()
):
return await self._not_modified(request, etag_value, last_modified)
if hdrs.CONTENT_TYPE not in self.headers:
ct, encoding = mimetypes.guess_type(str(filepath))
if not ct:
ct = 'application/octet-stream'
ct = "application/octet-stream"
should_set_ct = True
else:
encoding = 'gzip' if gzip else None
encoding = "gzip" if gzip else None
should_set_ct = False
status = self._status
@@ -273,8 +207,7 @@ class FileResponse(StreamResponse):
#
# Will do the same below. Many servers ignore this and do not
# send a Content-Range header with HTTP 416
self.headers[hdrs.CONTENT_RANGE] = 'bytes */{0}'.format(
file_size)
self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
return await super().prepare(request)
@@ -296,8 +229,9 @@
# of the representation (i.e., the server replaces the
# value of last-byte-pos with a value that is one less than
# the current length of the selected representation).
count = min(end if end is not None else file_size,
file_size) - start
count = (
min(end if end is not None else file_size, file_size) - start
)
if start >= file_size:
# HTTP 416 should be returned in this case.
@@ -309,8 +243,7 @@
# suffix-byte-range-spec with a non-zero suffix-length,
# then the byte-range-set is satisfiable. Otherwise, the
# byte-range-set is unsatisfiable.
self.headers[hdrs.CONTENT_RANGE] = 'bytes */{0}'.format(
file_size)
self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
return await super().prepare(request)
@@ -320,27 +253,36 @@
self.set_status(status)
if should_set_ct:
self.content_type = ct # type: ignore
self.content_type = ct # type: ignore[assignment]
if encoding:
self.headers[hdrs.CONTENT_ENCODING] = encoding
if gzip:
self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
self.last_modified = st.st_mtime # type: ignore
self.etag = etag_value # type: ignore[assignment]
self.last_modified = st.st_mtime # type: ignore[assignment]
self.content_length = count
self.headers[hdrs.ACCEPT_RANGES] = 'bytes'
self.headers[hdrs.ACCEPT_RANGES] = "bytes"
real_start = cast(int, start)
if status == HTTPPartialContent.status_code:
self.headers[hdrs.CONTENT_RANGE] = 'bytes {0}-{1}/{2}'.format(
real_start, real_start + count - 1, file_size)
self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
real_start, real_start + count - 1, file_size
)
fobj = await loop.run_in_executor(None, filepath.open, 'rb')
# If we are sending 0 bytes calling sendfile() will throw a ValueError
if count == 0 or request.method == hdrs.METH_HEAD or self.status in [204, 304]:
return await super().prepare(request)
fobj = await loop.run_in_executor(None, filepath.open, "rb")
if start: # be aware that start could be None or int=0 here.
await loop.run_in_executor(None, fobj.seek, start)
offset = start
else:
offset = 0
try:
return await self._sendfile(request, fobj, count)
return await self._sendfile(request, fobj, offset, count)
finally:
await loop.run_in_executor(None, fobj.close)
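A client-side sketch of the revalidation flow these preconditions enable: send back the ETag from a previous response and expect the empty 304 produced by _not_modified() when it still strongly matches.

import aiohttp

async def revalidate(url: str, etag: str) -> int:
    async with aiohttp.ClientSession() as session:
        async with session.get(url, headers={"If-None-Match": etag}) as resp:
            return resp.status  # 304 with an empty body when the ETag still matches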

aiohttp/web_log.py
@@ -10,7 +10,7 @@ from .abc import AbstractAccessLogger
from .web_request import BaseRequest
from .web_response import StreamResponse
KeyMethod = namedtuple('KeyMethod', 'key method')
KeyMethod = namedtuple("KeyMethod", "key method")
class AccessLogger(AbstractAccessLogger):
@@ -39,27 +39,27 @@ class AccessLogger(AbstractAccessLogger):
%{FOO}e os.environ['FOO']
"""
LOG_FORMAT_MAP = {
'a': 'remote_address',
't': 'request_start_time',
'P': 'process_id',
'r': 'first_request_line',
's': 'response_status',
'b': 'response_size',
'T': 'request_time',
'Tf': 'request_time_frac',
'D': 'request_time_micro',
'i': 'request_header',
'o': 'response_header',
"a": "remote_address",
"t": "request_start_time",
"P": "process_id",
"r": "first_request_line",
"s": "response_status",
"b": "response_size",
"T": "request_time",
"Tf": "request_time_frac",
"D": "request_time_micro",
"i": "request_header",
"o": "response_header",
}
LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
FORMAT_RE = re.compile(r'%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)')
CLEANUP_RE = re.compile(r'(%[^s])')
FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")
CLEANUP_RE = re.compile(r"(%[^s])")
_FORMAT_CACHE = {} # type: Dict[str, Tuple[str, List[KeyMethod]]]
def __init__(self, logger: logging.Logger,
log_format: str=LOG_FORMAT) -> None:
def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None:
"""Initialise the logger.
logger is a logger object to be used for logging.
@@ -101,119 +101,92 @@ class AccessLogger(AbstractAccessLogger):
methods = list()
for atom in self.FORMAT_RE.findall(log_format):
if atom[1] == '':
if atom[1] == "":
format_key1 = self.LOG_FORMAT_MAP[atom[0]]
m = getattr(AccessLogger, '_format_%s' % atom[0])
m = getattr(AccessLogger, "_format_%s" % atom[0])
key_method = KeyMethod(format_key1, m)
else:
format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1])
m = getattr(AccessLogger, '_format_%s' % atom[2])
key_method = KeyMethod(format_key2,
functools.partial(m, atom[1]))
m = getattr(AccessLogger, "_format_%s" % atom[2])
key_method = KeyMethod(format_key2, functools.partial(m, atom[1]))
methods.append(key_method)
log_format = self.FORMAT_RE.sub(r'%s', log_format)
log_format = self.CLEANUP_RE.sub(r'%\1', log_format)
log_format = self.FORMAT_RE.sub(r"%s", log_format)
log_format = self.CLEANUP_RE.sub(r"%\1", log_format)
return log_format, methods
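The compiled printf template plus KeyMethod list is cached in _FORMAT_CACHE, so a custom format costs one compilation per process. Typical wiring goes through run_app:

from aiohttp import web

app = web.Application()
# %a remote address, %t start time, %r request line, %s status,
# %b body size, %Tf duration with fractional seconds
web.run_app(app, access_log_format='%a %t "%r" %s %b %Tf')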
@staticmethod
def _format_i(key: str,
request: BaseRequest,
response: StreamResponse,
time: float) -> str:
def _format_i(
key: str, request: BaseRequest, response: StreamResponse, time: float
) -> str:
if request is None:
return '(no headers)'
return "(no headers)"
# suboptimal, make istr(key) once
return request.headers.get(key, '-')
return request.headers.get(key, "-")
@staticmethod
def _format_o(key: str,
request: BaseRequest,
response: StreamResponse,
time: float) -> str:
def _format_o(
key: str, request: BaseRequest, response: StreamResponse, time: float
) -> str:
# suboptimal, make istr(key) once
return response.headers.get(key, '-')
return response.headers.get(key, "-")
@staticmethod
def _format_a(request: BaseRequest,
response: StreamResponse,
time: float) -> str:
def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str:
if request is None:
return '-'
return "-"
ip = request.remote
return ip if ip is not None else '-'
return ip if ip is not None else "-"
@staticmethod
def _format_t(request: BaseRequest,
response: StreamResponse,
time: float) -> str:
def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str:
now = datetime.datetime.utcnow()
start_time = now - datetime.timedelta(seconds=time)
return start_time.strftime('[%d/%b/%Y:%H:%M:%S +0000]')
return start_time.strftime("[%d/%b/%Y:%H:%M:%S +0000]")
@staticmethod
def _format_P(request: BaseRequest,
response: StreamResponse,
time: float) -> str:
def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str:
return "<%s>" % os.getpid()
@staticmethod
def _format_r(request: BaseRequest,
response: StreamResponse,
time: float) -> str:
def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str:
if request is None:
return '-'
return '%s %s HTTP/%s.%s' % (request.method, request.path_qs,
request.version.major,
request.version.minor)
return "-"
return "{} {} HTTP/{}.{}".format(
request.method,
request.path_qs,
request.version.major,
request.version.minor,
)
@staticmethod
def _format_s(request: BaseRequest,
response: StreamResponse,
time: float) -> int:
def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int:
return response.status
@staticmethod
def _format_b(request: BaseRequest,
response: StreamResponse,
time: float) -> int:
def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int:
return response.body_length
@staticmethod
def _format_T(request: BaseRequest,
response: StreamResponse,
time: float) -> str:
def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str:
return str(round(time))
@staticmethod
def _format_Tf(request: BaseRequest,
response: StreamResponse,
time: float) -> str:
return '%06f' % time
def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str:
return "%06f" % time
@staticmethod
def _format_D(request: BaseRequest,
response: StreamResponse,
time: float) -> str:
def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str:
return str(round(time * 1000000))
def _format_line(self,
request: BaseRequest,
response: StreamResponse,
time: float) -> Iterable[Tuple[str,
Callable[[BaseRequest,
StreamResponse,
float],
str]]]:
return [(key, method(request, response, time))
for key, method in self._methods]
def _format_line(
self, request: BaseRequest, response: StreamResponse, time: float
) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]:
return [(key, method(request, response, time)) for key, method in self._methods]
def log(self,
request: BaseRequest,
response: StreamResponse,
time: float) -> None:
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
try:
fmt_info = self._format_line(request, response, time)
@@ -225,10 +198,10 @@ class AccessLogger(AbstractAccessLogger):
if key.__class__ is str:
extra[key] = value
else:
k1, k2 = key
dct = extra.get(k1, {}) # type: Any
dct[k2] = value
extra[k1] = dct
k1, k2 = key # type: ignore[misc]
dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type]
dct[k2] = value # type: ignore[index,has-type]
extra[k1] = dct # type: ignore[has-type,assignment]
self.logger.info(self._log_format % tuple(values), extra=extra)
except Exception:

aiohttp/web_middlewares.py
@@ -1,28 +1,28 @@
import re
from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar
from .typedefs import Handler
from .web_exceptions import HTTPPermanentRedirect, _HTTPMove
from .web_request import Request
from .web_response import StreamResponse
from .web_urldispatcher import SystemRoute
__all__ = (
'middleware',
'normalize_path_middleware',
"middleware",
"normalize_path_middleware",
)
if TYPE_CHECKING: # pragma: no cover
from .web_app import Application # noqa
from .web_app import Application
_Func = TypeVar('_Func')
_Func = TypeVar("_Func")
async def _check_request_resolves(request: Request,
path: str) -> Tuple[bool, Request]:
async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
alt_request = request.clone(rel_url=path)
match_info = await request.app.router.resolve(alt_request)
alt_request._match_info = match_info # type: ignore
alt_request._match_info = match_info
if match_info.http_exception is None:
return True, alt_request
@@ -31,22 +31,23 @@ async def _check_request_resolves(request: Request,
def middleware(f: _Func) -> _Func:
f.__middleware_version__ = 1 # type: ignore
f.__middleware_version__ = 1 # type: ignore[attr-defined]
return f
_Handler = Callable[[Request], Awaitable[StreamResponse]]
_Middleware = Callable[[Request, _Handler], Awaitable[StreamResponse]]
_Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]]
def normalize_path_middleware(
*, append_slash: bool=True, remove_slash: bool=False,
merge_slashes: bool=True,
redirect_class: Type[_HTTPMove]=HTTPPermanentRedirect) -> _Middleware:
"""
Middleware factory which produces a middleware that normalizes
the path of a request. By normalizing it means:
*,
append_slash: bool = True,
remove_slash: bool = False,
merge_slashes: bool = True,
redirect_class: Type[_HTTPMove] = HTTPPermanentRedirect,
) -> _Middleware:
"""Factory for producing a middleware that normalizes the path of a request.
Normalizing means:
- Add or remove a trailing slash to the path.
- Double slashes are replaced by one.
@@ -72,37 +73,35 @@ def normalize_path_middleware(
If merge_slashes is True, merge multiple consecutive slashes in the
path into one.
"""
correct_configuration = not (append_slash and remove_slash)
assert correct_configuration, "Cannot both remove and append slash"
@middleware
async def impl(request: Request, handler: _Handler) -> StreamResponse:
async def impl(request: Request, handler: Handler) -> StreamResponse:
if isinstance(request.match_info.route, SystemRoute):
paths_to_check = []
if '?' in request.raw_path:
path, query = request.raw_path.split('?', 1)
query = '?' + query
if "?" in request.raw_path:
path, query = request.raw_path.split("?", 1)
query = "?" + query
else:
query = ''
query = ""
path = request.raw_path
if merge_slashes:
paths_to_check.append(re.sub('//+', '/', path))
if append_slash and not request.path.endswith('/'):
paths_to_check.append(path + '/')
if remove_slash and request.path.endswith('/'):
paths_to_check.append(re.sub("//+", "/", path))
if append_slash and not request.path.endswith("/"):
paths_to_check.append(path + "/")
if remove_slash and request.path.endswith("/"):
paths_to_check.append(path[:-1])
if merge_slashes and append_slash:
paths_to_check.append(
re.sub('//+', '/', path + '/'))
paths_to_check.append(re.sub("//+", "/", path + "/"))
if merge_slashes and remove_slash:
merged_slashes = re.sub('//+', '/', path)
merged_slashes = re.sub("//+", "/", path)
paths_to_check.append(merged_slashes[:-1])
for path in paths_to_check:
resolves, request = await _check_request_resolves(
request, path)
path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg
resolves, request = await _check_request_resolves(request, path)
if resolves:
raise redirect_class(request.raw_path + query)
@@ -111,10 +110,10 @@ def normalize_path_middleware(
return impl
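Wiring sketch for the factory above; note the assert forbids enabling append_slash and remove_slash together:

from aiohttp import web

app = web.Application(
    # Redirect "/path/" to "/path" (and merge duplicate slashes) with a 308.
    middlewares=[web.normalize_path_middleware(append_slash=False, remove_slash=True)]
)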
def _fix_request_current_app(app: 'Application') -> _Middleware:
def _fix_request_current_app(app: "Application") -> _Middleware:
@middleware
async def impl(request: Request, handler: _Handler) -> StreamResponse:
async def impl(request: Request, handler: Handler) -> StreamResponse:
with request.match_info.set_current_app(app):
return await handler(request)
return impl

aiohttp/web_protocol.py
@@ -12,16 +12,21 @@ from typing import (
Any,
Awaitable,
Callable,
Deque,
Optional,
Sequence,
Tuple,
Type,
Union,
cast,
)
import attr
import yarl
from .abc import AbstractAccessLogger, AbstractStreamWriter
from .base_protocol import BaseProtocol
from .helpers import CeilTimeout, current_task
from .helpers import ceil_timeout
from .http import (
HttpProcessingError,
HttpRequestParser,
@@ -37,25 +42,37 @@ from .web_log import AccessLogger
from .web_request import BaseRequest
from .web_response import Response, StreamResponse
__all__ = ('RequestHandler', 'RequestPayloadError', 'PayloadAccessError')
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
if TYPE_CHECKING: # pragma: no cover
from .web_server import Server # noqa
from .web_server import Server
_RequestFactory = Callable[[RawRequestMessage,
StreamReader,
'RequestHandler',
AbstractStreamWriter,
'asyncio.Task[None]'],
BaseRequest]
_RequestFactory = Callable[
[
RawRequestMessage,
StreamReader,
"RequestHandler",
AbstractStreamWriter,
"asyncio.Task[None]",
],
BaseRequest,
]
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]
ERROR = RawRequestMessage(
'UNKNOWN', '/', HttpVersion10, {},
{}, True, False, False, False, yarl.URL('/'))
"UNKNOWN",
"/",
HttpVersion10,
{}, # type: ignore[arg-type]
{}, # type: ignore[arg-type]
True,
None,
False,
False,
yarl.URL("/"),
)
class RequestPayloadError(Exception):
@@ -66,6 +83,16 @@ class PayloadAccessError(Exception):
"""Payload was accessed after response was sent."""
@attr.s(auto_attribs=True, frozen=True, slots=True)
class _ErrInfo:
status: int
exc: BaseException
message: str
_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
class RequestHandler(BaseProtocol):
"""HTTP protocol implementation.
@@ -77,66 +104,88 @@ class RequestHandler(BaseProtocol):
status line, bad headers or incomplete payload. If any error occurs,
connection gets closed.
:param keepalive_timeout: number of seconds before closing
keep-alive connection
:type keepalive_timeout: int or None
keepalive_timeout -- number of seconds before closing
keep-alive connection
:param bool tcp_keepalive: TCP keep-alive is on, default is on
tcp_keepalive -- TCP keep-alive is on, default is on
:param bool debug: enable debug mode
debug -- enable debug mode
:param logger: custom logger object
:type logger: aiohttp.log.server_logger
logger -- custom logger object
:param access_log_class: custom class for access_logger
:type access_log_class: aiohttp.abc.AbstractAccessLogger
access_log_class -- custom class for access_logger
:param access_log: custom logging object
:type access_log: aiohttp.log.server_logger
access_log -- custom logging object
:param str access_log_format: access log format string
access_log_format -- access log format string
:param loop: Optional event loop
loop -- Optional event loop
:param int max_line_size: Optional maximum header line size
max_line_size -- Optional maximum header line size
:param int max_field_size: Optional maximum header field size
max_field_size -- Optional maximum header field size
:param int max_headers: Optional maximum header size
max_headers -- Optional maximum header size
"""
KEEPALIVE_RESCHEDULE_DELAY = 1
__slots__ = ('_request_count', '_keepalive', '_manager',
'_request_handler', '_request_factory', '_tcp_keepalive',
'_keepalive_time', '_keepalive_handle', '_keepalive_timeout',
'_lingering_time', '_messages', '_message_tail',
'_waiter', '_error_handler', '_task_handler',
'_upgrade', '_payload_parser', '_request_parser',
'_reading_paused', 'logger', 'debug', 'access_log',
'access_logger', '_close', '_force_close')
def __init__(self, manager: 'Server', *,
loop: asyncio.AbstractEventLoop,
keepalive_timeout: float=75., # NGINX default is 75 secs
tcp_keepalive: bool=True,
logger: Logger=server_logger,
access_log_class: Type[AbstractAccessLogger]=AccessLogger,
access_log: Logger=access_logger,
access_log_format: str=AccessLogger.LOG_FORMAT,
debug: bool=False,
max_line_size: int=8190,
max_headers: int=32768,
max_field_size: int=8190,
lingering_time: float=10.0):
__slots__ = (
"_request_count",
"_keepalive",
"_manager",
"_request_handler",
"_request_factory",
"_tcp_keepalive",
"_keepalive_time",
"_keepalive_handle",
"_keepalive_timeout",
"_lingering_time",
"_messages",
"_message_tail",
"_waiter",
"_task_handler",
"_upgrade",
"_payload_parser",
"_request_parser",
"_reading_paused",
"logger",
"debug",
"access_log",
"access_logger",
"_close",
"_force_close",
"_current_request",
)
def __init__(
self,
manager: "Server",
*,
loop: asyncio.AbstractEventLoop,
keepalive_timeout: float = 75.0, # NGINX default is 75 secs
tcp_keepalive: bool = True,
logger: Logger = server_logger,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log: Logger = access_logger,
access_log_format: str = AccessLogger.LOG_FORMAT,
debug: bool = False,
max_line_size: int = 8190,
max_headers: int = 32768,
max_field_size: int = 8190,
lingering_time: float = 10.0,
read_bufsize: int = 2 ** 16,
auto_decompress: bool = True,
):
super().__init__(loop)
self._request_count = 0
self._keepalive = False
self._current_request = None # type: Optional[BaseRequest]
self._manager = manager # type: Optional[Server]
self._request_handler = manager.request_handler # type: Optional[_RequestHandler] # noqa
self._request_factory = manager.request_factory # type: Optional[_RequestFactory] # noqa
self._request_handler: Optional[_RequestHandler] = manager.request_handler
self._request_factory: Optional[_RequestFactory] = manager.request_factory
self._tcp_keepalive = tcp_keepalive
# placeholder to be replaced on keepalive timeout setup
@@ -145,28 +194,32 @@ class RequestHandler(BaseProtocol):
self._keepalive_timeout = keepalive_timeout
self._lingering_time = float(lingering_time)
self._messages = deque() # type: Any # Python 3.5 has no typing.Deque
self._message_tail = b''
self._messages: Deque[_MsgType] = deque()
self._message_tail = b""
self._waiter = None # type: Optional[asyncio.Future[None]]
self._error_handler = None # type: Optional[asyncio.Task[None]]
self._task_handler = None # type: Optional[asyncio.Task[None]]
self._upgrade = False
self._payload_parser = None # type: Any
self._request_parser = HttpRequestParser(
self, loop,
self,
loop,
read_bufsize,
max_line_size=max_line_size,
max_field_size=max_field_size,
max_headers=max_headers,
payload_exception=RequestPayloadError) # type: Optional[HttpRequestParser] # noqa
payload_exception=RequestPayloadError,
auto_decompress=auto_decompress,
) # type: Optional[HttpRequestParser]
self.logger = logger
self.debug = debug
self.access_log = access_log
if access_log:
self.access_logger = access_log_class(
access_log, access_log_format) # type: Optional[AbstractAccessLogger] # noqa
access_log, access_log_format
) # type: Optional[AbstractAccessLogger]
else:
self.access_logger = None
@@ -176,16 +229,19 @@ class RequestHandler(BaseProtocol):
def __repr__(self) -> str:
return "<{} {}>".format(
self.__class__.__name__,
'connected' if self.transport is not None else 'disconnected')
"connected" if self.transport is not None else "disconnected",
)
@property
def keepalive_timeout(self) -> float:
return self._keepalive_timeout
async def shutdown(self, timeout: Optional[float]=15.0) -> None:
"""Worker process is about to exit, we need cleanup everything and
stop accepting requests. It is especially important for keep-alive
connections."""
async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
"""Do worker process exit preparations.
We need to clean up everything and stop accepting requests.
It is especially important for keep-alive connections.
"""
self._force_close = True
if self._keepalive_handle is not None:
@@ -196,13 +252,11 @@ class RequestHandler(BaseProtocol):
# wait for handlers
with suppress(asyncio.CancelledError, asyncio.TimeoutError):
with CeilTimeout(timeout, loop=self._loop):
if (self._error_handler is not None and
not self._error_handler.done()):
await self._error_handler
async with ceil_timeout(timeout):
if self._current_request is not None:
self._current_request._cancel(asyncio.CancelledError())
if (self._task_handler is not None and
not self._task_handler.done()):
if self._task_handler is not None and not self._task_handler.done():
await self._task_handler
# force-close non-idle handler
@@ -240,11 +294,15 @@ class RequestHandler(BaseProtocol):
if self._keepalive_handle is not None:
self._keepalive_handle.cancel()
if self._current_request is not None:
if exc is None:
exc = ConnectionResetError("Connection lost")
self._current_request._cancel(exc)
if self._task_handler is not None:
self._task_handler.cancel()
if self._error_handler is not None:
self._error_handler.cancel()
if self._waiter is not None:
self._waiter.cancel()
self._task_handler = None
@@ -260,7 +318,7 @@ class RequestHandler(BaseProtocol):
if self._message_tail:
self._payload_parser.feed_data(self._message_tail)
self._message_tail = b''
self._message_tail = b""
def eof_received(self) -> None:
pass
@@ -269,40 +327,30 @@ class RequestHandler(BaseProtocol):
if self._force_close or self._close:
return
# parse http messages
messages: Sequence[_MsgType]
if self._payload_parser is None and not self._upgrade:
assert self._request_parser is not None
try:
messages, upgraded, tail = self._request_parser.feed_data(data)
except HttpProcessingError as exc:
# something happened during parsing
self._error_handler = self._loop.create_task(
self.handle_parse_error(
StreamWriter(self, self._loop),
400, exc, exc.message))
self.close()
except Exception as exc:
# 500: internal error
self._error_handler = self._loop.create_task(
self.handle_parse_error(
StreamWriter(self, self._loop),
500, exc))
self.close()
else:
if messages:
# sometimes the parser returns no messages
for (msg, payload) in messages:
self._request_count += 1
self._messages.append((msg, payload))
messages = [
(_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
]
upgraded = False
tail = b""
waiter = self._waiter
if waiter is not None:
if not waiter.done():
# don't set result twice
waiter.set_result(None)
for msg, payload in messages or ():
self._request_count += 1
self._messages.append((msg, payload))
self._upgrade = upgraded
if upgraded and tail:
self._message_tail = tail
waiter = self._waiter
if messages and waiter is not None and not waiter.done():
# don't set result twice
waiter.set_result(None)
self._upgrade = upgraded
if upgraded and tail:
self._message_tail = tail
# no parser, just store
elif self._payload_parser is None and self._upgrade and data:
@@ -325,14 +373,17 @@ class RequestHandler(BaseProtocol):
self._keepalive_handle = None
def close(self) -> None:
"""Stop accepting new pipelinig messages and close
connection when handlers done processing messages"""
"""Close connection.
Stop accepting new pipelining messages and close
connection when handlers done processing messages.
"""
self._close = True
if self._waiter:
self._waiter.cancel()
def force_close(self) -> None:
"""Force close connection"""
"""Forcefully close connection."""
self._force_close = True
if self._waiter:
self._waiter.cancel()
@@ -340,12 +391,11 @@ class RequestHandler(BaseProtocol):
self.transport.close()
self.transport = None
def log_access(self,
request: BaseRequest,
response: StreamResponse,
time: float) -> None:
def log_access(
self, request: BaseRequest, response: StreamResponse, time: float
) -> None:
if self.access_logger is not None:
self.access_logger.log(request, response, time)
self.access_logger.log(request, response, self._loop.time() - time)
def log_debug(self, *args: Any, **kw: Any) -> None:
if self.debug:
@@ -369,7 +419,47 @@ class RequestHandler(BaseProtocol):
# not all request handlers are done,
# reschedule itself to next second
self._keepalive_handle = self._loop.call_later(
self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive)
self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive
)
async def _handle_request(
self,
request: BaseRequest,
start_time: float,
request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
) -> Tuple[StreamResponse, bool]:
assert self._request_handler is not None
try:
try:
self._current_request = request
resp = await request_handler(request)
finally:
self._current_request = None
except HTTPException as exc:
resp = exc
reset = await self.finish_response(request, resp, start_time)
except asyncio.CancelledError:
raise
except asyncio.TimeoutError as exc:
self.log_debug("Request handler timed out.", exc_info=exc)
resp = self.handle_error(request, 504)
reset = await self.finish_response(request, resp, start_time)
except Exception as exc:
resp = self.handle_error(request, 500, exc)
reset = await self.finish_response(request, resp, start_time)
else:
# Deprecation warning (See #2415)
if getattr(resp, "__http_exception__", False):
warnings.warn(
"returning HTTPException object is deprecated "
"(#2415) and will be removed, "
"please raise the exception instead",
DeprecationWarning,
)
reset = await self.finish_response(request, resp, start_time)
return resp, reset
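The deprecation branch above concerns handlers that return a web exception instead of raising it; both still work, but only raising avoids the warning:

from aiohttp import web

async def old_style(request: web.Request) -> web.StreamResponse:
    return web.HTTPNotFound()  # triggers the DeprecationWarning branch above

async def new_style(request: web.Request) -> web.StreamResponse:
    raise web.HTTPNotFound()  # caught by the HTTPException clause instead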
async def start(self) -> None:
"""Process incoming request.
@@ -403,114 +493,76 @@ class RequestHandler(BaseProtocol):
message, payload = self._messages.popleft()
if self.access_log:
now = loop.time()
start = loop.time()
manager.requests_count += 1
writer = StreamWriter(self, loop)
request = self._request_factory(
message, payload, self, writer, handler)
if isinstance(message, _ErrInfo):
# make request_factory work
request_handler = self._make_error_handler(message)
message = ERROR
else:
request_handler = self._request_handler
request = self._request_factory(message, payload, self, writer, handler)
try:
# a new task is used for copy context vars (#3406)
task = self._loop.create_task(
self._request_handler(request))
self._handle_request(request, start, request_handler)
)
try:
resp = await task
except HTTPException as exc:
resp = exc
resp, reset = await task
except (asyncio.CancelledError, ConnectionError):
self.log_debug('Ignored premature client disconnection')
self.log_debug("Ignored premature client disconnection")
break
except asyncio.TimeoutError as exc:
self.log_debug('Request handler timed out.', exc_info=exc)
resp = self.handle_error(request, 504)
except Exception as exc:
resp = self.handle_error(request, 500, exc)
else:
# Deprecation warning (See #2415)
if getattr(resp, '__http_exception__', False):
warnings.warn(
"returning HTTPException object is deprecated "
"(#2415) and will be removed, "
"please raise the exception instead",
DeprecationWarning)
# Drop the processed task from asyncio.Task.all_tasks() early
del task
if self.debug:
if not isinstance(resp, StreamResponse):
if resp is None:
raise RuntimeError("Missing return "
"statement on request handler")
else:
raise RuntimeError("Web-handler should return "
"a response instance, "
"got {!r}".format(resp))
try:
prepare_meth = resp.prepare
except AttributeError:
if resp is None:
raise RuntimeError("Missing return "
"statement on request handler")
else:
raise RuntimeError("Web-handler should return "
"a response instance, "
"got {!r}".format(resp))
try:
await prepare_meth(request)
await resp.write_eof()
except ConnectionError:
self.log_debug('Ignored premature client disconnection 2')
if reset:
self.log_debug("Ignored premature client disconnection 2")
break
# notify server about keep-alive
self._keepalive = bool(resp.keep_alive)
# log access
if self.access_log:
self.log_access(request, resp, loop.time() - now)
# check payload
if not payload.is_eof():
lingering_time = self._lingering_time
if not self._force_close and lingering_time:
self.log_debug(
'Start lingering close timer for %s sec.',
lingering_time)
"Start lingering close timer for %s sec.", lingering_time
)
now = loop.time()
end_t = now + lingering_time
with suppress(
asyncio.TimeoutError, asyncio.CancelledError):
with suppress(asyncio.TimeoutError, asyncio.CancelledError):
while not payload.is_eof() and now < end_t:
with CeilTimeout(end_t - now, loop=loop):
async with ceil_timeout(end_t - now):
# read and ignore
await payload.readany()
now = loop.time()
# if payload still uncompleted
if not payload.is_eof() and not self._force_close:
self.log_debug('Uncompleted request.')
self.log_debug("Uncompleted request.")
self.close()
payload.set_exception(PayloadAccessError())
except asyncio.CancelledError:
self.log_debug('Ignored premature client disconnection ')
self.log_debug("Ignored premature client disconnection ")
break
except RuntimeError as exc:
if self.debug:
self.log_exception(
'Unhandled runtime exception', exc_info=exc)
self.log_exception("Unhandled runtime exception", exc_info=exc)
self.force_close()
except Exception as exc:
self.log_exception('Unhandled exception', exc_info=exc)
self.log_exception("Unhandled exception", exc_info=exc)
self.force_close()
finally:
if self.transport is None and resp is not None:
self.log_debug('Ignored premature client disconnection.')
self.log_debug("Ignored premature client disconnection.")
elif not self._force_close:
if self._keepalive and not self._close:
# start keep-alive timer
@@ -519,81 +571,111 @@ class RequestHandler(BaseProtocol):
self._keepalive_time = now
if self._keepalive_handle is None:
self._keepalive_handle = loop.call_at(
now + keepalive_timeout,
self._process_keepalive)
now + keepalive_timeout, self._process_keepalive
)
else:
break
# remove handler, close transport if no handlers left
if not self._force_close:
self._task_handler = None
if self.transport is not None and self._error_handler is None:
if self.transport is not None:
self.transport.close()
def handle_error(self,
request: BaseRequest,
status: int=500,
exc: Optional[BaseException]=None,
message: Optional[str]=None) -> StreamResponse:
async def finish_response(
self, request: BaseRequest, resp: StreamResponse, start_time: float
) -> bool:
"""Prepare the response and write_eof, then log access.
This has to
be called within the context of any exception so the access logger
can get exception information. Returns True if the client disconnects
prematurely.
"""
if self._request_parser is not None:
self._request_parser.set_upgraded(False)
self._upgrade = False
if self._message_tail:
self._request_parser.feed_data(self._message_tail)
self._message_tail = b""
try:
prepare_meth = resp.prepare
except AttributeError:
if resp is None:
raise RuntimeError("Missing return " "statement on request handler")
else:
raise RuntimeError(
"Web-handler should return "
"a response instance, "
"got {!r}".format(resp)
)
try:
await prepare_meth(request)
await resp.write_eof()
except ConnectionError:
self.log_access(request, resp, start_time)
return True
else:
self.log_access(request, resp, start_time)
return False
def handle_error(
self,
request: BaseRequest,
status: int = 500,
exc: Optional[BaseException] = None,
message: Optional[str] = None,
) -> StreamResponse:
"""Handle errors.
Returns HTTP response with specific status code. Logs additional
information. It always closes current connection."""
information. It always closes current connection.
"""
self.log_exception("Error handling request", exc_info=exc)
ct = 'text/plain'
if status == HTTPStatus.INTERNAL_SERVER_ERROR:
title = '{0.value} {0.phrase}'.format(
HTTPStatus.INTERNAL_SERVER_ERROR
# some data already got sent, connection is broken
if request.writer.output_size > 0:
raise ConnectionError(
"Response is sent already, cannot send another response "
"with the error message"
)
ct = "text/plain"
if status == HTTPStatus.INTERNAL_SERVER_ERROR:
title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
tb = None
if self.debug:
with suppress(Exception):
tb = traceback.format_exc()
if 'text/html' in request.headers.get('Accept', ''):
if "text/html" in request.headers.get("Accept", ""):
if tb:
tb = html_escape(tb)
msg = '<h2>Traceback:</h2>\n<pre>{}</pre>'.format(tb)
msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
message = (
"<html><head>"
"<title>{title}</title>"
"</head><body>\n<h1>{title}</h1>"
"\n{msg}\n</body></html>\n"
).format(title=title, msg=msg)
ct = 'text/html'
ct = "text/html"
else:
if tb:
msg = tb
message = title + '\n\n' + msg
message = title + "\n\n" + msg
resp = Response(status=status, text=message, content_type=ct)
resp.force_close()
# some data already got sent, connection is broken
if request.writer.output_size > 0 or self.transport is None:
self.force_close()
return resp
async def handle_parse_error(self,
writer: AbstractStreamWriter,
status: int,
exc: Optional[BaseException]=None,
message: Optional[str]=None) -> None:
request = BaseRequest( # type: ignore
ERROR,
EMPTY_PAYLOAD,
self, writer,
current_task(),
self._loop)
def _make_error_handler(
self, err_info: _ErrInfo
) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
async def handler(request: BaseRequest) -> StreamResponse:
return self.handle_error(
request, err_info.status, err_info.exc, err_info.message
)
resp = self.handle_error(request, status, exc, message)
await resp.prepare(request)
await resp.write_eof()
if self.transport is not None:
self.transport.close()
self._error_handler = None
return handler

@ -7,10 +7,9 @@ import string
import tempfile
import types
import warnings
from email.utils import parsedate
from http.cookies import SimpleCookie
from types import MappingProxyType
from typing import ( # noqa
from typing import (
TYPE_CHECKING,
Any,
Dict,
@ -18,6 +17,7 @@ from typing import ( # noqa
Mapping,
MutableMapping,
Optional,
Pattern,
Tuple,
Union,
cast,
@ -30,12 +30,24 @@ from yarl import URL
from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import DEBUG, ChainMapProxy, HeadersMixin, reify, sentinel
from .helpers import (
DEBUG,
ETAG_ANY,
LIST_QUOTED_ETAG_RE,
ChainMapProxy,
ETag,
HeadersMixin,
parse_http_date,
reify,
sentinel,
)
from .http_parser import RawRequestMessage
from .http_writer import HttpVersion
from .multipart import BodyPartReader, MultipartReader
from .streams import EmptyStreamReader, StreamReader
from .typedefs import (
DEFAULT_JSON_DECODER,
Final,
JSONDecoder,
LooseHeaders,
RawHeaders,
@ -44,48 +56,51 @@ from .typedefs import (
from .web_exceptions import HTTPRequestEntityTooLarge
from .web_response import StreamResponse
__all__ = ('BaseRequest', 'FileField', 'Request')
__all__ = ("BaseRequest", "FileField", "Request")
if TYPE_CHECKING: # pragma: no cover
from .web_app import Application # noqa
from .web_urldispatcher import UrlMappingMatchInfo # noqa
from .web_protocol import RequestHandler # noqa
from .web_app import Application
from .web_protocol import RequestHandler
from .web_urldispatcher import UrlMappingMatchInfo
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class FileField:
name = attr.ib(type=str)
filename = attr.ib(type=str)
file = attr.ib(type=io.BufferedReader)
content_type = attr.ib(type=str)
headers = attr.ib(type=CIMultiDictProxy) # type: CIMultiDictProxy[str]
name: str
filename: str
file: io.BufferedReader
content_type: str
headers: "CIMultiDictProxy[str]"
_TCHAR = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
# '-' at the end to prevent interpretation as range in a char class
_TOKEN = r'[{tchar}]+'.format(tchar=_TCHAR)
_TOKEN: Final[str] = fr"[{_TCHAR}]+"
_QDTEXT = r'[{}]'.format(
r''.join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F))))
_QDTEXT: Final[str] = r"[{}]".format(
r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
)
# qdtext includes 0x5C to escape 0x5D ('\]')
# qdtext excludes obs-text (because obsoleted, and encoding not specified)
_QUOTED_PAIR = r'\\[\t !-~]'
_QUOTED_PAIR: Final[str] = r"\\[\t !-~]"
_QUOTED_STRING = r'"(?:{quoted_pair}|{qdtext})*"'.format(
qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR)
_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format(
qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR
)
_FORWARDED_PAIR = (
r'({token})=({token}|{quoted_string})(:\d{{1,4}})?'.format(
token=_TOKEN,
quoted_string=_QUOTED_STRING))
_FORWARDED_PAIR: Final[
str
] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
token=_TOKEN, quoted_string=_QUOTED_STRING
)
_QUOTED_PAIR_REPLACE_RE = re.compile(r'\\([\t !-~])')
_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])")
# same pattern as _QUOTED_PAIR but contains a capture group
_FORWARDED_PAIR_RE = re.compile(_FORWARDED_PAIR)
_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR)
############################################################
# HTTP Request
@ -94,25 +109,51 @@ _FORWARDED_PAIR_RE = re.compile(_FORWARDED_PAIR)
class BaseRequest(MutableMapping[str, Any], HeadersMixin):
POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT,
hdrs.METH_TRACE, hdrs.METH_DELETE}
POST_METHODS = {
hdrs.METH_PATCH,
hdrs.METH_POST,
hdrs.METH_PUT,
hdrs.METH_TRACE,
hdrs.METH_DELETE,
}
ATTRS = HeadersMixin.ATTRS | frozenset([
'_message', '_protocol', '_payload_writer', '_payload', '_headers',
'_method', '_version', '_rel_url', '_post', '_read_bytes',
'_state', '_cache', '_task', '_client_max_size', '_loop',
'_transport_sslcontext', '_transport_peername'])
ATTRS = HeadersMixin.ATTRS | frozenset(
[
"_message",
"_protocol",
"_payload_writer",
"_payload",
"_headers",
"_method",
"_version",
"_rel_url",
"_post",
"_read_bytes",
"_state",
"_cache",
"_task",
"_client_max_size",
"_loop",
"_transport_sslcontext",
"_transport_peername",
]
)
def __init__(self, message: RawRequestMessage,
payload: StreamReader, protocol: 'RequestHandler',
payload_writer: AbstractStreamWriter,
task: 'asyncio.Task[None]',
loop: asyncio.AbstractEventLoop,
*, client_max_size: int=1024**2,
state: Optional[Dict[str, Any]]=None,
scheme: Optional[str]=None,
host: Optional[str]=None,
remote: Optional[str]=None) -> None:
def __init__(
self,
message: RawRequestMessage,
payload: StreamReader,
protocol: "RequestHandler",
payload_writer: AbstractStreamWriter,
task: "asyncio.Task[None]",
loop: asyncio.AbstractEventLoop,
*,
client_max_size: int = 1024 ** 2,
state: Optional[Dict[str, Any]] = None,
scheme: Optional[str] = None,
host: Optional[str] = None,
remote: Optional[str] = None,
) -> None:
if state is None:
state = {}
self._message = message
@ -124,7 +165,9 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
self._method = message.method
self._version = message.version
self._rel_url = message.url
self._post = None # type: Optional[MultiDictProxy[Union[str, bytes, FileField]]] # noqa
self._post = (
None
) # type: Optional[MultiDictProxy[Union[str, bytes, FileField]]]
self._read_bytes = None # type: Optional[bytes]
self._state = state
@ -135,54 +178,58 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
transport = self._protocol.transport
assert transport is not None
self._transport_sslcontext = transport.get_extra_info('sslcontext')
self._transport_peername = transport.get_extra_info('peername')
self._transport_sslcontext = transport.get_extra_info("sslcontext")
self._transport_peername = transport.get_extra_info("peername")
if scheme is not None:
self._cache['scheme'] = scheme
self._cache["scheme"] = scheme
if host is not None:
self._cache['host'] = host
self._cache["host"] = host
if remote is not None:
self._cache['remote'] = remote
self._cache["remote"] = remote
def clone(self, *, method: str=sentinel, rel_url: StrOrURL=sentinel,
headers: LooseHeaders=sentinel, scheme: str=sentinel,
host: str=sentinel,
remote: str=sentinel) -> 'BaseRequest':
def clone(
self,
*,
method: str = sentinel,
rel_url: StrOrURL = sentinel,
headers: LooseHeaders = sentinel,
scheme: str = sentinel,
host: str = sentinel,
remote: str = sentinel,
) -> "BaseRequest":
"""Clone itself with replacement some attributes.
Creates and returns a new instance of Request object. If no parameters
are given, an exact copy is returned. If a parameter is not passed, it
will reuse the one from the current request object.
"""
if self._read_bytes:
raise RuntimeError("Cannot clone request "
"after reading its content")
raise RuntimeError("Cannot clone request " "after reading its content")
dct = {} # type: Dict[str, Any]
if method is not sentinel:
dct['method'] = method
dct["method"] = method
if rel_url is not sentinel:
new_url = URL(rel_url)
dct['url'] = new_url
dct['path'] = str(new_url)
dct["url"] = new_url
dct["path"] = str(new_url)
if headers is not sentinel:
# a copy semantic
dct['headers'] = CIMultiDictProxy(CIMultiDict(headers))
dct['raw_headers'] = tuple((k.encode('utf-8'), v.encode('utf-8'))
for k, v in headers.items())
dct["headers"] = CIMultiDictProxy(CIMultiDict(headers))
dct["raw_headers"] = tuple(
(k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
)
message = self._message._replace(**dct)
kwargs = {}
if scheme is not sentinel:
kwargs['scheme'] = scheme
kwargs["scheme"] = scheme
if host is not sentinel:
kwargs['host'] = host
kwargs["host"] = host
if remote is not sentinel:
kwargs['remote'] = remote
kwargs["remote"] = remote
return self.__class__(
message,
@ -193,14 +240,15 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
self._loop,
client_max_size=self._client_max_size,
state=self._state.copy(),
**kwargs)
**kwargs,
)
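A minimal usage sketch of clone(), assuming a handler context; the request name and the scheme/host values are illustrative, not part of this diff:

    # Re-scheme a request before handing it to shared helper code.
    # Reading the body first would raise RuntimeError, per the guard above.
    proxied = request.clone(scheme="https", host="internal.example")
    assert proxied.scheme == "https" and proxied.host == "internal.example"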
@property
def task(self) -> 'asyncio.Task[None]':
def task(self) -> "asyncio.Task[None]":
return self._task
@property
def protocol(self) -> 'RequestHandler':
def protocol(self) -> "RequestHandler":
return self._protocol
@property
@ -215,9 +263,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
@reify
def message(self) -> RawRequestMessage:
warnings.warn("Request.message is deprecated",
DeprecationWarning,
stacklevel=3)
warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3)
return self._message
@reify
@ -226,9 +272,9 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
@reify
def loop(self) -> asyncio.AbstractEventLoop:
warnings.warn("request.loop property is deprecated",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"request.loop property is deprecated", DeprecationWarning, stacklevel=2
)
return self._loop
# MutableMapping API
@ -253,7 +299,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
@reify
def secure(self) -> bool:
"""A bool indicating if the request is handled with SSL."""
return self.scheme == 'https'
return self.scheme == "https"
@reify
def forwarded(self) -> Tuple[Mapping[str, str], ...]:
@ -284,37 +330,36 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
elems.append(types.MappingProxyType(elem))
while 0 <= pos < length:
match = _FORWARDED_PAIR_RE.match(field_value, pos)
if match is not None: # got a valid forwarded-pair
if match is not None: # got a valid forwarded-pair
if need_separator:
# bad syntax here, skip to next comma
pos = field_value.find(',', pos)
pos = field_value.find(",", pos)
else:
name, value, port = match.groups()
if value[0] == '"':
# quoted string: remove quotes and unescape
value = _QUOTED_PAIR_REPLACE_RE.sub(r'\1',
value[1:-1])
value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1])
if port:
value += port
elem[name.lower()] = value
pos += len(match.group(0))
need_separator = True
elif field_value[pos] == ',': # next forwarded-element
elif field_value[pos] == ",": # next forwarded-element
need_separator = False
elem = {}
elems.append(types.MappingProxyType(elem))
pos += 1
elif field_value[pos] == ';': # next forwarded-pair
elif field_value[pos] == ";": # next forwarded-pair
need_separator = False
pos += 1
elif field_value[pos] in ' \t':
elif field_value[pos] in " \t":
# Allow whitespace even between forwarded-pairs, though
# RFC 7239 doesn't. This simplifies code and is in line
# with Postel's law.
pos += 1
else:
# bad syntax here, skip to next comma
pos = field_value.find(',', pos)
pos = field_value.find(",", pos)
return tuple(elems)
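A hedged reading sketch for the forwarded property parsed above; the header value is illustrative:

    # Each element is a read-only mapping for one RFC 7239 forwarded-element,
    # e.g. Forwarded: for=192.0.2.60;proto=https, for=198.51.100.17
    for elem in request.forwarded:
        client = elem.get("for")   # "192.0.2.60", then "198.51.100.17"
        proto = elem.get("proto")  # "https", then None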
@reify
@ -329,9 +374,9 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
'http' or 'https'.
"""
if self._transport_sslcontext:
return 'https'
return "https"
else:
return 'http'
return "http"
@reify
def method(self) -> str:
@ -342,7 +387,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return self._method
@reify
def version(self) -> Tuple[int, int]:
def version(self) -> HttpVersion:
"""Read only property for getting HTTP version of request.
Returns aiohttp.protocol.HttpVersion instance.
@ -362,8 +407,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
host = self._message.headers.get(hdrs.HOST)
if host is not None:
return host
else:
return socket.getfqdn()
return socket.getfqdn()
@reify
def remote(self) -> Optional[str]:
@ -374,10 +418,11 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
- overridden value by .clone(remote=new_remote) call.
- peername of opened socket
"""
if self._transport_peername is None:
return None
if isinstance(self._transport_peername, (list, tuple)):
return self._transport_peername[0]
else:
return self._transport_peername
return str(self._transport_peername[0])
return str(self._transport_peername)
@reify
def url(self) -> URL:
@ -402,7 +447,8 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
@reify
def raw_path(self) -> str:
""" The URL including raw *PATH INFO* without the host or scheme.
"""The URL including raw *PATH INFO* without the host or scheme.
Warning, the path is unquoted and may contain invalid URL characters
E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
@ -410,9 +456,9 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return self._message.path
@reify
def query(self) -> 'MultiDictProxy[str]':
def query(self) -> "MultiDictProxy[str]":
"""A multidict with all the variables in the query string."""
return self._rel_url.query
return MultiDictProxy(self._rel_url.query)
@reify
def query_string(self) -> str:
@ -423,7 +469,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return self._rel_url.query_string
@reify
def headers(self) -> 'CIMultiDictProxy[str]':
def headers(self) -> "CIMultiDictProxy[str]":
"""A case-insensitive multidict proxy with all headers."""
return self._headers
@ -432,24 +478,13 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
"""A sequence of pairs for all headers."""
return self._message.raw_headers
@staticmethod
def _http_date(_date_str: str) -> Optional[datetime.datetime]:
"""Process a date string, return a datetime object
"""
if _date_str is not None:
timetuple = parsedate(_date_str)
if timetuple is not None:
return datetime.datetime(*timetuple[:6],
tzinfo=datetime.timezone.utc)
return None
@reify
def if_modified_since(self) -> Optional[datetime.datetime]:
"""The value of If-Modified-Since HTTP header, or None.
This header is represented as a `datetime` object.
"""
return self._http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
@reify
def if_unmodified_since(self) -> Optional[datetime.datetime]:
@ -457,7 +492,53 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
This header is represented as a `datetime` object.
"""
return self._http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
@staticmethod
def _etag_values(etag_header: str) -> Iterator[ETag]:
"""Extract `ETag` objects from raw header."""
if etag_header == ETAG_ANY:
yield ETag(
is_weak=False,
value=ETAG_ANY,
)
else:
for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
is_weak, value, garbage = match.group(2, 3, 4)
# Any symbol captured by 4th group means
# that the following sequence is invalid.
if garbage:
break
yield ETag(
is_weak=bool(is_weak),
value=value,
)
@classmethod
def _if_match_or_none_impl(
cls, header_value: Optional[str]
) -> Optional[Tuple[ETag, ...]]:
if not header_value:
return None
return tuple(cls._etag_values(header_value))
@reify
def if_match(self) -> Optional[Tuple[ETag, ...]]:
"""The value of If-Match HTTP header, or None.
This header is represented as a `tuple` of `ETag` objects.
"""
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))
@reify
def if_none_match(self) -> Optional[Tuple[ETag, ...]]:
"""The value of If-None-Match HTTP header, or None.
This header is represented as a `tuple` of `ETag` objects.
"""
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH))
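A conditional-GET sketch built on the new ETag accessors; current_etag is an assumed application-supplied value and Response is assumed imported from aiohttp.web:

    etags = request.if_none_match  # Optional[Tuple[ETag, ...]]
    if etags and any(e.value in (ETAG_ANY, current_etag) for e in etags):
        return Response(status=304)  # Not Modified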
@reify
def if_range(self) -> Optional[datetime.datetime]:
@ -465,7 +546,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
This header is represented as a `datetime` object.
"""
return self._http_date(self.headers.get(hdrs.IF_RANGE))
return parse_http_date(self.headers.get(hdrs.IF_RANGE))
@reify
def keep_alive(self) -> bool:
@ -478,10 +559,9 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
A read-only dictionary-like object.
"""
raw = self.headers.get(hdrs.COOKIE, '')
parsed = SimpleCookie(raw)
return MappingProxyType(
{key: val.value for key, val in parsed.items()})
raw = self.headers.get(hdrs.COOKIE, "")
parsed = SimpleCookie(raw) # type: SimpleCookie[str]
return MappingProxyType({key: val.value for key, val in parsed.items()})
@reify
def http_range(self) -> slice:
@ -494,7 +574,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
start, end = None, None
if rng is not None:
try:
pattern = r'^bytes=(\d*)-(\d*)$'
pattern = r"^bytes=(\d*)-(\d*)$"
start, end = re.findall(pattern, rng)[0]
except IndexError: # pattern was not found in header
raise ValueError("range not in acceptable format")
@ -512,10 +592,10 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
end += 1
if start >= end:
raise ValueError('start cannot be after end')
raise ValueError("start cannot be after end")
if start is end is None: # No valid range supplied
raise ValueError('No start or end of range specified')
raise ValueError("No start or end of range specified")
return slice(start, end, 1)
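A hedged sketch of serving a byte range via http_range; file_bytes is an assumed in-memory payload and the Content-Range header is omitted for brevity:

    try:
        rng = request.http_range  # a slice; raises ValueError if malformed
    except ValueError:
        return Response(status=416)  # Range Not Satisfiable
    return Response(body=file_bytes[rng], status=206)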
@ -528,8 +608,8 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
def has_body(self) -> bool:
"""Return True if request's HTTP BODY can be read, False otherwise."""
warnings.warn(
"Deprecated, use .can_read_body #2005",
DeprecationWarning, stacklevel=2)
"Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2
)
return not self._payload.at_eof()
@property
@ -564,8 +644,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
body_size = len(body)
if body_size >= self._client_max_size:
raise HTTPRequestEntityTooLarge(
max_size=self._client_max_size,
actual_size=body_size
max_size=self._client_max_size, actual_size=body_size
)
if not chunk:
break
@ -575,10 +654,10 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
async def text(self) -> str:
"""Return BODY as text using encoding from .charset."""
bytes_body = await self.read()
encoding = self.charset or 'utf-8'
encoding = self.charset or "utf-8"
return bytes_body.decode(encoding)
async def json(self, *, loads: JSONDecoder=DEFAULT_JSON_DECODER) -> Any:
async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any:
"""Return BODY as JSON."""
body = await self.text()
return loads(body)
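A minimal sketch of the body accessors, assuming a handler context:

    payload = await request.json()  # decoded with DEFAULT_JSON_DECODER
    raw = await request.read()      # raw bytes are cached, so this is safe
    name = payload.get("name", "anonymous")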
@ -587,7 +666,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
"""Return async iterator to process BODY as multipart."""
return MultipartReader(self._headers, self._payload)
async def post(self) -> 'MultiDictProxy[Union[str, bytes, FileField]]':
async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
"""Return POST parameters."""
if self._post is not None:
return self._post
@ -596,15 +675,17 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return self._post
content_type = self.content_type
if (content_type not in ('',
'application/x-www-form-urlencoded',
'multipart/form-data')):
if content_type not in (
"",
"application/x-www-form-urlencoded",
"multipart/form-data",
):
self._post = MultiDictProxy(MultiDict())
return self._post
out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]]
if content_type == 'multipart/form-data':
if content_type == "multipart/form-data":
multipart = await self.multipart()
max_size = self._client_max_size
@ -614,66 +695,92 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
field_ct = field.headers.get(hdrs.CONTENT_TYPE)
if isinstance(field, BodyPartReader):
if field.filename and field_ct:
assert field.name is not None
# Note that according to RFC 7578, the Content-Type header
# is optional, even for files, so we can't assume it's
# present.
# https://tools.ietf.org/html/rfc7578#section-4.4
if field.filename:
# store file in temp file
tmp = tempfile.TemporaryFile()
chunk = await field.read_chunk(size=2**16)
chunk = await field.read_chunk(size=2 ** 16)
while chunk:
chunk = field.decode(chunk)
tmp.write(chunk)
size += len(chunk)
if 0 < max_size < size:
tmp.close()
raise HTTPRequestEntityTooLarge(
max_size=max_size,
actual_size=size
max_size=max_size, actual_size=size
)
chunk = await field.read_chunk(size=2**16)
chunk = await field.read_chunk(size=2 ** 16)
tmp.seek(0)
ff = FileField(field.name, field.filename,
cast(io.BufferedReader, tmp),
field_ct, field.headers)
if field_ct is None:
field_ct = "application/octet-stream"
ff = FileField(
field.name,
field.filename,
cast(io.BufferedReader, tmp),
field_ct,
field.headers,
)
out.add(field.name, ff)
else:
# deal with ordinary data
value = await field.read(decode=True)
if field_ct is None or \
field_ct.startswith('text/'):
charset = field.get_charset(default='utf-8')
if field_ct is None or field_ct.startswith("text/"):
charset = field.get_charset(default="utf-8")
out.add(field.name, value.decode(charset))
else:
out.add(field.name, value)
size += len(value)
if 0 < max_size < size:
raise HTTPRequestEntityTooLarge(
max_size=max_size,
actual_size=size
max_size=max_size, actual_size=size
)
else:
raise ValueError(
'To decode nested multipart you need '
'to use custom reader',
"To decode nested multipart you need " "to use custom reader",
)
field = await multipart.next()
else:
data = await self.read()
if data:
charset = self.charset or 'utf-8'
charset = self.charset or "utf-8"
out.extend(
parse_qsl(
data.rstrip().decode(charset),
keep_blank_values=True,
encoding=charset))
encoding=charset,
)
)
self._post = MultiDictProxy(out)
return self._post
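A hedged sketch of consuming form data via post(); uploads surface as FileField instances, and the field name here is illustrative:

    data = await request.post()
    field = data.get("upload")
    if isinstance(field, FileField):
        contents = field.file.read()  # spooled to a temp file above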
def get_extra_info(self, name: str, default: Any = None) -> Any:
"""Extra info from protocol transport"""
protocol = self._protocol
if protocol is None:
return default
transport = protocol.transport
if transport is None:
return default
return transport.get_extra_info(name, default)
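A short sketch of the new get_extra_info() helper; keys follow the asyncio transport conventions:

    peername = request.get_extra_info("peername")        # e.g. ("203.0.113.5", 54321)
    sslobj = request.get_extra_info("ssl_object", None)  # None when not TLS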
def __repr__(self) -> str:
ascii_encodable_path = self.path.encode('ascii', 'backslashreplace') \
.decode('ascii')
return "<{} {} {} >".format(self.__class__.__name__,
self._method, ascii_encodable_path)
ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode(
"ascii"
)
return "<{} {} {} >".format(
self.__class__.__name__, self._method, ascii_encodable_path
)
def __eq__(self, other: object) -> bool:
return id(self) == id(other)
@ -684,10 +791,13 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
async def _prepare_hook(self, response: StreamResponse) -> None:
return
def _cancel(self, exc: BaseException) -> None:
self._payload.set_exception(exc)
class Request(BaseRequest):
ATTRS = BaseRequest.ATTRS | frozenset(['_match_info'])
ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"])
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@ -699,38 +809,48 @@ class Request(BaseRequest):
self._match_info = None # type: Optional[UrlMappingMatchInfo]
if DEBUG:
def __setattr__(self, name: str, val: Any) -> None:
if name not in self.ATTRS:
warnings.warn("Setting custom {}.{} attribute "
"is discouraged".format(self.__class__.__name__,
name),
DeprecationWarning,
stacklevel=2)
warnings.warn(
"Setting custom {}.{} attribute "
"is discouraged".format(self.__class__.__name__, name),
DeprecationWarning,
stacklevel=2,
)
super().__setattr__(name, val)
def clone(self, *, method: str=sentinel, rel_url:
StrOrURL=sentinel, headers: LooseHeaders=sentinel,
scheme: str=sentinel, host: str=sentinel, remote:
str=sentinel) -> 'Request':
ret = super().clone(method=method,
rel_url=rel_url,
headers=headers,
scheme=scheme,
host=host,
remote=remote)
def clone(
self,
*,
method: str = sentinel,
rel_url: StrOrURL = sentinel,
headers: LooseHeaders = sentinel,
scheme: str = sentinel,
host: str = sentinel,
remote: str = sentinel,
) -> "Request":
ret = super().clone(
method=method,
rel_url=rel_url,
headers=headers,
scheme=scheme,
host=host,
remote=remote,
)
new_ret = cast(Request, ret)
new_ret._match_info = self._match_info
return new_ret
@reify
def match_info(self) -> 'UrlMappingMatchInfo':
def match_info(self) -> "UrlMappingMatchInfo":
"""Result of route resolving."""
match_info = self._match_info
assert match_info is not None
return match_info
@property
def app(self) -> 'Application':
def app(self) -> "Application":
"""Application instance."""
match_info = self._match_info
assert match_info is not None
@ -743,7 +863,7 @@ class Request(BaseRequest):
lst = match_info.apps
app = self.app
idx = lst.index(app)
sublist = list(reversed(lst[:idx + 1]))
sublist = list(reversed(lst[: idx + 1]))
return ChainMapProxy(sublist)
async def _prepare_hook(self, response: StreamResponse) -> None:

@ -1,5 +1,5 @@
import asyncio # noqa
import collections.abc # noqa
import asyncio
import collections.abc
import datetime
import enum
import json
@ -8,9 +8,8 @@ import time
import warnings
import zlib
from concurrent.futures import Executor
from email.utils import parsedate
from http.cookies import SimpleCookie
from typing import ( # noqa
from http.cookies import Morsel, SimpleCookie
from typing import (
TYPE_CHECKING,
Any,
Dict,
@ -27,29 +26,46 @@ from multidict import CIMultiDict, istr
from . import hdrs, payload
from .abc import AbstractStreamWriter
from .helpers import HeadersMixin, rfc822_formatted_time, sentinel
from .helpers import (
ETAG_ANY,
PY_38,
QUOTED_ETAG_RE,
ETag,
HeadersMixin,
parse_http_date,
rfc822_formatted_time,
sentinel,
validate_etag_value,
)
from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11
from .payload import Payload
from .typedefs import JSONEncoder, LooseHeaders
__all__ = ('ContentCoding', 'StreamResponse', 'Response', 'json_response')
__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
if TYPE_CHECKING: # pragma: no cover
from .web_request import BaseRequest # noqa
from .web_request import BaseRequest
BaseClass = MutableMapping[str, Any]
else:
BaseClass = collections.abc.MutableMapping
if not PY_38:
# allow samesite to be used in python < 3.8
# already permitted in python 3.8, see https://bugs.python.org/issue29613
Morsel._reserved["samesite"] = "SameSite" # type: ignore[attr-defined]
class ContentCoding(enum.Enum):
# The content codings that we have support for.
#
# Additional registered codings are listed at:
# https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
deflate = 'deflate'
gzip = 'gzip'
identity = 'identity'
deflate = "deflate"
gzip = "gzip"
identity = "identity"
############################################################
@ -61,16 +77,19 @@ class StreamResponse(BaseClass, HeadersMixin):
_length_check = True
def __init__(self, *,
status: int=200,
reason: Optional[str]=None,
headers: Optional[LooseHeaders]=None) -> None:
def __init__(
self,
*,
status: int = 200,
reason: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
) -> None:
self._body = None
self._keep_alive = None # type: Optional[bool]
self._chunked = False
self._compression = False
self._compression_force = None # type: Optional[ContentCoding]
self._cookies = SimpleCookie()
self._cookies = SimpleCookie() # type: SimpleCookie[str]
self._req = None # type: Optional[BaseRequest]
self._payload_writer = None # type: Optional[AbstractStreamWriter]
@ -90,8 +109,11 @@ class StreamResponse(BaseClass, HeadersMixin):
return self._payload_writer is not None
@property
def task(self) -> 'asyncio.Task[None]':
return getattr(self._req, 'task', None)
def task(self) -> "Optional[asyncio.Task[None]]":
if self._req:
return self._req.task
else:
return None
@property
def status(self) -> int:
@ -109,19 +131,21 @@ class StreamResponse(BaseClass, HeadersMixin):
def reason(self) -> str:
return self._reason
def set_status(self, status: int,
reason: Optional[str]=None,
_RESPONSES: Mapping[int,
Tuple[str, str]]=RESPONSES) -> None:
assert not self.prepared, \
'Cannot change the response status code after ' \
'the headers have been sent'
def set_status(
self,
status: int,
reason: Optional[str] = None,
_RESPONSES: Mapping[int, Tuple[str, str]] = RESPONSES,
) -> None:
assert not self.prepared, (
"Cannot change the response status code after " "the headers have been sent"
)
self._status = int(status)
if reason is None:
try:
reason = _RESPONSES[self._status][0]
except Exception:
reason = ''
reason = ""
self._reason = reason
@property
@ -137,61 +161,68 @@ class StreamResponse(BaseClass, HeadersMixin):
@property
def output_length(self) -> int:
warnings.warn('output_length is deprecated', DeprecationWarning)
warnings.warn("output_length is deprecated", DeprecationWarning)
assert self._payload_writer
return self._payload_writer.buffer_size
def enable_chunked_encoding(self, chunk_size: Optional[int]=None) -> None:
def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
"""Enables automatic chunked transfer encoding."""
self._chunked = True
if hdrs.CONTENT_LENGTH in self._headers:
raise RuntimeError("You can't enable chunked encoding when "
"a content length is set")
raise RuntimeError(
"You can't enable chunked encoding when " "a content length is set"
)
if chunk_size is not None:
warnings.warn('Chunk size is deprecated #1615', DeprecationWarning)
warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
def enable_compression(self,
force: Optional[Union[bool, ContentCoding]]=None
) -> None:
def enable_compression(
self, force: Optional[Union[bool, ContentCoding]] = None
) -> None:
"""Enables response compression encoding."""
# Backwards compatibility for when force was a bool <0.17.
if type(force) == bool:
force = ContentCoding.deflate if force else ContentCoding.identity
warnings.warn("Using boolean for force is deprecated #3318",
DeprecationWarning)
warnings.warn(
"Using boolean for force is deprecated #3318", DeprecationWarning
)
elif force is not None:
assert isinstance(force, ContentCoding), ("force should one of "
"None, bool or "
"ContentEncoding")
assert isinstance(force, ContentCoding), (
"force should be one of " "None, bool or " "ContentCoding"
)
self._compression = True
self._compression_force = force
@property
def headers(self) -> 'CIMultiDict[str]':
def headers(self) -> "CIMultiDict[str]":
return self._headers
@property
def cookies(self) -> SimpleCookie:
def cookies(self) -> "SimpleCookie[str]":
return self._cookies
def set_cookie(self, name: str, value: str, *,
expires: Optional[str]=None,
domain: Optional[str]=None,
max_age: Optional[Union[int, str]]=None,
path: str='/',
secure: Optional[str]=None,
httponly: Optional[str]=None,
version: Optional[str]=None) -> None:
def set_cookie(
self,
name: str,
value: str,
*,
expires: Optional[str] = None,
domain: Optional[str] = None,
max_age: Optional[Union[int, str]] = None,
path: str = "/",
secure: Optional[bool] = None,
httponly: Optional[bool] = None,
version: Optional[str] = None,
samesite: Optional[str] = None,
) -> None:
"""Set or update response cookie.
Sets a new cookie or updates an existing one with a new value.
Only those params which are not None are updated.
"""
old = self._cookies.get(name)
if old is not None and old.coded_value == '':
if old is not None and old.coded_value == "":
# deleted cookie
self._cookies.pop(name, None)
@ -199,39 +230,46 @@ class StreamResponse(BaseClass, HeadersMixin):
c = self._cookies[name]
if expires is not None:
c['expires'] = expires
elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT':
del c['expires']
c["expires"] = expires
elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
del c["expires"]
if domain is not None:
c['domain'] = domain
c["domain"] = domain
if max_age is not None:
c['max-age'] = str(max_age)
elif 'max-age' in c:
del c['max-age']
c["max-age"] = str(max_age)
elif "max-age" in c:
del c["max-age"]
c['path'] = path
c["path"] = path
if secure is not None:
c['secure'] = secure
c["secure"] = secure
if httponly is not None:
c['httponly'] = httponly
c["httponly"] = httponly
if version is not None:
c['version'] = version
c["version"] = version
if samesite is not None:
c["samesite"] = samesite
def del_cookie(self, name: str, *,
domain: Optional[str]=None,
path: str='/') -> None:
def del_cookie(
self, name: str, *, domain: Optional[str] = None, path: str = "/"
) -> None:
"""Delete cookie.
Creates a new empty expired cookie.
"""
# TODO: do we need domain/path here?
self._cookies.pop(name, None)
self.set_cookie(name, '', max_age=0,
expires="Thu, 01 Jan 1970 00:00:00 GMT",
domain=domain, path=path)
self.set_cookie(
name,
"",
max_age=0,
expires="Thu, 01 Jan 1970 00:00:00 GMT",
domain=domain,
path=path,
)
@property
def content_length(self) -> Optional[int]:
@ -243,8 +281,9 @@ class StreamResponse(BaseClass, HeadersMixin):
if value is not None:
value = int(value)
if self._chunked:
raise RuntimeError("You can't set content length when "
"chunked encoding is enable")
raise RuntimeError(
"You can't set content length when " "chunked encoding is enabled"
)
self._headers[hdrs.CONTENT_LENGTH] = str(value)
else:
self._headers.pop(hdrs.CONTENT_LENGTH, None)
@ -268,14 +307,16 @@ class StreamResponse(BaseClass, HeadersMixin):
@charset.setter
def charset(self, value: Optional[str]) -> None:
ctype = self.content_type # read header values if needed
if ctype == 'application/octet-stream':
raise RuntimeError("Setting charset for application/octet-stream "
"doesn't make sense, setup content_type first")
if ctype == "application/octet-stream":
raise RuntimeError(
"Setting charset for application/octet-stream "
"doesn't make sense, setup content_type first"
)
assert self._content_dict is not None
if value is None:
self._content_dict.pop('charset', None)
self._content_dict.pop("charset", None)
else:
self._content_dict['charset'] = str(value).lower()
self._content_dict["charset"] = str(value).lower()
self._generate_content_type_header()
@property
@ -284,38 +325,70 @@ class StreamResponse(BaseClass, HeadersMixin):
This header is represented as a `datetime` object.
"""
httpdate = self._headers.get(hdrs.LAST_MODIFIED)
if httpdate is not None:
timetuple = parsedate(httpdate)
if timetuple is not None:
return datetime.datetime(*timetuple[:6],
tzinfo=datetime.timezone.utc)
return None
return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))
@last_modified.setter
def last_modified(self,
value: Optional[
Union[int, float, datetime.datetime, str]]) -> None:
def last_modified(
self, value: Optional[Union[int, float, datetime.datetime, str]]
) -> None:
if value is None:
self._headers.pop(hdrs.LAST_MODIFIED, None)
elif isinstance(value, (int, float)):
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)))
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
)
elif isinstance(value, datetime.datetime):
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple())
"%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
)
elif isinstance(value, str):
self._headers[hdrs.LAST_MODIFIED] = value
@property
def etag(self) -> Optional[ETag]:
quoted_value = self._headers.get(hdrs.ETAG)
if not quoted_value:
return None
elif quoted_value == ETAG_ANY:
return ETag(value=ETAG_ANY)
match = QUOTED_ETAG_RE.fullmatch(quoted_value)
if not match:
return None
is_weak, value = match.group(1, 2)
return ETag(
is_weak=bool(is_weak),
value=value,
)
@etag.setter
def etag(self, value: Optional[Union[ETag, str]]) -> None:
if value is None:
self._headers.pop(hdrs.ETAG, None)
elif (isinstance(value, str) and value == ETAG_ANY) or (
isinstance(value, ETag) and value.value == ETAG_ANY
):
self._headers[hdrs.ETAG] = ETAG_ANY
elif isinstance(value, str):
validate_etag_value(value)
self._headers[hdrs.ETAG] = f'"{value}"'
elif isinstance(value, ETag) and isinstance(value.value, str):
validate_etag_value(value.value)
hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
self._headers[hdrs.ETAG] = hdr_value
else:
raise ValueError(
f"Unsupported etag type: {type(value)}. "
f"etag must be str, ETag or None"
)
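A hedged sketch of the new etag accessors; resp is an assumed Response instance:

    resp.etag = "0123-abc"                            # stored quoted: "0123-abc"
    resp.etag = ETag(value="0123-abc", is_weak=True)  # stored as W/"0123-abc"
    resp.etag = ETAG_ANY                              # stored as *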
def _generate_content_type_header(
self,
CONTENT_TYPE: istr=hdrs.CONTENT_TYPE) -> None:
self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
) -> None:
assert self._content_dict is not None
assert self._content_type is not None
params = '; '.join("{}={}".format(k, v)
for k, v in self._content_dict.items())
params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
if params:
ctype = self._content_type + '; ' + params
ctype = self._content_type + "; " + params
else:
ctype = self._content_type
self._headers[CONTENT_TYPE] = ctype
@ -329,43 +402,49 @@ class StreamResponse(BaseClass, HeadersMixin):
# remove the header
self._headers.popall(hdrs.CONTENT_LENGTH, None)
async def _start_compression(self, request: 'BaseRequest') -> None:
async def _start_compression(self, request: "BaseRequest") -> None:
if self._compression_force:
await self._do_start_compression(self._compression_force)
else:
accept_encoding = request.headers.get(
hdrs.ACCEPT_ENCODING, '').lower()
accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
for coding in ContentCoding:
if coding.value in accept_encoding:
await self._do_start_compression(coding)
return
async def prepare(
self,
request: 'BaseRequest'
) -> Optional[AbstractStreamWriter]:
async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
if self._eof_sent:
return None
if self._payload_writer is not None:
return self._payload_writer
await request._prepare_hook(self)
return await self._start(request)
async def _start(self, request: 'BaseRequest') -> AbstractStreamWriter:
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
self._req = request
writer = self._payload_writer = request._payload_writer
await self._prepare_headers()
await request._prepare_hook(self)
await self._write_headers()
return writer
async def _prepare_headers(self) -> None:
request = self._req
assert request is not None
writer = self._payload_writer
assert writer is not None
keep_alive = self._keep_alive
if keep_alive is None:
keep_alive = request.keep_alive
self._keep_alive = keep_alive
version = request.version
writer = self._payload_writer = request._payload_writer
headers = self._headers
for cookie in self._cookies.values():
value = cookie.output(header='')[1:]
value = cookie.output(header="")[1:]
headers.add(hdrs.SET_COOKIE, value)
if self._compression:
@ -375,23 +454,29 @@ class StreamResponse(BaseClass, HeadersMixin):
if version != HttpVersion11:
raise RuntimeError(
"Using chunked encoding is forbidden "
"for HTTP/{0.major}.{0.minor}".format(request.version))
"for HTTP/{0.major}.{0.minor}".format(request.version)
)
writer.enable_chunking()
headers[hdrs.TRANSFER_ENCODING] = 'chunked'
headers[hdrs.TRANSFER_ENCODING] = "chunked"
if hdrs.CONTENT_LENGTH in headers:
del headers[hdrs.CONTENT_LENGTH]
elif self._length_check:
writer.length = self.content_length
if writer.length is None:
if version >= HttpVersion11:
if version >= HttpVersion11 and self.status != 204:
writer.enable_chunking()
headers[hdrs.TRANSFER_ENCODING] = 'chunked'
headers[hdrs.TRANSFER_ENCODING] = "chunked"
if hdrs.CONTENT_LENGTH in headers:
del headers[hdrs.CONTENT_LENGTH]
else:
keep_alive = False
# HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
# HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204):
del headers[hdrs.CONTENT_LENGTH]
headers.setdefault(hdrs.CONTENT_TYPE, 'application/octet-stream')
if self.status not in (204, 304):
headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
headers.setdefault(hdrs.DATE, rfc822_formatted_time())
headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)
@ -399,21 +484,27 @@ class StreamResponse(BaseClass, HeadersMixin):
if hdrs.CONNECTION not in headers:
if keep_alive:
if version == HttpVersion10:
headers[hdrs.CONNECTION] = 'keep-alive'
headers[hdrs.CONNECTION] = "keep-alive"
else:
if version == HttpVersion11:
headers[hdrs.CONNECTION] = 'close'
headers[hdrs.CONNECTION] = "close"
async def _write_headers(self) -> None:
request = self._req
assert request is not None
writer = self._payload_writer
assert writer is not None
# status line
status_line = 'HTTP/{}.{} {} {}'.format(
version[0], version[1], self._status, self._reason)
await writer.write_headers(status_line, headers)
return writer
version = request.version
status_line = "HTTP/{}.{} {} {}".format(
version[0], version[1], self._status, self._reason
)
await writer.write_headers(status_line, self._headers)
async def write(self, data: bytes) -> None:
assert isinstance(data, (bytes, bytearray, memoryview)), \
"data argument must be byte-ish (%r)" % type(data)
assert isinstance(
data, (bytes, bytearray, memoryview)
), "data argument must be byte-ish (%r)" % type(data)
if self._eof_sent:
raise RuntimeError("Cannot call write() after write_eof()")
@ -424,22 +515,23 @@ class StreamResponse(BaseClass, HeadersMixin):
async def drain(self) -> None:
assert not self._eof_sent, "EOF has already been sent"
assert self._payload_writer is not None, \
"Response has not been started"
warnings.warn("drain method is deprecated, use await resp.write()",
DeprecationWarning,
stacklevel=2)
assert self._payload_writer is not None, "Response has not been started"
warnings.warn(
"drain method is deprecated, use await resp.write()",
DeprecationWarning,
stacklevel=2,
)
await self._payload_writer.drain()
async def write_eof(self, data: bytes=b'') -> None:
assert isinstance(data, (bytes, bytearray, memoryview)), \
"data argument must be byte-ish (%r)" % type(data)
async def write_eof(self, data: bytes = b"") -> None:
assert isinstance(
data, (bytes, bytearray, memoryview)
), "data argument must be byte-ish (%r)" % type(data)
if self._eof_sent:
return
assert self._payload_writer is not None, \
"Response has not been started"
assert self._payload_writer is not None, "Response has not been started"
await self._payload_writer.write_eof(data)
self._eof_sent = True
@ -452,11 +544,10 @@ class StreamResponse(BaseClass, HeadersMixin):
info = "eof"
elif self.prepared:
assert self._req is not None
info = "{} {} ".format(self._req.method, self._req.path)
info = f"{self._req.method} {self._req.path} "
else:
info = "not prepared"
return "<{} {} {}>".format(self.__class__.__name__,
self.reason, info)
return f"<{self.__class__.__name__} {self.reason} {info}>"
def __getitem__(self, key: str) -> Any:
return self._state[key]
@ -481,17 +572,19 @@ class StreamResponse(BaseClass, HeadersMixin):
class Response(StreamResponse):
def __init__(self, *,
body: Any=None,
status: int=200,
reason: Optional[str]=None,
text: Optional[str]=None,
headers: Optional[LooseHeaders]=None,
content_type: Optional[str]=None,
charset: Optional[str]=None,
zlib_executor_size: Optional[int]=None,
zlib_executor: Executor=None) -> None:
def __init__(
self,
*,
body: Any = None,
status: int = 200,
reason: Optional[str] = None,
text: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
content_type: Optional[str] = None,
charset: Optional[str] = None,
zlib_executor_size: Optional[int] = None,
zlib_executor: Optional[Executor] = None,
) -> None:
if body is not None and text is not None:
raise ValueError("body and text are not allowed together")
@ -503,38 +596,39 @@ class Response(StreamResponse):
real_headers = headers # = cast('CIMultiDict[str]', headers)
if content_type is not None and "charset" in content_type:
raise ValueError("charset must not be in content_type "
"argument")
raise ValueError("charset must not be in content_type " "argument")
if text is not None:
if hdrs.CONTENT_TYPE in real_headers:
if content_type or charset:
raise ValueError("passing both Content-Type header and "
"content_type or charset params "
"is forbidden")
raise ValueError(
"passing both Content-Type header and "
"content_type or charset params "
"is forbidden"
)
else:
# fast path for filling headers
if not isinstance(text, str):
raise TypeError("text argument must be str (%r)" %
type(text))
raise TypeError("text argument must be str (%r)" % type(text))
if content_type is None:
content_type = 'text/plain'
content_type = "text/plain"
if charset is None:
charset = 'utf-8'
real_headers[hdrs.CONTENT_TYPE] = (
content_type + '; charset=' + charset)
charset = "utf-8"
real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
body = text.encode(charset)
text = None
else:
if hdrs.CONTENT_TYPE in real_headers:
if content_type is not None or charset is not None:
raise ValueError("passing both Content-Type header and "
"content_type or charset params "
"is forbidden")
raise ValueError(
"passing both Content-Type header and "
"content_type or charset params "
"is forbidden"
)
else:
if content_type is not None:
if charset is not None:
content_type += '; charset=' + charset
content_type += "; charset=" + charset
real_headers[hdrs.CONTENT_TYPE] = content_type
super().__init__(status=status, reason=reason, headers=real_headers)
@ -553,9 +647,12 @@ class Response(StreamResponse):
return self._body
@body.setter
def body(self, body: bytes,
CONTENT_TYPE: istr=hdrs.CONTENT_TYPE,
CONTENT_LENGTH: istr=hdrs.CONTENT_LENGTH) -> None:
def body(
self,
body: bytes,
CONTENT_TYPE: istr = hdrs.CONTENT_TYPE,
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
) -> None:
if body is None:
self._body = None # type: Optional[bytes]
self._body_payload = False # type: bool
@ -566,7 +663,7 @@ class Response(StreamResponse):
try:
self._body = body = payload.PAYLOAD_REGISTRY.get(body)
except payload.LookupError:
raise ValueError('Unsupported body type %r' % type(body))
raise ValueError("Unsupported body type %r" % type(body))
self._body_payload = True
@ -594,17 +691,18 @@ class Response(StreamResponse):
def text(self) -> Optional[str]:
if self._body is None:
return None
return self._body.decode(self.charset or 'utf-8')
return self._body.decode(self.charset or "utf-8")
@text.setter
def text(self, text: str) -> None:
assert text is None or isinstance(text, str), \
"text argument must be str (%r)" % type(text)
assert text is None or isinstance(
text, str
), "text argument must be str (%r)" % type(text)
if self.content_type == 'application/octet-stream':
self.content_type = 'text/plain'
if self.content_type == "application/octet-stream":
self.content_type = "text/plain"
if self.charset is None:
self.charset = 'utf-8'
self.charset = "utf-8"
self._body = text.encode(self.charset)
self._body_payload = False
@ -633,19 +731,18 @@ class Response(StreamResponse):
def content_length(self, value: Optional[int]) -> None:
raise RuntimeError("Content length is set automatically")
async def write_eof(self, data: bytes=b'') -> None:
async def write_eof(self, data: bytes = b"") -> None:
if self._eof_sent:
return
if self._compressed_body is None:
body = self._body # type: Optional[Union[bytes, Payload]]
else:
body = self._compressed_body
assert not data, "data arg is not supported, got {!r}".format(data)
assert not data, f"data arg is not supported, got {data!r}"
assert self._req is not None
assert self._payload_writer is not None
if body is not None:
if (self._req._method == hdrs.METH_HEAD or
self._status in [204, 304]):
if self._req._method == hdrs.METH_HEAD or self._status in [204, 304]:
await super().write_eof()
elif self._body_payload:
payload = cast(Payload, body)
@ -656,22 +753,22 @@ class Response(StreamResponse):
else:
await super().write_eof()
async def _start(self, request: 'BaseRequest') -> AbstractStreamWriter:
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers:
if not self._body_payload:
if self._body is not None:
self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body))
else:
self._headers[hdrs.CONTENT_LENGTH] = '0'
self._headers[hdrs.CONTENT_LENGTH] = "0"
return await super()._start(request)
def _compress_body(self, zlib_mode: int) -> None:
assert zlib_mode > 0
compressobj = zlib.compressobj(wbits=zlib_mode)
body_in = self._body
assert body_in is not None
self._compressed_body = \
compressobj.compress(body_in) + compressobj.flush()
self._compressed_body = compressobj.compress(body_in) + compressobj.flush()
async def _do_start_compression(self, coding: ContentCoding) -> None:
if self._body_payload or self._chunked:
@ -680,14 +777,18 @@ class Response(StreamResponse):
if coding != ContentCoding.identity:
# Instead of using _payload_writer.enable_compression,
# compress the whole body
zlib_mode = (16 + zlib.MAX_WBITS
if coding == ContentCoding.gzip else -zlib.MAX_WBITS)
zlib_mode = (
16 + zlib.MAX_WBITS if coding == ContentCoding.gzip else zlib.MAX_WBITS
)
body_in = self._body
assert body_in is not None
if self._zlib_executor_size is not None and \
len(body_in) > self._zlib_executor_size:
if (
self._zlib_executor_size is not None
and len(body_in) > self._zlib_executor_size
):
await asyncio.get_event_loop().run_in_executor(
self._zlib_executor, self._compress_body, zlib_mode)
self._zlib_executor, self._compress_body, zlib_mode
)
else:
self._compress_body(zlib_mode)
@ -698,20 +799,27 @@ class Response(StreamResponse):
self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out))
def json_response(data: Any=sentinel, *,
text: str=None,
body: bytes=None,
status: int=200,
reason: Optional[str]=None,
headers: LooseHeaders=None,
content_type: str='application/json',
dumps: JSONEncoder=json.dumps) -> Response:
def json_response(
data: Any = sentinel,
*,
text: Optional[str] = None,
body: Optional[bytes] = None,
status: int = 200,
reason: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
content_type: str = "application/json",
dumps: JSONEncoder = json.dumps,
) -> Response:
if data is not sentinel:
if text or body:
raise ValueError(
"only one of data, text, or body should be specified"
)
raise ValueError("only one of data, text, or body should be specified")
else:
text = dumps(data)
return Response(text=text, body=body, status=status, reason=reason,
headers=headers, content_type=content_type)
return Response(
text=text,
body=body,
status=status,
reason=reason,
headers=headers,
content_type=content_type,
)
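A minimal sketch; the custom dumps argument is optional and only illustrative:

    resp = json_response({"status": "ok"})
    resp = json_response({"big": 1}, dumps=json.dumps)  # swap in any encoder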

@ -3,7 +3,6 @@ import os # noqa
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Dict,
Iterator,
@ -19,75 +18,89 @@ import attr
from . import hdrs
from .abc import AbstractView
from .typedefs import PathLike
from .typedefs import Handler, PathLike
if TYPE_CHECKING: # pragma: no cover
from .web_urldispatcher import UrlDispatcher
from .web_request import Request
from .web_response import StreamResponse
from .web_urldispatcher import AbstractRoute, UrlDispatcher
else:
Request = StreamResponse = UrlDispatcher = None
Request = StreamResponse = UrlDispatcher = AbstractRoute = None
__all__ = ('AbstractRouteDef', 'RouteDef', 'StaticDef', 'RouteTableDef',
'head', 'options', 'get', 'post', 'patch', 'put', 'delete',
'route', 'view', 'static')
__all__ = (
"AbstractRouteDef",
"RouteDef",
"StaticDef",
"RouteTableDef",
"head",
"options",
"get",
"post",
"patch",
"put",
"delete",
"route",
"view",
"static",
)
class AbstractRouteDef(abc.ABC):
@abc.abstractmethod
def register(self, router: UrlDispatcher) -> None:
def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
pass # pragma: no cover
_SimpleHandler = Callable[[Request], Awaitable[StreamResponse]]
_HandlerType = Union[Type[AbstractView], _SimpleHandler]
_HandlerType = Union[Type[AbstractView], Handler]
@attr.s(frozen=True, repr=False, slots=True)
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class RouteDef(AbstractRouteDef):
method = attr.ib(type=str)
path = attr.ib(type=str)
handler = attr.ib() # type: _HandlerType
kwargs = attr.ib(type=Dict[str, Any])
method: str
path: str
handler: _HandlerType
kwargs: Dict[str, Any]
def __repr__(self) -> str:
info = []
for name, value in sorted(self.kwargs.items()):
info.append(", {}={!r}".format(name, value))
return ("<RouteDef {method} {path} -> {handler.__name__!r}"
"{info}>".format(method=self.method, path=self.path,
handler=self.handler, info=''.join(info)))
info.append(f", {name}={value!r}")
return "<RouteDef {method} {path} -> {handler.__name__!r}" "{info}>".format(
method=self.method, path=self.path, handler=self.handler, info="".join(info)
)
def register(self, router: UrlDispatcher) -> None:
def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
if self.method in hdrs.METH_ALL:
reg = getattr(router, 'add_'+self.method.lower())
reg(self.path, self.handler, **self.kwargs)
reg = getattr(router, "add_" + self.method.lower())
return [reg(self.path, self.handler, **self.kwargs)]
else:
router.add_route(self.method, self.path, self.handler,
**self.kwargs)
return [
router.add_route(self.method, self.path, self.handler, **self.kwargs)
]
@attr.s(frozen=True, repr=False, slots=True)
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class StaticDef(AbstractRouteDef):
prefix = attr.ib(type=str)
path = attr.ib() # type: PathLike
kwargs = attr.ib(type=Dict[str, Any])
prefix: str
path: PathLike
kwargs: Dict[str, Any]
def __repr__(self) -> str:
info = []
for name, value in sorted(self.kwargs.items()):
info.append(", {}={!r}".format(name, value))
return ("<StaticDef {prefix} -> {path}"
"{info}>".format(prefix=self.prefix, path=self.path,
info=''.join(info)))
info.append(f", {name}={value!r}")
return "<StaticDef {prefix} -> {path}" "{info}>".format(
prefix=self.prefix, path=self.path, info="".join(info)
)
def register(self, router: UrlDispatcher) -> None:
router.add_static(self.prefix, self.path, **self.kwargs)
def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
resource = router.add_static(self.prefix, self.path, **self.kwargs)
routes = resource.get_info().get("routes", {})
return list(routes.values())
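A hedged note on the register() change above: because each def now returns the routes it creates, add_routes() can hand them back to the caller. Sketch only; app and handler are assumed, and the add_routes return value relies on the accompanying router change:

    registered = app.add_routes([get("/ping", handler)])
    for r in registered:
        print(r.method, r.handler)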
def route(method: str, path: str, handler: _HandlerType,
**kwargs: Any) -> RouteDef:
def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
return RouteDef(method, path, handler, kwargs)
@ -99,10 +112,17 @@ def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
return route(hdrs.METH_OPTIONS, path, handler, **kwargs)
def get(path: str, handler: _HandlerType, *, name: Optional[str]=None,
allow_head: bool=True, **kwargs: Any) -> RouteDef:
return route(hdrs.METH_GET, path, handler, name=name,
allow_head=allow_head, **kwargs)
def get(
path: str,
handler: _HandlerType,
*,
name: Optional[str] = None,
allow_head: bool = True,
**kwargs: Any,
) -> RouteDef:
return route(
hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
)
def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
@ -125,8 +145,7 @@ def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
return route(hdrs.METH_ANY, path, handler, **kwargs)
def static(prefix: str, path: PathLike,
**kwargs: Any) -> StaticDef:
def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
return StaticDef(prefix, path, kwargs)
@ -135,19 +154,22 @@ _Deco = Callable[[_HandlerType], _HandlerType]
class RouteTableDef(Sequence[AbstractRouteDef]):
"""Route definition table"""
def __init__(self) -> None:
self._items = [] # type: List[AbstractRouteDef]
def __repr__(self) -> str:
return "<RouteTableDef count={}>".format(len(self._items))
return f"<RouteTableDef count={len(self._items)}>"
@overload
def __getitem__(self, index: int) -> AbstractRouteDef: ... # noqa
def __getitem__(self, index: int) -> AbstractRouteDef:
...
@overload # noqa
def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ... # noqa
@overload
def __getitem__(self, index: slice) -> List[AbstractRouteDef]:
...
def __getitem__(self, index): # type: ignore # noqa
def __getitem__(self, index): # type: ignore[no-untyped-def]
return self._items[index]
def __iter__(self) -> Iterator[AbstractRouteDef]:
@ -159,13 +181,11 @@ class RouteTableDef(Sequence[AbstractRouteDef]):
def __contains__(self, item: object) -> bool:
return item in self._items
def route(self,
method: str,
path: str,
**kwargs: Any) -> _Deco:
def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
def inner(handler: _HandlerType) -> _HandlerType:
self._items.append(RouteDef(method, path, handler, kwargs))
return handler
return inner
def head(self, path: str, **kwargs: Any) -> _Deco:
@ -189,6 +209,5 @@ class RouteTableDef(Sequence[AbstractRouteDef]):
def view(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_ANY, path, **kwargs)
def static(self, prefix: str, path: PathLike,
**kwargs: Any) -> None:
def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
self._items.append(StaticDef(prefix, path, kwargs))
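A decorator-style sketch of the table defined above; app is an assumed web.Application and Response is assumed imported from aiohttp.web:

    routes = RouteTableDef()

    @routes.get("/")
    async def index(request: Request) -> StreamResponse:
        return Response(text="hello")

    app.add_routes(routes)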

@ -12,11 +12,20 @@ from .web_server import Server
try:
from ssl import SSLContext
except ImportError:
SSLContext = object # type: ignore
SSLContext = object # type: ignore[misc,assignment]
__all__ = ('BaseSite', 'TCPSite', 'UnixSite', 'NamedPipeSite', 'SockSite',
'BaseRunner', 'AppRunner', 'ServerRunner', 'GracefulExit')
__all__ = (
"BaseSite",
"TCPSite",
"UnixSite",
"NamedPipeSite",
"SockSite",
"BaseRunner",
"AppRunner",
"ServerRunner",
"GracefulExit",
)
class GracefulExit(SystemExit):
@ -28,13 +37,16 @@ def _raise_graceful_exit() -> None:
class BaseSite(ABC):
__slots__ = ('_runner', '_shutdown_timeout', '_ssl_context', '_backlog',
'_server')
__slots__ = ("_runner", "_shutdown_timeout", "_ssl_context", "_backlog", "_server")
def __init__(self, runner: 'BaseRunner', *,
shutdown_timeout: float=60.0,
ssl_context: Optional[SSLContext]=None,
backlog: int=128) -> None:
def __init__(
self,
runner: "BaseRunner",
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
) -> None:
if runner.server is None:
raise RuntimeError("Call runner.setup() before making a site")
self._runner = runner
@ -59,7 +71,7 @@ class BaseSite(ABC):
return # not started yet
self._server.close()
# named pipes do not have wait_closed property
if hasattr(self._server, 'wait_closed'):
if hasattr(self._server, "wait_closed"):
await self._server.wait_closed()
await self._runner.shutdown()
assert self._runner.server
@ -68,18 +80,26 @@ class BaseSite(ABC):
class TCPSite(BaseSite):
__slots__ = ('_host', '_port', '_reuse_address', '_reuse_port')
__slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")
def __init__(self, runner: 'BaseRunner',
host: str=None, port: int=None, *,
shutdown_timeout: float=60.0,
ssl_context: Optional[SSLContext]=None,
backlog: int=128, reuse_address: Optional[bool]=None,
reuse_port: Optional[bool]=None) -> None:
super().__init__(runner, shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context, backlog=backlog)
if host is None:
host = "0.0.0.0"
def __init__(
self,
runner: "BaseRunner",
host: Optional[str] = None,
port: Optional[int] = None,
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
) -> None:
super().__init__(
runner,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
self._host = host
if port is None:
port = 8443 if self._ssl_context else 8080
@ -89,36 +109,50 @@ class TCPSite(BaseSite):
@property
def name(self) -> str:
scheme = 'https' if self._ssl_context else 'http'
return str(URL.build(scheme=scheme, host=self._host, port=self._port))
scheme = "https" if self._ssl_context else "http"
host = "0.0.0.0" if self._host is None else self._host
return str(URL.build(scheme=scheme, host=host, port=self._port))
async def start(self) -> None:
await super().start()
loop = asyncio.get_event_loop()
server = self._runner.server
assert server is not None
self._server = await loop.create_server( # type: ignore
server, self._host, self._port,
ssl=self._ssl_context, backlog=self._backlog,
self._server = await loop.create_server(
server,
self._host,
self._port,
ssl=self._ssl_context,
backlog=self._backlog,
reuse_address=self._reuse_address,
reuse_port=self._reuse_port)
reuse_port=self._reuse_port,
)
class UnixSite(BaseSite):
__slots__ = ('_path', )
__slots__ = ("_path",)
def __init__(self, runner: 'BaseRunner', path: str, *,
shutdown_timeout: float=60.0,
ssl_context: Optional[SSLContext]=None,
backlog: int=128) -> None:
super().__init__(runner, shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context, backlog=backlog)
def __init__(
self,
runner: "BaseRunner",
path: str,
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
) -> None:
super().__init__(
runner,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
self._path = path
@property
def name(self) -> str:
scheme = 'https' if self._ssl_context else 'http'
return '{}://unix:{}:'.format(scheme, self._path)
scheme = "https" if self._ssl_context else "http"
return f"{scheme}://unix:{self._path}:"
async def start(self) -> None:
await super().start()
@ -126,19 +160,23 @@ class UnixSite(BaseSite):
server = self._runner.server
assert server is not None
self._server = await loop.create_unix_server(
server, self._path,
ssl=self._ssl_context, backlog=self._backlog)
server, self._path, ssl=self._ssl_context, backlog=self._backlog
)
class NamedPipeSite(BaseSite):
__slots__ = ('_path', )
__slots__ = ("_path",)
def __init__(self, runner: 'BaseRunner', path: str, *,
shutdown_timeout: float=60.0) -> None:
def __init__(
self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
) -> None:
loop = asyncio.get_event_loop()
if not isinstance(loop, asyncio.ProactorEventLoop): # type: ignore
raise RuntimeError("Named Pipes only available in proactor"
"loop under windows")
if not isinstance(
loop, asyncio.ProactorEventLoop # type: ignore[attr-defined]
):
raise RuntimeError(
"Named Pipes only available in proactor" "loop under windows"
)
super().__init__(runner, shutdown_timeout=shutdown_timeout)
self._path = path
@ -151,25 +189,34 @@ class NamedPipeSite(BaseSite):
loop = asyncio.get_event_loop()
server = self._runner.server
assert server is not None
_server = await loop.start_serving_pipe( # type: ignore
_server = await loop.start_serving_pipe( # type: ignore[attr-defined]
server, self._path
)
self._server = _server[0]
class SockSite(BaseSite):
__slots__ = ('_sock', '_name')
__slots__ = ("_sock", "_name")
def __init__(self, runner: 'BaseRunner', sock: socket.socket, *,
shutdown_timeout: float=60.0,
ssl_context: Optional[SSLContext]=None,
backlog: int=128) -> None:
super().__init__(runner, shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context, backlog=backlog)
def __init__(
self,
runner: "BaseRunner",
sock: socket.socket,
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
) -> None:
super().__init__(
runner,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
self._sock = sock
scheme = 'https' if self._ssl_context else 'http'
if hasattr(socket, 'AF_UNIX') and sock.family == socket.AF_UNIX:
name = '{}://unix:{}:'.format(scheme, sock.getsockname())
scheme = "https" if self._ssl_context else "http"
if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
name = f"{scheme}://unix:{sock.getsockname()}:"
else:
host, port = sock.getsockname()[:2]
name = str(URL.build(scheme=scheme, host=host, port=port))
@ -184,15 +231,15 @@ class SockSite(BaseSite):
loop = asyncio.get_event_loop()
server = self._runner.server
assert server is not None
self._server = await loop.create_server( # type: ignore
server, sock=self._sock,
ssl=self._ssl_context, backlog=self._backlog)
self._server = await loop.create_server(
server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog
)
class BaseRunner(ABC):
__slots__ = ('_handle_signals', '_kwargs', '_server', '_sites')
__slots__ = ("_handle_signals", "_kwargs", "_server", "_sites")
def __init__(self, *, handle_signals: bool=False, **kwargs: Any) -> None:
def __init__(self, *, handle_signals: bool = False, **kwargs: Any) -> None:
self._handle_signals = handle_signals
self._kwargs = kwargs
self._server = None # type: Optional[Server]
@ -203,8 +250,8 @@ class BaseRunner(ABC):
return self._server
@property
def addresses(self) -> List[str]:
ret = [] # type: List[str]
def addresses(self) -> List[Any]:
ret = [] # type: List[Any]
for site in self._sites:
server = site._server
if server is not None:
@ -238,10 +285,6 @@ class BaseRunner(ABC):
async def cleanup(self) -> None:
loop = asyncio.get_event_loop()
if self._server is None:
# not started yet, do nothing
return
# The loop over sites is intentional: an exception on gather()
# leaves self._sites in an unpredictable state.
# The loop guarantees that a site is either deleted on success or
@ -268,29 +311,27 @@ class BaseRunner(ABC):
def _reg_site(self, site: BaseSite) -> None:
if site in self._sites:
raise RuntimeError("Site {} is already registered in runner {}"
.format(site, self))
raise RuntimeError(f"Site {site} is already registered in runner {self}")
self._sites.append(site)
def _check_site(self, site: BaseSite) -> None:
if site not in self._sites:
raise RuntimeError("Site {} is not registered in runner {}"
.format(site, self))
raise RuntimeError(f"Site {site} is not registered in runner {self}")
def _unreg_site(self, site: BaseSite) -> None:
if site not in self._sites:
raise RuntimeError("Site {} is not registered in runner {}"
.format(site, self))
raise RuntimeError(f"Site {site} is not registered in runner {self}")
self._sites.remove(site)
class ServerRunner(BaseRunner):
"""Low-level web server runner"""
__slots__ = ('_web_server',)
__slots__ = ("_web_server",)
def __init__(self, web_server: Server, *,
handle_signals: bool=False, **kwargs: Any) -> None:
def __init__(
self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
) -> None:
super().__init__(handle_signals=handle_signals, **kwargs)
self._web_server = web_server
@ -307,14 +348,17 @@ class ServerRunner(BaseRunner):
class AppRunner(BaseRunner):
"""Web Application runner"""
__slots__ = ('_app',)
__slots__ = ("_app",)
def __init__(self, app: Application, *,
handle_signals: bool=False, **kwargs: Any) -> None:
def __init__(
self, app: Application, *, handle_signals: bool = False, **kwargs: Any
) -> None:
super().__init__(handle_signals=handle_signals, **kwargs)
if not isinstance(app, Application):
raise TypeError("The first argument should be web.Application "
"instance, got {!r}".format(app))
raise TypeError(
"The first argument should be web.Application "
"instance, got {!r}".format(app)
)
self._app = app
@property
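A minimal sketch of driving the runner/site API shown above by hand, assuming a ready web.Application; host, port, and the parking strategy are illustrative.

import asyncio
from aiohttp import web

async def serve(app: web.Application) -> None:
    runner = web.AppRunner(app)
    await runner.setup()  # required before creating a site (see BaseSite.__init__)
    site = web.TCPSite(runner, host="127.0.0.1", port=8080)
    await site.start()
    try:
        await asyncio.Event().wait()  # park until cancelled; placeholder lifecycle
    finally:
        await runner.cleanup()        # stops registered sites and shuts the server down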

View file

@ -9,17 +9,18 @@ from .streams import StreamReader
from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler
from .web_request import BaseRequest
__all__ = ('Server',)
__all__ = ("Server",)
class Server:
def __init__(self,
handler: _RequestHandler,
*,
request_factory: Optional[_RequestFactory]=None,
loop: Optional[asyncio.AbstractEventLoop]=None,
**kwargs: Any) -> None:
def __init__(
self,
handler: _RequestHandler,
*,
request_factory: Optional[_RequestFactory] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
**kwargs: Any
) -> None:
self._loop = get_running_loop(loop)
self._connections = {} # type: Dict[RequestHandler, asyncio.Transport]
self._kwargs = kwargs
@ -31,26 +32,30 @@ class Server:
def connections(self) -> List[RequestHandler]:
return list(self._connections.keys())
def connection_made(self, handler: RequestHandler,
transport: asyncio.Transport) -> None:
def connection_made(
self, handler: RequestHandler, transport: asyncio.Transport
) -> None:
self._connections[handler] = transport
def connection_lost(self, handler: RequestHandler,
exc: Optional[BaseException]=None) -> None:
def connection_lost(
self, handler: RequestHandler, exc: Optional[BaseException] = None
) -> None:
if handler in self._connections:
del self._connections[handler]
def _make_request(self, message: RawRequestMessage,
payload: StreamReader,
protocol: RequestHandler,
writer: AbstractStreamWriter,
task: 'asyncio.Task[None]') -> BaseRequest:
return BaseRequest(
message, payload, protocol, writer, task, self._loop)
def _make_request(
self,
message: RawRequestMessage,
payload: StreamReader,
protocol: RequestHandler,
writer: AbstractStreamWriter,
task: "asyncio.Task[None]",
) -> BaseRequest:
return BaseRequest(message, payload, protocol, writer, task, self._loop)
async def shutdown(self, timeout: Optional[float]=None) -> None:
async def shutdown(self, timeout: Optional[float] = None) -> None:
coros = [conn.shutdown(timeout) for conn in self._connections]
await asyncio.gather(*coros, loop=self._loop)
await asyncio.gather(*coros)
self._connections.clear()
def __call__(self) -> RequestHandler:
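A short sketch of the low-level Server entry point defined here, paired with the ServerRunner from the previous file; the handler body and address are illustrative.

import asyncio
from aiohttp import web

async def handler(request: web.BaseRequest) -> web.Response:
    return web.Response(text="OK")

async def main() -> None:
    server = web.Server(handler)        # the class defined above
    runner = web.ServerRunner(server)
    await runner.setup()
    site = web.TCPSite(runner, "localhost", 8080)
    await site.start()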

File diff suppressed because it is too large

View file

@ -3,7 +3,7 @@ import base64
import binascii
import hashlib
import json
from typing import Any, Iterable, Optional, Tuple
from typing import Any, Iterable, Optional, Tuple, cast
import async_timeout
import attr
@ -19,26 +19,32 @@ from .http import (
WebSocketError,
WebSocketReader,
WebSocketWriter,
WSCloseCode,
WSMessage,
WSMsgType as WSMsgType,
ws_ext_gen,
ws_ext_parse,
)
from .http import WSMsgType as WSMsgType
from .http import ws_ext_gen, ws_ext_parse
from .log import ws_logger
from .streams import EofStream, FlowControlDataQueue
from .typedefs import JSONDecoder, JSONEncoder
from .typedefs import Final, JSONDecoder, JSONEncoder
from .web_exceptions import HTTPBadRequest, HTTPException
from .web_request import BaseRequest
from .web_response import StreamResponse
__all__ = ('WebSocketResponse', 'WebSocketReady', 'WSMsgType',)
__all__ = (
"WebSocketResponse",
"WebSocketReady",
"WSMsgType",
)
THRESHOLD_CONNLOST_ACCESS = 5
THRESHOLD_CONNLOST_ACCESS: Final[int] = 5
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class WebSocketReady:
ok = attr.ib(type=bool)
protocol = attr.ib(type=Optional[str])
ok: bool
protocol: Optional[str]
def __bool__(self) -> bool:
return self.ok
@ -48,12 +54,18 @@ class WebSocketResponse(StreamResponse):
_length_check = False
def __init__(self, *,
timeout: float=10.0, receive_timeout: Optional[float]=None,
autoclose: bool=True, autoping: bool=True,
heartbeat: Optional[float]=None,
protocols: Iterable[str]=(),
compress: bool=True, max_msg_size: int=4*1024*1024) -> None:
def __init__(
self,
*,
timeout: float = 10.0,
receive_timeout: Optional[float] = None,
autoclose: bool = True,
autoping: bool = True,
heartbeat: Optional[float] = None,
protocols: Iterable[str] = (),
compress: bool = True,
max_msg_size: int = 4 * 1024 * 1024,
) -> None:
super().__init__(status=101)
self._protocols = protocols
self._ws_protocol = None # type: Optional[str]
@ -71,10 +83,10 @@ class WebSocketResponse(StreamResponse):
self._autoclose = autoclose
self._autoping = autoping
self._heartbeat = heartbeat
self._heartbeat_cb = None
self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
if heartbeat is not None:
self._pong_heartbeat = heartbeat / 2.0
self._pong_response_cb = None
self._pong_response_cb: Optional[asyncio.TimerHandle] = None
self._compress = compress
self._max_msg_size = max_msg_size
@ -91,25 +103,29 @@ class WebSocketResponse(StreamResponse):
self._cancel_heartbeat()
if self._heartbeat is not None:
assert self._loop is not None
self._heartbeat_cb = call_later(
self._send_heartbeat, self._heartbeat, self._loop)
self._send_heartbeat, self._heartbeat, self._loop
)
def _send_heartbeat(self) -> None:
if self._heartbeat is not None and not self._closed:
assert self._loop is not None
# fire-and-forget a task is not perfect but maybe OK for
# sending a ping. Otherwise we would need a long-lived heartbeat
# task in the class.
self._loop.create_task(self._writer.ping()) # type: ignore
self._loop.create_task(self._writer.ping()) # type: ignore[union-attr]
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = call_later(
self._pong_not_received, self._pong_heartbeat, self._loop)
self._pong_not_received, self._pong_heartbeat, self._loop
)
def _pong_not_received(self) -> None:
if self._req is not None and self._req.transport is not None:
self._closed = True
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = asyncio.TimeoutError()
self._req.transport.close()
@ -125,27 +141,32 @@ class WebSocketResponse(StreamResponse):
await payload_writer.drain()
return payload_writer
def _handshake(self, request: BaseRequest) -> Tuple['CIMultiDict[str]',
str,
bool,
bool]:
def _handshake(
self, request: BaseRequest
) -> Tuple["CIMultiDict[str]", str, bool, bool]:
headers = request.headers
if 'websocket' != headers.get(hdrs.UPGRADE, '').lower().strip():
if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip():
raise HTTPBadRequest(
text=('No WebSocket UPGRADE hdr: {}\n Can '
'"Upgrade" only to "WebSocket".')
.format(headers.get(hdrs.UPGRADE)))
text=(
"No WebSocket UPGRADE hdr: {}\n Can "
'"Upgrade" only to "WebSocket".'
).format(headers.get(hdrs.UPGRADE))
)
if 'upgrade' not in headers.get(hdrs.CONNECTION, '').lower():
if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower():
raise HTTPBadRequest(
text='No CONNECTION upgrade hdr: {}'.format(
headers.get(hdrs.CONNECTION)))
text="No CONNECTION upgrade hdr: {}".format(
headers.get(hdrs.CONNECTION)
)
)
# find common sub-protocol between client and server
protocol = None
if hdrs.SEC_WEBSOCKET_PROTOCOL in headers:
req_protocols = [str(proto.strip()) for proto in
headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(',')]
req_protocols = [
str(proto.strip())
for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
]
for proto in req_protocols:
if proto in self._protocols:
@ -154,31 +175,34 @@ class WebSocketResponse(StreamResponse):
else:
# No overlap found: Return no protocol as per spec
ws_logger.warning(
'Client protocols %r dont overlap server-known ones %r',
req_protocols, self._protocols)
"Client protocols %r dont overlap server-known ones %r",
req_protocols,
self._protocols,
)
# check supported version
version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, '')
if version not in ('13', '8', '7'):
raise HTTPBadRequest(
text='Unsupported version: {}'.format(version))
version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "")
if version not in ("13", "8", "7"):
raise HTTPBadRequest(text=f"Unsupported version: {version}")
# check client handshake for validity
key = headers.get(hdrs.SEC_WEBSOCKET_KEY)
try:
if not key or len(base64.b64decode(key)) != 16:
raise HTTPBadRequest(
text='Handshake error: {!r}'.format(key))
raise HTTPBadRequest(text=f"Handshake error: {key!r}")
except binascii.Error:
raise HTTPBadRequest(
text='Handshake error: {!r}'.format(key)) from None
raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None
accept_val = base64.b64encode(
hashlib.sha1(key.encode() + WS_KEY).digest()).decode()
response_headers = CIMultiDict( # type: ignore
{hdrs.UPGRADE: 'websocket',
hdrs.CONNECTION: 'upgrade',
hdrs.SEC_WEBSOCKET_ACCEPT: accept_val})
hashlib.sha1(key.encode() + WS_KEY).digest()
).decode()
response_headers = CIMultiDict( # type: ignore[var-annotated]
{
hdrs.UPGRADE: "websocket", # type: ignore[arg-type]
hdrs.CONNECTION: "upgrade",
hdrs.SEC_WEBSOCKET_ACCEPT: accept_val,
}
)
notakeover = False
compress = 0
@ -188,24 +212,24 @@ class WebSocketResponse(StreamResponse):
# If something happened, just drop compress extension
compress, notakeover = ws_ext_parse(extensions, isserver=True)
if compress:
enabledext = ws_ext_gen(compress=compress, isserver=True,
server_notakeover=notakeover)
enabledext = ws_ext_gen(
compress=compress, isserver=True, server_notakeover=notakeover
)
response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext
if protocol:
response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol
return (response_headers, # type: ignore
protocol,
compress,
notakeover)
return (
response_headers,
protocol,
compress,
notakeover,
) # type: ignore[return-value]
def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]:
self._loop = request._loop
headers, protocol, compress, notakeover = self._handshake(
request)
self._reset_heartbeat()
headers, protocol, compress, notakeover = self._handshake(request)
self.set_status(101)
self.headers.update(headers)
@ -213,29 +237,32 @@ class WebSocketResponse(StreamResponse):
self._compress = compress
transport = request._protocol.transport
assert transport is not None
writer = WebSocketWriter(request._protocol,
transport,
compress=compress,
notakeover=notakeover)
writer = WebSocketWriter(
request._protocol, transport, compress=compress, notakeover=notakeover
)
return protocol, writer
def _post_start(self, request: BaseRequest,
protocol: str, writer: WebSocketWriter) -> None:
def _post_start(
self, request: BaseRequest, protocol: str, writer: WebSocketWriter
) -> None:
self._ws_protocol = protocol
self._writer = writer
self._reset_heartbeat()
loop = self._loop
assert loop is not None
self._reader = FlowControlDataQueue(
request._protocol, limit=2 ** 16, loop=loop)
request.protocol.set_parser(WebSocketReader(
self._reader, self._max_msg_size, compress=self._compress))
self._reader = FlowControlDataQueue(request._protocol, 2 ** 16, loop=loop)
request.protocol.set_parser(
WebSocketReader(self._reader, self._max_msg_size, compress=self._compress)
)
# disable HTTP keepalive for WebSocket
request.protocol.keep_alive(False)
def can_prepare(self, request: BaseRequest) -> WebSocketReady:
if self._writer is not None:
raise RuntimeError('Already started')
raise RuntimeError("Already started")
try:
_, protocol, _, _ = self._handshake(request)
except HTTPException:
@ -262,38 +289,41 @@ class WebSocketResponse(StreamResponse):
def exception(self) -> Optional[BaseException]:
return self._exception
async def ping(self, message: bytes=b'') -> None:
async def ping(self, message: bytes = b"") -> None:
if self._writer is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
await self._writer.ping(message)
async def pong(self, message: bytes=b'') -> None:
async def pong(self, message: bytes = b"") -> None:
# unsolicited pong
if self._writer is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
await self._writer.pong(message)
async def send_str(self, data: str, compress: Optional[bool]=None) -> None:
async def send_str(self, data: str, compress: Optional[bool] = None) -> None:
if self._writer is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
if not isinstance(data, str):
raise TypeError('data argument must be str (%r)' % type(data))
raise TypeError("data argument must be str (%r)" % type(data))
await self._writer.send(data, binary=False, compress=compress)
async def send_bytes(self, data: bytes,
compress: Optional[bool]=None) -> None:
async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None:
if self._writer is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError('data argument must be byte-ish (%r)' %
type(data))
raise TypeError("data argument must be byte-ish (%r)" % type(data))
await self._writer.send(data, binary=True, compress=compress)
async def send_json(self, data: Any, compress: Optional[bool]=None, *,
dumps: JSONEncoder=json.dumps) -> None:
async def send_json(
self,
data: Any,
compress: Optional[bool] = None,
*,
dumps: JSONEncoder = json.dumps,
) -> None:
await self.send_str(dumps(data), compress=compress)
async def write_eof(self) -> None: # type: ignore
async def write_eof(self) -> None: # type: ignore[override]
if self._eof_sent:
return
if self._payload_writer is None:
@ -302,9 +332,9 @@ class WebSocketResponse(StreamResponse):
await self.close()
self._eof_sent = True
async def close(self, *, code: int=1000, message: bytes=b'') -> bool:
async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
if self._writer is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
self._cancel_heartbeat()
reader = self._reader
@ -324,10 +354,10 @@ class WebSocketResponse(StreamResponse):
assert writer is not None
await writer.drain()
except (asyncio.CancelledError, asyncio.TimeoutError):
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
raise
except Exception as exc:
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = exc
return True
@ -337,13 +367,13 @@ class WebSocketResponse(StreamResponse):
reader = self._reader
assert reader is not None
try:
with async_timeout.timeout(self._timeout, loop=self._loop):
async with async_timeout.timeout(self._timeout):
msg = await reader.read()
except asyncio.CancelledError:
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
raise
except Exception as exc:
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = exc
return True
@ -351,27 +381,26 @@ class WebSocketResponse(StreamResponse):
self._close_code = msg.data
return True
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = asyncio.TimeoutError()
return True
else:
return False
async def receive(self, timeout: Optional[float]=None) -> WSMessage:
async def receive(self, timeout: Optional[float] = None) -> WSMessage:
if self._reader is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
loop = self._loop
assert loop is not None
while True:
if self._waiting is not None:
raise RuntimeError(
'Concurrent call to receive() is not allowed')
raise RuntimeError("Concurrent call to receive() is not allowed")
if self._closed:
self._conn_lost += 1
if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS:
raise RuntimeError('WebSocket connection is closed.')
raise RuntimeError("WebSocket connection is closed.")
return WS_CLOSED_MESSAGE
elif self._closing:
return WS_CLOSING_MESSAGE
@ -379,8 +408,7 @@ class WebSocketResponse(StreamResponse):
try:
self._waiting = loop.create_future()
try:
with async_timeout.timeout(
timeout or self._receive_timeout, loop=self._loop):
async with async_timeout.timeout(timeout or self._receive_timeout):
msg = await self._reader.read()
self._reset_heartbeat()
finally:
@ -388,10 +416,10 @@ class WebSocketResponse(StreamResponse):
set_result(waiter, True)
self._waiting = None
except (asyncio.CancelledError, asyncio.TimeoutError):
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
raise
except EofStream:
self._close_code = 1000
self._close_code = WSCloseCode.OK
await self.close()
return WSMessage(WSMsgType.CLOSED, None, None)
except WebSocketError as exc:
@ -401,7 +429,7 @@ class WebSocketResponse(StreamResponse):
except Exception as exc:
self._exception = exc
self._closing = True
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
await self.close()
return WSMessage(WSMsgType.ERROR, exc, None)
@ -420,37 +448,40 @@ class WebSocketResponse(StreamResponse):
return msg
async def receive_str(self, *, timeout: Optional[float]=None) -> str:
async def receive_str(self, *, timeout: Optional[float] = None) -> str:
msg = await self.receive(timeout)
if msg.type != WSMsgType.TEXT:
raise TypeError(
"Received message {}:{!r} is not WSMsgType.TEXT".format(
msg.type, msg.data))
return msg.data
msg.type, msg.data
)
)
return cast(str, msg.data)
async def receive_bytes(self, *, timeout: Optional[float]=None) -> bytes:
async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
msg = await self.receive(timeout)
if msg.type != WSMsgType.BINARY:
raise TypeError(
"Received message {}:{!r} is not bytes".format(msg.type,
msg.data))
return msg.data
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
return cast(bytes, msg.data)
async def receive_json(self, *, loads: JSONDecoder=json.loads,
timeout: Optional[float]=None) -> Any:
async def receive_json(
self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None
) -> Any:
data = await self.receive_str(timeout=timeout)
return loads(data)
async def write(self, data: bytes) -> None:
raise RuntimeError("Cannot call .write() for websocket")
def __aiter__(self) -> 'WebSocketResponse':
def __aiter__(self) -> "WebSocketResponse":
return self
async def __anext__(self) -> WSMessage:
msg = await self.receive()
if msg.type in (WSMsgType.CLOSE,
WSMsgType.CLOSING,
WSMsgType.CLOSED):
raise StopAsyncIteration # NOQA
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
raise StopAsyncIteration
return msg
def _cancel(self, exc: BaseException) -> None:
if self._reader is not None:
self._reader.set_exception(exc)
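A minimal sketch of a handler using the symbolic WSCloseCode values this diff migrates to (replacing bare 1000/1006 literals), assuming stock aiohttp; the echo logic and heartbeat interval are illustrative.

from aiohttp import WSCloseCode, WSMsgType, web

async def ws_handler(request: web.Request) -> web.WebSocketResponse:
    ws = web.WebSocketResponse(heartbeat=30.0)
    await ws.prepare(request)
    async for msg in ws:                     # __aiter__/__anext__ from the class above
        if msg.type == WSMsgType.TEXT:
            await ws.send_str(msg.data)      # echo the text frame back
        elif msg.type == WSMsgType.ERROR:
            break
    # WSCloseCode.OK replaces the bare 1000 literal used before this change
    await ws.close(code=WSCloseCode.OK, message=b"bye")
    return ws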

View file

@ -19,18 +19,17 @@ from .web_log import AccessLogger
try:
import ssl
SSLContext = ssl.SSLContext # noqa
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore
SSLContext = object # type: ignore
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]
__all__ = ('GunicornWebWorker',
'GunicornUVLoopWebWorker',
'GunicornTokioWebWorker')
__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker")
class GunicornWebWorker(base.Worker):
class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default
@ -58,28 +57,39 @@ class GunicornWebWorker(base.Worker):
self.loop.run_until_complete(self._task)
except Exception:
self.log.exception("Exception in gunicorn worker")
if sys.version_info >= (3, 6):
self.loop.run_until_complete(self.loop.shutdown_asyncgens())
self.loop.run_until_complete(self.loop.shutdown_asyncgens())
self.loop.close()
sys.exit(self.exit_code)
async def _run(self) -> None:
runner = None
if isinstance(self.wsgi, Application):
app = self.wsgi
elif asyncio.iscoroutinefunction(self.wsgi):
app = await self.wsgi()
wsgi = await self.wsgi()
if isinstance(wsgi, web.AppRunner):
runner = wsgi
app = runner.app
else:
app = wsgi
else:
raise RuntimeError("wsgi app should be either Application or "
"async function returning Application, got {}"
.format(self.wsgi))
access_log = self.log.access_log if self.cfg.accesslog else None
runner = web.AppRunner(app,
logger=self.log,
keepalive_timeout=self.cfg.keepalive,
access_log=access_log,
access_log_format=self._get_valid_log_format(
self.cfg.access_log_format))
raise RuntimeError(
"wsgi app should be either Application or "
"async function returning Application, got {}".format(self.wsgi)
)
if runner is None:
access_log = self.log.access_log if self.cfg.accesslog else None
runner = web.AppRunner(
app,
logger=self.log,
keepalive_timeout=self.cfg.keepalive,
access_log=access_log,
access_log_format=self._get_valid_log_format(
self.cfg.access_log_format
),
)
await runner.setup()
ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None
@ -90,14 +100,17 @@ class GunicornWebWorker(base.Worker):
assert server is not None
for sock in self.sockets:
site = web.SockSite(
runner, sock, ssl_context=ctx,
shutdown_timeout=self.cfg.graceful_timeout / 100 * 95)
runner,
sock,
ssl_context=ctx,
shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
)
await site.start()
# If our parent changed then we shut down.
pid = os.getpid()
try:
while self.alive: # type: ignore
while self.alive: # type: ignore[has-type]
self.notify()
cnt = server.requests_count
@ -115,7 +128,7 @@ class GunicornWebWorker(base.Worker):
await runner.cleanup()
def _wait_next_notify(self) -> 'asyncio.Future[bool]':
def _wait_next_notify(self) -> "asyncio.Future[bool]":
self._notify_waiter_done()
loop = self.loop
@ -125,7 +138,9 @@ class GunicornWebWorker(base.Worker):
return waiter
def _notify_waiter_done(self, waiter: 'asyncio.Future[bool]'=None) -> None:
def _notify_waiter_done(
self, waiter: Optional["asyncio.Future[bool]"] = None
) -> None:
if waiter is None:
waiter = self._notify_waiter
if waiter is not None:
@ -137,28 +152,42 @@ class GunicornWebWorker(base.Worker):
def init_signals(self) -> None:
# Set up signals through the event loop API.
self.loop.add_signal_handler(signal.SIGQUIT, self.handle_quit,
signal.SIGQUIT, None)
self.loop.add_signal_handler(
signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
)
self.loop.add_signal_handler(signal.SIGTERM, self.handle_exit,
signal.SIGTERM, None)
self.loop.add_signal_handler(
signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
)
self.loop.add_signal_handler(signal.SIGINT, self.handle_quit,
signal.SIGINT, None)
self.loop.add_signal_handler(
signal.SIGINT, self.handle_quit, signal.SIGINT, None
)
self.loop.add_signal_handler(signal.SIGWINCH, self.handle_winch,
signal.SIGWINCH, None)
self.loop.add_signal_handler(
signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
)
self.loop.add_signal_handler(signal.SIGUSR1, self.handle_usr1,
signal.SIGUSR1, None)
self.loop.add_signal_handler(
signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
)
self.loop.add_signal_handler(signal.SIGABRT, self.handle_abort,
signal.SIGABRT, None)
self.loop.add_signal_handler(
signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
)
# Don't let SIGTERM and SIGUSR1 disturb active requests
# by interrupting system calls
signal.siginterrupt(signal.SIGTERM, False)
signal.siginterrupt(signal.SIGUSR1, False)
# Reset signals so Gunicorn doesn't swallow subprocess return codes
# See: https://github.com/aio-libs/aiohttp/issues/6130
if sys.version_info < (3, 8):
# Starting from Python 3.8,
# the default child watcher is ThreadedChildWatcher.
# The watcher doesn't depend on SIGCHLD signal,
# there is no need to reset it.
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
def handle_quit(self, sig: int, frame: FrameType) -> None:
self.alive = False
@ -176,13 +205,13 @@ class GunicornWebWorker(base.Worker):
sys.exit(1)
@staticmethod
def _create_ssl_context(cfg: Any) -> 'SSLContext':
""" Creates SSLContext instance for usage in asyncio.create_server.
def _create_ssl_context(cfg: Any) -> "SSLContext":
"""Creates SSLContext instance for usage in asyncio.create_server.
See ssl.SSLSocket.__init__ for more details.
"""
if ssl is None: # pragma: no cover
raise RuntimeError('SSL is not supported.')
raise RuntimeError("SSL is not supported.")
ctx = ssl.SSLContext(cfg.ssl_version)
ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
@ -196,7 +225,7 @@ class GunicornWebWorker(base.Worker):
def _get_valid_log_format(self, source_format: str) -> str:
if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
return self.DEFAULT_AIOHTTP_LOG_FORMAT
elif re.search(r'%\([^\)]+\)', source_format):
elif re.search(r"%\([^\)]+\)", source_format):
raise ValueError(
"Gunicorn's style options in form of `%(name)s` are not "
"supported for the log formatting. Please use aiohttp's "
@ -209,7 +238,6 @@ class GunicornWebWorker(base.Worker):
class GunicornUVLoopWebWorker(GunicornWebWorker):
def init_process(self) -> None:
import uvloop
@ -226,7 +254,6 @@ class GunicornUVLoopWebWorker(GunicornWebWorker):
class GunicornTokioWebWorker(GunicornWebWorker):
def init_process(self) -> None: # pragma: no cover
import tokio
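A sketch of an async factory compatible with the updated worker above, which now accepts either an Application or a pre-built AppRunner from the factory; the module layout and names (index, make_runner) are illustrative.

from aiohttp import web

async def index(request: web.Request) -> web.Response:
    return web.Response(text="ok")

async def make_runner() -> web.AppRunner:
    app = web.Application()
    app.router.add_get("/", index)
    # returning an AppRunner makes the worker skip building its own
    return web.AppRunner(app)

It would then be launched with something like gunicorn module:make_runner --worker-class aiohttp.GunicornWebWorker, where the module and factory names are placeholders.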

View file

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -1,165 +0,0 @@
Metadata-Version: 2.1
Name: async-timeout
Version: 3.0.1
Summary: Timeout context manager for asyncio programs
Home-page: https://github.com/aio-libs/async_timeout/
Author: Andrew Svetlov
Author-email: andrew.svetlov@gmail.com
License: Apache 2
Platform: UNKNOWN
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Framework :: AsyncIO
Requires-Python: >=3.5.3
async-timeout
=============
.. image:: https://travis-ci.org/aio-libs/async-timeout.svg?branch=master
:target: https://travis-ci.org/aio-libs/async-timeout
.. image:: https://codecov.io/gh/aio-libs/async-timeout/branch/master/graph/badge.svg
:target: https://codecov.io/gh/aio-libs/async-timeout
.. image:: https://img.shields.io/pypi/v/async-timeout.svg
:target: https://pypi.python.org/pypi/async-timeout
.. image:: https://badges.gitter.im/Join%20Chat.svg
:target: https://gitter.im/aio-libs/Lobby
:alt: Chat on Gitter
asyncio-compatible timeout context manager.
Usage example
-------------
The context manager is useful when you want to apply timeout logic
around a block of code, or when ``asyncio.wait_for()`` is not
suitable. It is also much faster than ``asyncio.wait_for()``
because ``timeout`` doesn't create a new task.
The ``timeout(timeout, *, loop=None)`` call returns a context manager
that cancels a block on *timeout* expiring::
async with timeout(1.5):
await inner()
1. If ``inner()`` completes in under ``1.5`` seconds, nothing
happens.
2. Otherwise ``inner()`` is cancelled internally by sending
``asyncio.CancelledError`` into it, but ``asyncio.TimeoutError`` is
raised outside of the context manager scope.
The *timeout* parameter can be ``None`` to skip the timeout functionality.
The context manager has an ``.expired`` property to check whether the
timeout fired inside the context manager::
async with timeout(1.5) as cm:
await inner()
print(cm.expired)
The property is ``True`` if the ``inner()`` execution was cancelled by
the timeout context manager.
If the ``inner()`` call explicitly raises ``TimeoutError``, ``cm.expired``
is ``False``.
Installation
------------
::
$ pip install async-timeout
The library is Python 3 only!
Authors and License
-------------------
The module is written by Andrew Svetlov.
It's *Apache 2* licensed and freely available.
CHANGES
=======
3.0.1 (2018-10-09)
------------------
- More aggressive typing (#48)
3.0.0 (2018-05-05)
------------------
- Drop Python 3.4, the minimal supported version is Python 3.5.3
- Provide type annotations
2.0.1 (2018-03-13)
------------------
* Fix ``PendingDeprecationWarning`` on Python 3.7 (#33)
2.0.0 (2017-10-09)
------------------
* Changed `timeout <= 0` behaviour
* Backward incompatible change: prior to this version `0` was a
shortcut for `None`
* When timeout <= 0, `TimeoutError` is raised faster
1.4.0 (2017-09-09)
------------------
* Implement `remaining` property (#20)
* If timeout is not started yet or started unconstrained:
`remaining` is `None`
* If timeout is expired: `remaining` is `0.0`
* All others: roughly amount of time before `TimeoutError` is triggered
1.3.0 (2017-08-23)
------------------
* Don't suppress nested exception on timeout. Exception context points
on cancelled line with suspended `await` (#13)
* Introduce `.timeout` property (#16)
* Add methods for using as async context manager (#9)
1.2.1 (2017-05-02)
------------------
* Support unpublished event loop's "current_task" api.
1.2.0 (2017-03-11)
------------------
* Extra check on context manager exit
* 0 is no-op timeout
1.1.0 (2016-10-20)
------------------
* Rename to `async-timeout`
1.0.0 (2016-09-09)
------------------
* The first release.

View file

@ -1,9 +0,0 @@
async_timeout-3.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
async_timeout-3.0.1.dist-info/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357
async_timeout-3.0.1.dist-info/METADATA,sha256=_3ByJ8L0-cU5wWu75_Rl8n0ZkbSCgW15fMAu_DzwTm0,4013
async_timeout-3.0.1.dist-info/RECORD,,
async_timeout-3.0.1.dist-info/WHEEL,sha256=-ZFxwj8mZJPIVcZGLrsQ8UGRcxVAOExzPLVBGR7u7bE,92
async_timeout-3.0.1.dist-info/top_level.txt,sha256=9oM4e7Twq8iD_7_Q3Mz0E6GPIB6vJvRFo-UBwUQtBDU,14
async_timeout/__init__.py,sha256=mGvWOoRqLtScEU3kmzqtTSH7EQsHvu8zhgHxOTXCn7c,3654
async_timeout/__pycache__/__init__.cpython-38.pyc,,
async_timeout/py.typed,sha256=9LJP7QJ0oxYYrBtmXuFirzMbS3D9_3Tz-d3tyUtNp0U,11

View file

@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.32.1)
Root-Is-Purelib: true
Tag: py3-none-any

View file

@ -1,115 +1,247 @@
import asyncio
import enum
import sys
import warnings
from types import TracebackType
from typing import Optional, Type, Any # noqa
from typing import Any, Optional, Type
__version__ = '3.0.1'
PY_37 = sys.version_info >= (3, 7)
if sys.version_info >= (3, 8):
from typing import final
else:
from typing_extensions import final
class timeout:
__version__ = "4.0.2"
__all__ = ("timeout", "timeout_at", "Timeout")
def timeout(delay: Optional[float]) -> "Timeout":
"""timeout context manager.
Useful when you want to apply timeout logic around a block of code,
or when asyncio.wait_for() is not suitable. For example:
>>> with timeout(0.001):
>>> async with timeout(0.001):
... async with aiohttp.get('https://github.com') as r:
... await r.text()
timeout - value in seconds or None to disable timeout logic
loop - asyncio compatible event loop
delay - value in seconds or None to disable timeout logic
"""
def __init__(self, timeout: Optional[float],
*, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
self._timeout = timeout
if loop is None:
loop = asyncio.get_event_loop()
loop = _get_running_loop()
if delay is not None:
deadline = loop.time() + delay # type: Optional[float]
else:
deadline = None
return Timeout(deadline, loop)
def timeout_at(deadline: Optional[float]) -> "Timeout":
"""Schedule the timeout at absolute time.
The deadline argument is a time in the same clock system as
loop.time().
Please note: it is not POSIX time but a time with an
undefined starting base, e.g. the time since system power-on.
>>> async with timeout_at(loop.time() + 10):
... async with aiohttp.get('https://github.com') as r:
... await r.text()
"""
loop = _get_running_loop()
return Timeout(deadline, loop)
class _State(enum.Enum):
INIT = "INIT"
ENTER = "ENTER"
TIMEOUT = "TIMEOUT"
EXIT = "EXIT"
@final
class Timeout:
# Internal class, please don't instantiate it directly
# Use timeout() and timeout_at() public factories instead.
#
# Implementation note: `async with timeout()` is preferred
# over `with timeout()`.
# While technically the Timeout class implementation
# doesn't need to be async at all,
# the `async with` statement makes it explicit that
# the context manager should be used from an async function context.
#
# This design helps avoid many silly misuses.
#
# TimeoutError is raised immediately when scheduled
# if the deadline has already passed.
# The purpose is to time out as soon as possible
# without waiting for the next await expression.
__slots__ = ("_deadline", "_loop", "_state", "_timeout_handler")
def __init__(
self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
) -> None:
self._loop = loop
self._task = None # type: Optional[asyncio.Task[Any]]
self._cancelled = False
self._cancel_handler = None # type: Optional[asyncio.Handle]
self._cancel_at = None # type: Optional[float]
self._state = _State.INIT
def __enter__(self) -> 'timeout':
return self._do_enter()
self._timeout_handler = None # type: Optional[asyncio.Handle]
if deadline is None:
self._deadline = None # type: Optional[float]
else:
self.update(deadline)
def __exit__(self,
exc_type: Type[BaseException],
exc_val: BaseException,
exc_tb: TracebackType) -> Optional[bool]:
def __enter__(self) -> "Timeout":
warnings.warn(
"with timeout() is deprecated, use async with timeout() instead",
DeprecationWarning,
stacklevel=2,
)
self._do_enter()
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
self._do_exit(exc_type)
return None
async def __aenter__(self) -> 'timeout':
return self._do_enter()
async def __aenter__(self) -> "Timeout":
self._do_enter()
return self
async def __aexit__(self,
exc_type: Type[BaseException],
exc_val: BaseException,
exc_tb: TracebackType) -> None:
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
self._do_exit(exc_type)
return None
@property
def expired(self) -> bool:
return self._cancelled
"""Is timeout expired during execution?"""
return self._state == _State.TIMEOUT
@property
def remaining(self) -> Optional[float]:
if self._cancel_at is not None:
return max(self._cancel_at - self._loop.time(), 0.0)
def deadline(self) -> Optional[float]:
return self._deadline
def reject(self) -> None:
"""Reject scheduled timeout if any."""
# cancel is maybe better name but
# task.cancel() raises CancelledError in asyncio world.
if self._state not in (_State.INIT, _State.ENTER):
raise RuntimeError(f"invalid state {self._state.value}")
self._reject()
def _reject(self) -> None:
if self._timeout_handler is not None:
self._timeout_handler.cancel()
self._timeout_handler = None
def shift(self, delay: float) -> None:
"""Advance timeout on delay seconds.
The delay can be negative.
Raise RuntimeError if shift is called when deadline is not scheduled
"""
deadline = self._deadline
if deadline is None:
raise RuntimeError("cannot shift timeout if deadline is not scheduled")
self.update(deadline + delay)
def update(self, deadline: float) -> None:
"""Set deadline to absolute value.
The deadline argument is a time in the same clock system as
loop.time().
If the new deadline is in the past, the timeout is raised immediately.
Please note: it is not POSIX time but a time with an
undefined starting base, e.g. the time since system power-on.
"""
if self._state == _State.EXIT:
raise RuntimeError("cannot reschedule after exit from context manager")
if self._state == _State.TIMEOUT:
raise RuntimeError("cannot reschedule expired timeout")
if self._timeout_handler is not None:
self._timeout_handler.cancel()
self._deadline = deadline
if self._state != _State.INIT:
self._reschedule()
def _reschedule(self) -> None:
assert self._state == _State.ENTER
deadline = self._deadline
if deadline is None:
return
now = self._loop.time()
if self._timeout_handler is not None:
self._timeout_handler.cancel()
task = _current_task(self._loop)
if deadline <= now:
self._timeout_handler = self._loop.call_soon(self._on_timeout, task)
else:
return None
self._timeout_handler = self._loop.call_at(deadline, self._on_timeout, task)
def _do_enter(self) -> 'timeout':
# Support Tornado 5- without timeout
# Details: https://github.com/python/asyncio/issues/392
if self._timeout is None:
return self
def _do_enter(self) -> None:
if self._state != _State.INIT:
raise RuntimeError(f"invalid state {self._state.value}")
self._state = _State.ENTER
self._reschedule()
self._task = current_task(self._loop)
if self._task is None:
raise RuntimeError('Timeout context manager should be used '
'inside a task')
if self._timeout <= 0:
self._loop.call_soon(self._cancel_task)
return self
self._cancel_at = self._loop.time() + self._timeout
self._cancel_handler = self._loop.call_at(
self._cancel_at, self._cancel_task)
return self
def _do_exit(self, exc_type: Type[BaseException]) -> None:
if exc_type is asyncio.CancelledError and self._cancelled:
self._cancel_handler = None
self._task = None
def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None:
if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT:
self._timeout_handler = None
raise asyncio.TimeoutError
if self._timeout is not None and self._cancel_handler is not None:
self._cancel_handler.cancel()
self._cancel_handler = None
self._task = None
# timeout has not expired
self._state = _State.EXIT
self._reject()
return None
def _cancel_task(self) -> None:
if self._task is not None:
self._task.cancel()
self._cancelled = True
def _on_timeout(self, task: "asyncio.Task[None]") -> None:
task.cancel()
self._state = _State.TIMEOUT
# drop the reference early
self._timeout_handler = None
def current_task(loop: asyncio.AbstractEventLoop) -> 'asyncio.Task[Any]':
if PY_37:
task = asyncio.current_task(loop=loop) # type: ignore
else:
task = asyncio.Task.current_task(loop=loop)
if task is None:
# this should be removed, tokio must use register_task and family API
if hasattr(loop, 'current_task'):
task = loop.current_task() # type: ignore
return task
if sys.version_info >= (3, 7):
def _current_task(loop: asyncio.AbstractEventLoop) -> "Optional[asyncio.Task[Any]]":
return asyncio.current_task(loop=loop)
else:
def _current_task(loop: asyncio.AbstractEventLoop) -> "Optional[asyncio.Task[Any]]":
return asyncio.Task.current_task(loop=loop)
if sys.version_info >= (3, 7):
def _get_running_loop() -> asyncio.AbstractEventLoop:
return asyncio.get_running_loop()
else:
def _get_running_loop() -> asyncio.AbstractEventLoop:
loop = asyncio.get_event_loop()
if not loop.is_running():
raise RuntimeError("no running event loop")
return loop
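The module above is driven entirely by loop callbacks: _reschedule arms call_at, _on_timeout cancels the task, and _do_exit converts the resulting CancelledError into asyncio.TimeoutError. A minimal usage sketch of the deadline API shown above (the ``async_timeout`` import path is an assumption, matching the upstream package name):

.. code:: py

    import asyncio
    from async_timeout import timeout  # assumed import path (upstream package name)

    async def main() -> None:
        try:
            async with timeout(1.0) as cm:
                cm.shift(0.5)            # extend the deadline by half a second
                await asyncio.sleep(10)  # cancelled once the deadline passes
        except asyncio.TimeoutError:
            print("timed out; deadline was", cm.deadline)

    asyncio.run(main())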


@@ -1 +1 @@
Placeholder
Placeholder


@@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2015-2020 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.


@@ -1,149 +0,0 @@
Metadata-Version: 2.1
Name: discord.py
Version: 1.5.1
Summary: A Python wrapper for the Discord API
Home-page: https://github.com/Rapptz/discord.py
Author: Rapptz
License: MIT
Project-URL: Documentation, https://discordpy.readthedocs.io/en/latest/
Project-URL: Issue tracker, https://github.com/Rapptz/discord.py/issues
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: License :: OSI Approved :: MIT License
Classifier: Intended Audience :: Developers
Classifier: Natural Language :: English
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Topic :: Internet
Classifier: Topic :: Software Development :: Libraries
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Utilities
Requires-Python: >=3.5.3
Description-Content-Type: text/x-rst
Requires-Dist: aiohttp (<3.7.0,>=3.6.0)
Provides-Extra: docs
Requires-Dist: sphinx (==1.8.5) ; extra == 'docs'
Requires-Dist: sphinxcontrib-trio (==1.1.1) ; extra == 'docs'
Requires-Dist: sphinxcontrib-websupport ; extra == 'docs'
Provides-Extra: voice
Requires-Dist: PyNaCl (==1.3.0) ; extra == 'voice'
discord.py
==========
.. image:: https://discord.com/api/guilds/336642139381301249/embed.png
:target: https://discord.gg/r3sSKJJ
:alt: Discord server invite
.. image:: https://img.shields.io/pypi/v/discord.py.svg
:target: https://pypi.python.org/pypi/discord.py
:alt: PyPI version info
.. image:: https://img.shields.io/pypi/pyversions/discord.py.svg
:target: https://pypi.python.org/pypi/discord.py
:alt: PyPI supported Python versions
A modern, easy to use, feature-rich, and async ready API wrapper for Discord written in Python.
Key Features
-------------
- Modern Pythonic API using ``async`` and ``await``.
- Proper rate limit handling.
- 100% coverage of the supported Discord API.
- Optimised in both speed and memory.
Installing
----------
**Python 3.5.3 or higher is required**
To install the library without full voice support, you can just run the following command:
.. code:: sh
# Linux/macOS
python3 -m pip install -U discord.py
# Windows
py -3 -m pip install -U discord.py
Otherwise to get voice support you should run the following command:
.. code:: sh
# Linux/macOS
python3 -m pip install -U "discord.py[voice]"
# Windows
py -3 -m pip install -U discord.py[voice]
To install the development version, do the following:
.. code:: sh
$ git clone https://github.com/Rapptz/discord.py
$ cd discord.py
$ python3 -m pip install -U .[voice]
Optional Packages
~~~~~~~~~~~~~~~~~~
* PyNaCl (for voice support)
Please note that when installing voice support on Linux, you must install the following packages via your favourite package manager (e.g. ``apt``, ``dnf``, etc.) before running the above commands:
* libffi-dev (or ``libffi-devel`` on some systems)
* python-dev (e.g. ``python3.6-dev`` for Python 3.6)
Quick Example
--------------
.. code:: py
import discord
class MyClient(discord.Client):
async def on_ready(self):
print('Logged on as', self.user)
async def on_message(self, message):
# don't respond to ourselves
if message.author == self.user:
return
if message.content == 'ping':
await message.channel.send('pong')
client = MyClient()
client.run('token')
Bot Example
~~~~~~~~~~~~~
.. code:: py
import discord
from discord.ext import commands
bot = commands.Bot(command_prefix='>')
@bot.command()
async def ping(ctx):
await ctx.send('pong')
bot.run('token')
You can find more examples in the examples directory.
Links
------
- `Documentation <https://discordpy.readthedocs.io/en/latest/index.html>`_
- `Official Discord Server <https://discord.gg/r3sSKJJ>`_
- `Discord API <https://discord.gg/discord-api>`_


@@ -1,129 +0,0 @@
discord.py-1.5.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
discord.py-1.5.1.dist-info/LICENSE,sha256=d2k2mW1m6rbORuTheZyYRXXbkg1lWLfG7jHUdI3YLGI,1078
discord.py-1.5.1.dist-info/METADATA,sha256=5H1sV440_k4Tyr9FsXDH_mpy-hCExJQS0Hm5DAFzqQE,4092
discord.py-1.5.1.dist-info/RECORD,,
discord.py-1.5.1.dist-info/WHEEL,sha256=v8slff5hmCpvciQ3G55d2d1CnOBupjDFJHDE2dUb1Ao,97
discord.py-1.5.1.dist-info/authors.sh,sha256=voAGwpshKMpc51nQU3Zst1sKCUJvtjhjZCo6MKAp9kU,203
discord.py-1.5.1.dist-info/top_level.txt,sha256=fJkrNbR-_8ubMBUcDEJBcfkpECrvSEmMrNKgvLlQFoM,8
discord/__init__.py,sha256=ABJSvJRE7YCB3XxAKbe9ew9HDVRX9uhmgC3H-kblWdU,2087
discord/__main__.py,sha256=ooH8tfr48ciak8V3_Qn1N7EnmfYTKFidnOWJuHyzpg0,9885
discord/__pycache__/__init__.cpython-38.pyc,,
discord/__pycache__/__main__.cpython-38.pyc,,
discord/__pycache__/abc.cpython-38.pyc,,
discord/__pycache__/activity.cpython-38.pyc,,
discord/__pycache__/appinfo.cpython-38.pyc,,
discord/__pycache__/asset.cpython-38.pyc,,
discord/__pycache__/audit_logs.cpython-38.pyc,,
discord/__pycache__/backoff.cpython-38.pyc,,
discord/__pycache__/calls.cpython-38.pyc,,
discord/__pycache__/channel.cpython-38.pyc,,
discord/__pycache__/client.cpython-38.pyc,,
discord/__pycache__/colour.cpython-38.pyc,,
discord/__pycache__/context_managers.cpython-38.pyc,,
discord/__pycache__/embeds.cpython-38.pyc,,
discord/__pycache__/emoji.cpython-38.pyc,,
discord/__pycache__/enums.cpython-38.pyc,,
discord/__pycache__/errors.cpython-38.pyc,,
discord/__pycache__/file.cpython-38.pyc,,
discord/__pycache__/flags.cpython-38.pyc,,
discord/__pycache__/gateway.cpython-38.pyc,,
discord/__pycache__/guild.cpython-38.pyc,,
discord/__pycache__/http.cpython-38.pyc,,
discord/__pycache__/integrations.cpython-38.pyc,,
discord/__pycache__/invite.cpython-38.pyc,,
discord/__pycache__/iterators.cpython-38.pyc,,
discord/__pycache__/member.cpython-38.pyc,,
discord/__pycache__/mentions.cpython-38.pyc,,
discord/__pycache__/message.cpython-38.pyc,,
discord/__pycache__/mixins.cpython-38.pyc,,
discord/__pycache__/object.cpython-38.pyc,,
discord/__pycache__/oggparse.cpython-38.pyc,,
discord/__pycache__/opus.cpython-38.pyc,,
discord/__pycache__/partial_emoji.cpython-38.pyc,,
discord/__pycache__/permissions.cpython-38.pyc,,
discord/__pycache__/player.cpython-38.pyc,,
discord/__pycache__/raw_models.cpython-38.pyc,,
discord/__pycache__/reaction.cpython-38.pyc,,
discord/__pycache__/relationship.cpython-38.pyc,,
discord/__pycache__/role.cpython-38.pyc,,
discord/__pycache__/shard.cpython-38.pyc,,
discord/__pycache__/state.cpython-38.pyc,,
discord/__pycache__/team.cpython-38.pyc,,
discord/__pycache__/template.cpython-38.pyc,,
discord/__pycache__/user.cpython-38.pyc,,
discord/__pycache__/utils.cpython-38.pyc,,
discord/__pycache__/voice_client.cpython-38.pyc,,
discord/__pycache__/webhook.cpython-38.pyc,,
discord/__pycache__/widget.cpython-38.pyc,,
discord/abc.py,sha256=s7XY6D_Roi4bxKKuH3wW6Ce40aAw-TVe2M2664DCE4c,40045
discord/activity.py,sha256=TCN4SzmNY4f8huwpsy_3lg2H3CN1N2MbUVP8oINyBvs,23444
discord/appinfo.py,sha256=pSljy54mIZa9rkyYfh0vsuKtSb_26ay-PQGgPMZatUo,5229
discord/asset.py,sha256=pMBqi5tHhH5ycmky1saYHUv8IQjXmyASEjguOZMf6dw,7595
discord/audit_logs.py,sha256=lbfBxRYTNY7XHf22HnDtH-aBIi8WDTmRtsYIZsFaorY,13403
discord/backoff.py,sha256=btxvujy2vUATg9C0odcSVSjsbV6JNQXUlV0dGqG8jg4,3166
discord/bin/libopus-0.x64.dll,sha256=zqIypk0pRCwXafYPCDVbDJsWPjNb-Emf8I2YbWsN790,423142
discord/bin/libopus-0.x86.dll,sha256=3T1ErXk2NKZC13CzwfovFOmdSXtkwpoVRL3j8FykAlM,380885
discord/calls.py,sha256=voiZw0sLyJUraBoSs7dtsQRsq7Xj2Z6yO4YBie3_P4U,5292
discord/channel.py,sha256=uHD539DVQRGbqL5J9Hrc5onlBWiEIInHJyrx8KwrmbU,42352
discord/client.py,sha256=NZPBQz4rZWgyGe5HDQoV56SJ1IN6u12thMbITcdno7I,49704
discord/colour.py,sha256=SdxhhACprJEAeDVnui0qI6rnmjkQg7pL2_fN3-pHy7M,7493
discord/context_managers.py,sha256=Yz9XznhY6A9KHstnDMoff53o9BMpqelTZpcHgn-NO90,2281
discord/embeds.py,sha256=Q9DStAV3Q7ezrA_G4vFfWBLTwnEUwyMxnGKYc_ls_7M,17385
discord/emoji.py,sha256=Yi40p7DS9rDDedSmgBypehH5hct3MksByTLEnv4p_T0,7103
discord/enums.py,sha256=qJeEhhacTISVtsSoDI6-BCmUXm7j3KerVeW_rr_93-s,13334
discord/errors.py,sha256=Z3Ld7JIAWL0jqNZCkNcmY0QpWTjBP8O6jpdFm-tGK9E,6981
discord/ext/commands/__init__.py,sha256=flll4nfZB_6_lrET8U35knwWrA-EMgYMnJigW_vzX4M,449
discord/ext/commands/__pycache__/__init__.cpython-38.pyc,,
discord/ext/commands/__pycache__/_types.cpython-38.pyc,,
discord/ext/commands/__pycache__/bot.cpython-38.pyc,,
discord/ext/commands/__pycache__/cog.cpython-38.pyc,,
discord/ext/commands/__pycache__/context.cpython-38.pyc,,
discord/ext/commands/__pycache__/converter.cpython-38.pyc,,
discord/ext/commands/__pycache__/cooldowns.cpython-38.pyc,,
discord/ext/commands/__pycache__/core.cpython-38.pyc,,
discord/ext/commands/__pycache__/errors.cpython-38.pyc,,
discord/ext/commands/__pycache__/help.cpython-38.pyc,,
discord/ext/commands/__pycache__/view.cpython-38.pyc,,
discord/ext/commands/_types.py,sha256=kPsr-QZLMDmGERmAHGsz4qWZbmF7FSZB3AQzNRuYMJw,1287
discord/ext/commands/bot.py,sha256=XIg6lOh5EGWLvxpVfdOz3CqFYCdBeNpkfVu-rcW5-IM,34733
discord/ext/commands/cog.py,sha256=fROZe24-JyI4HAykpaNDuI1HhID6Qs65Ma_J1TuV7Kg,15201
discord/ext/commands/context.py,sha256=NBTgALx1YtmzeOisCe-i158wQ6B7Xi9pdmBoDNs2r5o,11585
discord/ext/commands/converter.py,sha256=gbhY31mjU6-4Q1DbrBqANRLHov2Lg39ThC4g-CEAfBM,22312
discord/ext/commands/cooldowns.py,sha256=lr68n6_XrEYcFrJ6c7v7dQO3LQbvgMzy9wbCReX-Ar0,9219
discord/ext/commands/core.py,sha256=tlaMcFYZfSRMtLX80EXPMyGlvn8X3X-q-EX3i_JJ0RA,69442
discord/ext/commands/errors.py,sha256=uZoCwUQ1_A7cEFtZgMn5mX8xJvVCmBZlWBI8TEbJiCY,24839
discord/ext/commands/help.py,sha256=CyYa1a7PQJQn_ztgvmAz52J9edI9EGY5hi06FxJxwCY,47717
discord/ext/commands/view.py,sha256=v8Cr1UqULWKeb5NgFQVpDj6M_Atn4yrW0pD2oCuVMDU,5996
discord/ext/tasks/__init__.py,sha256=PxHRluZL9cO7SoOraWkHZjzl9pQlH8tq2g2Vvgv-rAM,16485
discord/ext/tasks/__pycache__/__init__.cpython-38.pyc,,
discord/file.py,sha256=VQobigUOo4DjVfi8F9XeFipYXhLgpO4HuiRvJIZtWyg,3977
discord/flags.py,sha256=7V5NCWj5U-HXcTBRb-N3xul8gLc2Fbtu_U22lqbpLmE,30196
discord/gateway.py,sha256=6Hfi4JqBevHomrs19TIWkPRacn62t_4a1xRxxviXsFs,31269
discord/guild.py,sha256=N4cUTAJbknkIqdsIe2g6Jq_9OS3Du4gabLyUQvI6OC0,74021
discord/http.py,sha256=UwWAlp8Mr5_yfN9z4axpIJ4YZd0qI4E5fNxBxnp0fVw,37399
discord/integrations.py,sha256=e6fW4O9nbP1r0DX-jqd-Qk3ElMg5fGAS18JeWHCCTss,7146
discord/invite.py,sha256=V2LAMA_TAdL1D-0rhnTg5FZHkPtwu87J7dj1PGxH6kk,14459
discord/iterators.py,sha256=9IFyw7j6VOvdutDzg6-DXqOA4fpbQ-lbwrPEgbOHnMU,22259
discord/member.py,sha256=bnxmtrR7Lh-CDMws_X5ic4zQVvFpV_8Kmh-GUQLf2gA,24519
discord/mentions.py,sha256=oN4D0YFDtKm6LDdbE0ubz-nWZ7HeUfAE-204OowcfkM,4413
discord/message.py,sha256=ELu6x3hmTZJCH10OlXUyplAhR2UYxb8trhaRVnlAb8Q,40723
discord/mixins.py,sha256=OLAVyfyr9kc01MJaCqfNFm5O3acEH0Be6mUlUt2OvEQ,1505
discord/object.py,sha256=mSKJA8fsp-PZekk7JXpc85BLYGfWsFP8bFsy3FhNthU,2664
discord/oggparse.py,sha256=RwlVDlzsXhWZcwEhUblH3bcrHqYk4olzPO1qMdJX9rI,3136
discord/opus.py,sha256=MzSDKi2kIwPSQAlWtADmNaNIkEIXcXVHkHBPdpueH8Q,8545
discord/partial_emoji.py,sha256=kN109imyTbdmg2QV05gKllXRIj_OI7c-3PlRhrabNfw,4500
discord/permissions.py,sha256=vzAuYjmkBPsd1k5aHI1vZZhozvhxzwXFIbVUTfUFbxE,18279
discord/player.py,sha256=zGaB3VqNbMH9Aho0DTUL2gY9MfAri5TnNa22R53QEL4,22943
discord/raw_models.py,sha256=NVvTLE-gN6wGsIF4aBkeqIMMzPEZBb_RUWER8O_K98E,7224
discord/reaction.py,sha256=OxGDN9RYq5f3Bc2SgsSxAgpWqtxpcSJ-SSctLsVeNWY,6395
discord/relationship.py,sha256=oQSHFWcAKyYQ2jzIkQrKLvGddJ-sBCkYBbJsOkMblcg,2431
discord/role.py,sha256=USTMNkqU2MHisTgpj-mqXzZMQpPfYBEhvnzvEljtfiY,9564
discord/shard.py,sha256=pxvM33JIoBhd8sTuBBtAQDQjqLRmP5SXGYIZgzctpbk,18546
discord/state.py,sha256=Kz9EtbjTfcLfEPCi7R5EuReThzJnvZczVuVoPphj_Ms,47215
discord/team.py,sha256=XJ34MJv6xAQfkrSxwj3VcAconzfpsdiCmwueabCJC3k,4211
discord/template.py,sha256=2oDgjxW8KFL284wBb965dPjoJexOImfK-OER4P1UxTQ,5283
discord/user.py,sha256=rJIPDkx9BGA_p3DhJBM9QhF7vTDe2n_pSzaogFGaOA8,28471
discord/utils.py,sha256=3eH9eWeSYbsYlt1cxji057rO3owF_Wc_DbZlASOyEzE,17053
discord/voice_client.py,sha256=Ur9FXYmHXzprtG3YTS5N3M6C77PtJUxPgXqPCPCaQiM,22645
discord/webhook.py,sha256=x1rzouxSSx4ZtOCgNxtijsUrbRZIDYkdozqAnPZnrY0,30723
discord/widget.py,sha256=IrTEVGATg7K0bNItnzqAWgVIBT0LW9PAENxM2nXM2gc,8619


@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.33.6)
Root-Is-Purelib: true
Tag: py3-none-any


@@ -1,5 +0,0 @@
#!/bin/bash
# save as e.g. git-authors and set the executable flag
git ls-tree -r -z --name-only HEAD -- $1 | xargs -0 -n1 git blame -w \
--line-porcelain HEAD |grep "^author "|sort|uniq -c|sort -nr


@@ -1,12 +1,10 @@
# -*- coding: utf-8 -*-
"""
Discord API Wrapper
~~~~~~~~~~~~~~~~~~~
A basic wrapper for the Discord API.
:copyright: (c) 2015-2020 Rapptz
:copyright: (c) 2015-present Rapptz
:license: MIT, see LICENSE for more details.
"""
@@ -14,60 +12,74 @@ A basic wrapper for the Discord API.
__title__ = 'discord'
__author__ = 'Rapptz'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015-2020 Rapptz'
__version__ = '1.5.1'
__copyright__ = 'Copyright 2015-present Rapptz'
__version__ = '2.0.1'
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
from collections import namedtuple
import logging
from typing import NamedTuple, Literal
from .client import Client
from .appinfo import AppInfo
from .user import User, ClientUser, Profile
from .emoji import Emoji
from .partial_emoji import PartialEmoji
from .client import *
from .appinfo import *
from .user import *
from .emoji import *
from .partial_emoji import *
from .activity import *
from .channel import *
from .guild import Guild
from .guild import *
from .flags import *
from .relationship import Relationship
from .member import Member, VoiceState
from .message import Message, MessageReference, Attachment
from .asset import Asset
from .member import *
from .message import *
from .asset import *
from .errors import *
from .calls import CallMessage, GroupCall
from .permissions import Permissions, PermissionOverwrite
from .role import Role
from .file import File
from .colour import Color, Colour
from .integrations import Integration, IntegrationAccount
from .invite import Invite, PartialInviteChannel, PartialInviteGuild
from .template import Template
from .widget import Widget, WidgetMember, WidgetChannel
from .object import Object
from .reaction import Reaction
from . import utils, opus, abc
from .permissions import *
from .role import *
from .file import *
from .colour import *
from .integrations import *
from .invite import *
from .template import *
from .welcome_screen import *
from .widget import *
from .object import *
from .reaction import *
from . import (
utils as utils,
opus as opus,
abc as abc,
ui as ui,
app_commands as app_commands,
)
from .enums import *
from .embeds import Embed
from .mentions import AllowedMentions
from .shard import AutoShardedClient, ShardInfo
from .embeds import *
from .mentions import *
from .shard import *
from .player import *
from .webhook import *
from .voice_client import VoiceClient, VoiceProtocol
from .audit_logs import AuditLogChanges, AuditLogEntry, AuditLogDiff
from .voice_client import *
from .audit_logs import *
from .raw_models import *
from .team import *
from .sticker import *
from .stage_instance import *
from .scheduled_event import *
from .interactions import *
from .components import *
from .threads import *
from .automod import *
VersionInfo = namedtuple('VersionInfo', 'major minor micro releaselevel serial')
version_info = VersionInfo(major=1, minor=5, micro=1, releaselevel='final', serial=0)
class VersionInfo(NamedTuple):
major: int
minor: int
micro: int
releaselevel: Literal["alpha", "beta", "candidate", "final"]
serial: int
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
version_info: VersionInfo = VersionInfo(major=2, minor=0, micro=1, releaselevel='final', serial=0)
logging.getLogger(__name__).addHandler(logging.NullHandler())
del logging, NamedTuple, Literal, VersionInfo
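The diff above bumps the vendored discord.py from 1.5.1 to 2.0.1: explicit imports become star re-exports, ``version_info`` becomes a typed NamedTuple, and new subpackages such as ``ui`` and ``app_commands`` join the namespace. A hedged sketch of gating on that version tuple (the constructor details come from the 2.0 public API, not from this diff):

.. code:: py

    import discord

    # discord.py 2.0 requires Intents at client construction;
    # message_content became a privileged intent in 2.0.
    if discord.version_info.major >= 2:
        intents = discord.Intents.default()
        intents.message_content = True
        client = discord.Client(intents=intents)
    else:
        client = discord.Client()  # 1.x constructed clients without explicit intents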


@@ -1,9 +1,7 @@
# -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-2020 Rapptz
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
@@ -24,63 +22,75 @@ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
from typing import Optional, Tuple, Dict
import argparse
import sys
from pathlib import Path
import discord
import pkg_resources
import importlib.metadata
import aiohttp
import platform
def show_version():
def show_version() -> None:
entries = []
entries.append('- Python v{0.major}.{0.minor}.{0.micro}-{0.releaselevel}'.format(sys.version_info))
version_info = discord.version_info
entries.append('- discord.py v{0.major}.{0.minor}.{0.micro}-{0.releaselevel}'.format(version_info))
if version_info.releaselevel != 'final':
pkg = pkg_resources.get_distribution('discord.py')
if pkg:
entries.append(' - discord.py pkg_resources: v{0}'.format(pkg.version))
version = importlib.metadata.version('discord.py')
if version:
entries.append(f' - discord.py metadata: v{version}')
entries.append('- aiohttp v{0.__version__}'.format(aiohttp))
entries.append(f'- aiohttp v{aiohttp.__version__}')
uname = platform.uname()
entries.append('- system info: {0.system} {0.release} {0.version}'.format(uname))
print('\n'.join(entries))
def core(parser, args):
def core(parser: argparse.ArgumentParser, args: argparse.Namespace) -> None:
if args.version:
show_version()
else:
parser.print_help()
bot_template = """#!/usr/bin/env python3
# -*- coding: utf-8 -*-
_bot_template = """#!/usr/bin/env python3
from discord.ext import commands
import discord
import config
class Bot(commands.{base}):
def __init__(self, **kwargs):
super().__init__(command_prefix=commands.when_mentioned_or('{prefix}'), **kwargs)
def __init__(self, intents: discord.Intents, **kwargs):
super().__init__(command_prefix=commands.when_mentioned_or('{prefix}'), intents=intents, **kwargs)
async def setup_hook(self):
for cog in config.cogs:
try:
self.load_extension(cog)
await self.load_extension(cog)
except Exception as exc:
print('Could not load extension {{0}} due to {{1.__class__.__name__}}: {{1}}'.format(cog, exc))
print(f'Could not load extension {{cog}} due to {{exc.__class__.__name__}}: {{exc}}')
async def on_ready(self):
print('Logged on as {{0}} (ID: {{0.id}})'.format(self.user))
print(f'Logged on as {{self.user}} (ID: {{self.user.id}})')
bot = Bot()
intents = discord.Intents.default()
intents.message_content = True
bot = Bot(intents=intents)
# write general commands here
bot.run(config.token)
"""
gitignore_template = """# Byte-compiled / optimized / DLL files
_gitignore_template = """# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
@@ -110,9 +120,7 @@ var/
config.py
"""
cog_template = '''# -*- coding: utf-8 -*-
from discord.ext import commands
_cog_template = '''from discord.ext import commands
import discord
class {name}(commands.Cog{attrs}):
@@ -121,12 +129,16 @@ class {name}(commands.Cog{attrs}):
def __init__(self, bot):
self.bot = bot
{extra}
def setup(bot):
bot.add_cog({name}(bot))
async def setup(bot):
await bot.add_cog({name}(bot))
'''
cog_extras = '''
def cog_unload(self):
_cog_extras = '''
async def cog_load(self):
# loading logic goes here
pass
async def cog_unload(self):
# clean up logic goes here
pass
@@ -145,6 +157,10 @@ cog_extras = '''
async def cog_command_error(self, ctx, error):
# error handling to every command in here
pass
async def cog_app_command_error(self, interaction, error):
# error handling to every application command in here
pass
async def cog_before_invoke(self, ctx):
# called before a command is called here
@@ -160,7 +176,7 @@ cog_extras = '''
# certain file names and directory names are forbidden
# see: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247%28v=vs.85%29.aspx
# although some of this doesn't apply to Linux, we might as well be consistent
_base_table = {
_base_table: Dict[str, Optional[str]] = {
'<': '-',
'>': '-',
':': '-',
@@ -175,24 +191,48 @@ _base_table = {
# NUL (0) and 1-31 are disallowed
_base_table.update((chr(i), None) for i in range(32))
translation_table = str.maketrans(_base_table)
_translation_table = str.maketrans(_base_table)
def to_path(parser, name, *, replace_spaces=False):
def to_path(parser: argparse.ArgumentParser, name: str, *, replace_spaces: bool = False) -> Path:
if isinstance(name, Path):
return name
if sys.platform == 'win32':
forbidden = ('CON', 'PRN', 'AUX', 'NUL', 'COM1', 'COM2', 'COM3', 'COM4', 'COM5', 'COM6', 'COM7', \
'COM8', 'COM9', 'LPT1', 'LPT2', 'LPT3', 'LPT4', 'LPT5', 'LPT6', 'LPT7', 'LPT8', 'LPT9')
forbidden = (
'CON',
'PRN',
'AUX',
'NUL',
'COM1',
'COM2',
'COM3',
'COM4',
'COM5',
'COM6',
'COM7',
'COM8',
'COM9',
'LPT1',
'LPT2',
'LPT3',
'LPT4',
'LPT5',
'LPT6',
'LPT7',
'LPT8',
'LPT9',
)
if len(name) <= 4 and name.upper() in forbidden:
parser.error('invalid directory name given, use a different one')
name = name.translate(translation_table)
name = name.translate(_translation_table)
if replace_spaces:
name = name.replace(' ', '-')
return Path(name)
def newbot(parser, args):
def newbot(parser: argparse.ArgumentParser, args: argparse.Namespace) -> None:
new_directory = to_path(parser, args.directory) / to_path(parser, args.name)
# as a note exist_ok for Path is a 3.5+ only feature
@@ -200,7 +240,7 @@ def newbot(parser, args):
try:
new_directory.mkdir(exist_ok=True, parents=True)
except OSError as exc:
parser.error('could not create our bot directory ({})'.format(exc))
parser.error(f'could not create our bot directory ({exc})')
cogs = new_directory / 'cogs'
@@ -209,63 +249,66 @@ def newbot(parser, args):
init = cogs / '__init__.py'
init.touch()
except OSError as exc:
print('warning: could not create cogs directory ({})'.format(exc))
print(f'warning: could not create cogs directory ({exc})')
try:
with open(str(new_directory / 'config.py'), 'w', encoding='utf-8') as fp:
fp.write('token = "place your token here"\ncogs = []\n')
except OSError as exc:
parser.error('could not create config file ({})'.format(exc))
parser.error(f'could not create config file ({exc})')
try:
with open(str(new_directory / 'bot.py'), 'w', encoding='utf-8') as fp:
base = 'Bot' if not args.sharded else 'AutoShardedBot'
fp.write(bot_template.format(base=base, prefix=args.prefix))
fp.write(_bot_template.format(base=base, prefix=args.prefix))
except OSError as exc:
parser.error('could not create bot file ({})'.format(exc))
parser.error(f'could not create bot file ({exc})')
if not args.no_git:
try:
with open(str(new_directory / '.gitignore'), 'w', encoding='utf-8') as fp:
fp.write(gitignore_template)
fp.write(_gitignore_template)
except OSError as exc:
print('warning: could not create .gitignore file ({})'.format(exc))
print(f'warning: could not create .gitignore file ({exc})')
print('successfully made bot at', new_directory)
def newcog(parser, args):
def newcog(parser: argparse.ArgumentParser, args: argparse.Namespace) -> None:
cog_dir = to_path(parser, args.directory)
try:
cog_dir.mkdir(exist_ok=True)
except OSError as exc:
print('warning: could not create cogs directory ({})'.format(exc))
print(f'warning: could not create cogs directory ({exc})')
directory = cog_dir / to_path(parser, args.name)
directory = directory.with_suffix('.py')
try:
with open(str(directory), 'w', encoding='utf-8') as fp:
attrs = ''
extra = cog_extras if args.full else ''
extra = _cog_extras if args.full else ''
if args.class_name:
name = args.class_name
else:
name = str(directory.stem)
if '-' in name:
name = name.replace('-', ' ').title().replace(' ', '')
if '-' in name or '_' in name:
translation = str.maketrans('-_', ' ')
name = name.translate(translation).title().replace(' ', '')
else:
name = name.title()
if args.display_name:
attrs += ', name="{}"'.format(args.display_name)
attrs += f', name="{args.display_name}"'
if args.hide_commands:
attrs += ', command_attrs=dict(hidden=True)'
fp.write(cog_template.format(name=name, extra=extra, attrs=attrs))
fp.write(_cog_template.format(name=name, extra=extra, attrs=attrs))
except OSError as exc:
parser.error('could not create cog file ({})'.format(exc))
parser.error(f'could not create cog file ({exc})')
else:
print('successfully made cog at', directory)
def add_newbot_args(subparser):
def add_newbot_args(subparser: argparse._SubParsersAction[argparse.ArgumentParser]) -> None:
parser = subparser.add_parser('newbot', help='creates a command bot project quickly')
parser.set_defaults(func=newbot)
@@ -275,7 +318,8 @@ def add_newbot_args(subparser):
parser.add_argument('--sharded', help='whether to use AutoShardedBot', action='store_true')
parser.add_argument('--no-git', help='do not create a .gitignore file', action='store_true', dest='no_git')
def add_newcog_args(subparser):
def add_newcog_args(subparser: argparse._SubParsersAction[argparse.ArgumentParser]) -> None:
parser = subparser.add_parser('newcog', help='creates a new cog template quickly')
parser.set_defaults(func=newcog)
@@ -286,7 +330,8 @@ def add_newcog_args(subparser):
parser.add_argument('--hide-commands', help='whether to hide all commands in the cog', action='store_true')
parser.add_argument('--full', help='add all special methods as well', action='store_true')
def parse_args():
def parse_args() -> Tuple[argparse.ArgumentParser, argparse.Namespace]:
parser = argparse.ArgumentParser(prog='discord', description='Tools for helping with discord.py')
parser.add_argument('-v', '--version', action='store_true', help='shows the library version')
parser.set_defaults(func=core)
@@ -296,8 +341,11 @@ def parse_args():
add_newcog_args(subparser)
return parser, parser.parse_args()
def main():
def main() -> None:
parser, args = parse_args()
args.func(parser, args)
main()
if __name__ == '__main__':
main()
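The updated templates reflect discord.py 2.0's async extension loading: ``setup`` and ``cog_load``/``cog_unload`` are now coroutines, and ``bot.add_cog`` must be awaited. A minimal cog in the shape ``newcog`` now generates (the class and command names are illustrative, not from this diff):

.. code:: py

    from discord.ext import commands

    class Example(commands.Cog):
        def __init__(self, bot: commands.Bot):
            self.bot = bot

        @commands.command()
        async def ping(self, ctx: commands.Context):
            await ctx.send('pong')

    # discord.py 2.0 requires the extension entry point to be a coroutine
    async def setup(bot: commands.Bot):
        await bot.add_cog(Example(bot))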

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff.