1 change: 1 addition & 0 deletions CHANGES/10037.misc.rst
@@ -0,0 +1 @@
+Improved performance of creating objects during the HTTP request lifecycle -- by :user:`bdraco`.
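
Most of the diffs below swap per-instance work in `__init__` (and `super().__init__()` chains) for class-level attribute defaults, so constructing request/response objects writes fewer instance attributes. A minimal sketch of why that helps, assuming nothing about aiohttp's actual classes (the names below are illustrative only, not part of the PR):

# Illustrative sketch, not part of the PR: class-level defaults vs. __init__ writes.
import timeit
from typing import Dict, Optional


class WithInit:
    def __init__(self) -> None:
        super().__init__()
        self._content_type: Optional[str] = None
        self._content_dict: Optional[Dict[str, str]] = None


class WithClassDefaults:
    # Reads fall back to the class attribute until an instance write shadows it.
    _content_type: Optional[str] = None
    _content_dict: Optional[Dict[str, str]] = None


print("per-instance __init__ :", timeit.timeit(WithInit, number=1_000_000))
print("class-level defaults  :", timeit.timeit(WithClassDefaults, number=1_000_000))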
105 changes: 52 additions & 53 deletions aiohttp/client_proto.py
@@ -258,7 +258,7 @@ def data_received(self, data: bytes) -> None:
         if not data:
             return
 
-        # custom payload parser
+        # custom payload parser - currently always WebSocketReader
         if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
@@ -268,57 +268,56 @@ def data_received(self, data: bytes) -> None:
                 if tail:
                     self.data_received(tail)
             return
-        else:
-            if self._upgraded or self._parser is None:
-                # i.e. websocket connection, websocket parser is not set yet
-                self._tail += data
-            else:
-                # parse http messages
-                try:
-                    messages, upgraded, tail = self._parser.feed_data(data)
-                except BaseException as underlying_exc:
-                    if self.transport is not None:
-                        # connection.release() could be called BEFORE
-                        # data_received(), the transport is already
-                        # closed in this case
-                        self.transport.close()
-                    # should_close is True after the call
-                    if isinstance(underlying_exc, HttpProcessingError):
-                        exc = HttpProcessingError(
-                            code=underlying_exc.code,
-                            message=underlying_exc.message,
-                            headers=underlying_exc.headers,
-                        )
-                    else:
-                        exc = HttpProcessingError()
-                    self.set_exception(exc, underlying_exc)
-                    return
-
-                self._upgraded = upgraded
-
-                payload: Optional[StreamReader] = None
-                for message, payload in messages:
-                    if message.should_close:
-                        self._should_close = True
-
-                    self._payload = payload
-
-                    if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES:
-                        self.feed_data((message, EMPTY_PAYLOAD))
-                    else:
-                        self.feed_data((message, payload))
-                if payload is not None:
-                    # new message(s) was processed
-                    # register timeout handler unsubscribing
-                    # either on end-of-stream or immediately for
-                    # EMPTY_PAYLOAD
-                    if payload is not EMPTY_PAYLOAD:
-                        payload.on_eof(self._drop_timeout)
-                    else:
-                        self._drop_timeout()
-
-                if tail:
-                    if upgraded:
-                        self.data_received(tail)
-                    else:
-                        self._tail = tail
+
+        if self._upgraded or self._parser is None:
+            # i.e. websocket connection, websocket parser is not set yet
+            self._tail += data
+            return
+
+        # parse http messages
+        try:
+            messages, upgraded, tail = self._parser.feed_data(data)
+        except BaseException as underlying_exc:
+            if self.transport is not None:
+                # connection.release() could be called BEFORE
+                # data_received(), the transport is already
+                # closed in this case
+                self.transport.close()
+            # should_close is True after the call
+            if isinstance(underlying_exc, HttpProcessingError):
+                exc = HttpProcessingError(
+                    code=underlying_exc.code,
+                    message=underlying_exc.message,
+                    headers=underlying_exc.headers,
+                )
+            else:
+                exc = HttpProcessingError()
+            self.set_exception(exc, underlying_exc)
+            return
+
+        self._upgraded = upgraded
+
+        payload: Optional[StreamReader] = None
+        for message, payload in messages:
+            if message.should_close:
+                self._should_close = True
+
+            self._payload = payload
+
+            if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES:
+                self.feed_data((message, EMPTY_PAYLOAD))
+            else:
+                self.feed_data((message, payload))
+
+        if payload is not None:
+            # new message(s) was processed
+            # register timeout handler unsubscribing
+            # either on end-of-stream or immediately for
+            # EMPTY_PAYLOAD
+            if payload is not EMPTY_PAYLOAD:
+                payload.on_eof(self._drop_timeout)
+            else:
+                self._drop_timeout()
+
+        if upgraded and tail:
+            self.data_received(tail)
1 change: 0 additions & 1 deletion aiohttp/client_reqrep.py
@@ -802,7 +802,6 @@ def __init__(
     ) -> None:
         # URL forbids subclasses, so a simple type check is enough.
         assert type(url) is URL
-        super().__init__()
 
         self.method = method
 
27 changes: 8 additions & 19 deletions aiohttp/helpers.py
@@ -719,15 +719,12 @@ def ceil_timeout(
 
 
 class HeadersMixin:
-    __slots__ = ("_content_type", "_content_dict", "_stored_content_type")
+    """Mixin for handling headers."""
 
     _headers: MultiMapping[str]
-
-    def __init__(self) -> None:
-        super().__init__()
-        self._content_type: Optional[str] = None
-        self._content_dict: Optional[Dict[str, str]] = None
-        self._stored_content_type: Union[str, None, _SENTINEL] = sentinel
+    _content_type: Optional[str] = None
+    _content_dict: Optional[Dict[str, str]] = None
+    _stored_content_type: Union[str, None, _SENTINEL] = sentinel
 
     def _parse_content_type(self, raw: Optional[str]) -> None:
         self._stored_content_type = raw
@@ -921,22 +918,14 @@ def __repr__(self) -> str:
 
 
 class CookieMixin:
-    # The `_cookies` slots is not defined here because non-empty slots cannot
-    # be combined with an Exception base class, as is done in HTTPException.
-    # CookieMixin subclasses with slots should define the `_cookies`
-    # slot themselves.
-    __slots__ = ()
+    """Mixin for handling cookies."""
 
-    def __init__(self) -> None:
-        super().__init__()
-        # Mypy doesn't like that _cookies isn't in __slots__.
-        # See the comment on this class's __slots__ for why this is OK.
-        self._cookies: Optional[SimpleCookie] = None  # type: ignore[misc]
+    _cookies: Optional[SimpleCookie] = None
 
     @property
     def cookies(self) -> SimpleCookie:
         if self._cookies is None:
-            self._cookies = SimpleCookie()  # type: ignore[misc]
+            self._cookies = SimpleCookie()
         return self._cookies
 
     def set_cookie(
@@ -958,7 +947,7 @@ def set_cookie(
         Also updates only those params which are not None.
         """
         if self._cookies is None:
-            self._cookies = SimpleCookie()  # type: ignore[misc]
+            self._cookies = SimpleCookie()
 
         self._cookies[name] = value
         c = self._cookies[name]
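
For context on the `HeadersMixin`/`CookieMixin` change above: state now lives in class-level defaults and the real object (the `SimpleCookie`) is only created on first use, so subclasses no longer need to run the mixin's `__init__`. A rough sketch of that pattern, under the assumption that the hypothetical names below stand in for aiohttp's mixins:

# Illustrative sketch, not aiohttp's API: lazy state behind class-level defaults.
from http.cookies import SimpleCookie
from typing import Optional


class LazyCookieMixin:
    _cookies: Optional[SimpleCookie] = None  # shared default, no per-instance cost

    @property
    def cookies(self) -> SimpleCookie:
        if self._cookies is None:
            # First access writes an instance attribute that shadows the class default.
            self._cookies = SimpleCookie()
        return self._cookies


class Response(LazyCookieMixin):
    pass


resp = Response()                  # no __init__ chain needed for the mixin
resp.cookies["session"] = "abc"    # SimpleCookie is created on first use
print(resp.cookies.output(header="Set-Cookie:"))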
1 change: 0 additions & 1 deletion aiohttp/web_exceptions.py
@@ -96,7 +96,6 @@ def __init__(
         text: Optional[str] = None,
         content_type: Optional[str] = None,
     ) -> None:
-        super().__init__()
         if reason is None:
             reason = self.default_reason
         elif "\n" in reason:
11 changes: 7 additions & 4 deletions aiohttp/web_fileresponse.py
@@ -85,6 +85,10 @@ def __init__(
         self._path = pathlib.Path(path)
         self._chunk_size = chunk_size
 
+    def _seek_and_read(self, fobj: IO[Any], offset: int, chunk_size: int) -> bytes:
+        fobj.seek(offset)
+        return fobj.read(chunk_size)  # type: ignore[no-any-return]
+
     async def _sendfile_fallback(
         self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
     ) -> AbstractStreamWriter:
@@ -93,10 +97,9 @@ async def _sendfile_fallback(
 
         chunk_size = self._chunk_size
         loop = asyncio.get_event_loop()
-
-        await loop.run_in_executor(None, fobj.seek, offset)
-
-        chunk = await loop.run_in_executor(None, fobj.read, chunk_size)
+        chunk = await loop.run_in_executor(
+            None, self._seek_and_read, fobj, offset, chunk_size
+        )
         while chunk:
             await writer.write(chunk)
             count = count - chunk_size
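
The `_sendfile_fallback` change above folds the initial `seek()` and the first `read()` into one helper so they run as a single executor job instead of two separate hops into the thread pool. A standalone sketch of the same idea, with hypothetical helper names rather than aiohttp's API:

# Illustrative sketch, not aiohttp's API: one executor job for seek + read.
import asyncio
import io


def _seek_and_read(fobj: io.BufferedIOBase, offset: int, chunk_size: int) -> bytes:
    # Both steps run inside the same thread-pool job.
    fobj.seek(offset)
    return fobj.read(chunk_size)


async def copy_first_chunk(fobj: io.BufferedIOBase, offset: int, chunk_size: int) -> bytes:
    loop = asyncio.get_running_loop()
    # Before the change: one executor round trip for seek, another for read.
    # After: a single round trip returns the first chunk directly.
    return await loop.run_in_executor(None, _seek_and_read, fobj, offset, chunk_size)


async def main() -> None:
    fobj = io.BytesIO(b"x" * 1024)
    chunk = await copy_first_chunk(fobj, 256, 128)
    print(len(chunk))  # 128


asyncio.run(main())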
43 changes: 6 additions & 37 deletions aiohttp/web_request.py
@@ -127,26 +127,8 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
         hdrs.METH_DELETE,
     }
 
-    __slots__ = (
-        "_message",
-        "_protocol",
-        "_payload_writer",
-        "_payload",
-        "_headers",
-        "_method",
-        "_version",
-        "_rel_url",
-        "_post",
-        "_read_bytes",
-        "_state",
-        "_cache",
-        "_task",
-        "_client_max_size",
-        "_loop",
-        "_transport_sslcontext",
-        "_transport_peername",
-        "__weakref__",
-    )
+    _post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None
+    _read_bytes: Optional[bytes] = None
 
     def __init__(
         self,
@@ -163,9 +145,6 @@ def __init__(
         host: Optional[str] = None,
         remote: Optional[str] = None,
     ) -> None:
-        super().__init__()
-        if state is None:
-            state = {}
         self._message = message
         self._protocol = protocol
         self._payload_writer = payload_writer
@@ -189,20 +168,18 @@ def __init__(
             self._cache["scheme"] = url.scheme
             self._rel_url = url.relative()
         else:
-            self._rel_url = message.url
+            self._rel_url = url
             if scheme is not None:
                 self._cache["scheme"] = scheme
             if host is not None:
                 self._cache["host"] = host
-        self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None
-        self._read_bytes: Optional[bytes] = None
 
-        self._state = state
+        self._state = {} if state is None else state
         self._task = task
         self._client_max_size = client_max_size
         self._loop = loop
 
-        transport = self._protocol.transport
+        transport = protocol.transport
         assert transport is not None
         self._transport_sslcontext = transport.get_extra_info("sslcontext")
         self._transport_peername = transport.get_extra_info("peername")
@@ -838,16 +815,8 @@ def _finish(self) -> None:
 
 
 class Request(BaseRequest):
-    __slots__ = ("_match_info",)
-
-    def __init__(self, *args: Any, **kwargs: Any) -> None:
-        super().__init__(*args, **kwargs)
-
-        # matchdict, route_name, handler
-        # or information about traversal lookup
-
-        # initialized after route resolving
-        self._match_info: Optional[UrlMappingMatchInfo] = None
+    _match_info: Optional["UrlMappingMatchInfo"] = None
 
     def clone(
         self,
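
The `BaseRequest`/`Request` change above drops the `__slots__` declarations and gives rarely-populated attributes (`_post`, `_read_bytes`, `_match_info`) class-level `None` defaults, so `__init__` writes fewer attributes per request at the cost of instances carrying a `__dict__`. A hedged sketch of that trade-off with hypothetical classes, not aiohttp's:

# Illustrative sketch, not aiohttp's API: eager slot writes vs. lazy class defaults.
from typing import Optional


class EagerRequest:
    # __slots__ avoids a __dict__, but every attribute is written on construction.
    __slots__ = ("_method", "_post", "_read_bytes")

    def __init__(self, method: str) -> None:
        self._method = method
        self._post: Optional[dict] = None        # written even if never used
        self._read_bytes: Optional[bytes] = None


class LazyRequest:
    # No __slots__: instances get a __dict__, but unset attributes cost nothing
    # until the first write shadows the class-level default.
    _post: Optional[dict] = None
    _read_bytes: Optional[bytes] = None

    def __init__(self, method: str) -> None:
        self._method = method                    # only the hot attribute is set


req = LazyRequest("GET")
print(req._post)            # None, served by the class attribute
req._read_bytes = b"body"   # instance attribute appears only when needed
print(req._read_bytes)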