Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGES/12281.bugfix.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fixed spurious ``Future exception was never retrieved`` warning on disconnect during back-pressure -- by :user:`availov`.
1 change: 1 addition & 0 deletions CHANGES/12312.bugfix.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
``Cookiejar.save()`` now uses ``0o600`` permissions to better protect them from being read by other users -- by :user:`digiscrypt`.
1 change: 1 addition & 0 deletions CONTRIBUTORS.txt
Original file line number Diff line number Diff line change
Expand Up @@ -420,6 +420,7 @@ Yegor Roganov
Yifei Kong
Young-Ho Cha
Yuriy Shatrov
Yury Novikov
Yury Pliner
Yury Selivanov
Yusuke Tsutsumi
Expand Down
2 changes: 1 addition & 1 deletion aiohttp/base_protocol.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,4 +97,4 @@ async def _drain_helper(self) -> None:
if waiter is None:
waiter = self._loop.create_future()
self._drain_waiter = waiter
await asyncio.shield(waiter)
await waiter
12 changes: 11 additions & 1 deletion aiohttp/cookiejar.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import heapq
import itertools
import json
import os
import pathlib
import re
import time
Expand Down Expand Up @@ -137,7 +138,16 @@ def save(self, file_path: PathLike) -> None:
if attr_val:
morsel_data[attr] = attr_val
data[key][name] = morsel_data
with file_path.open(mode="w", encoding="utf-8") as f:

# Cookie persistence may include authentication/session tokens.
# Use 0o600 at creation time to avoid umask-dependent overexposure
# and enforce least-privilege access to sensitive credential data.
with open(
file_path,
mode="w",
encoding="utf-8",
opener=lambda path, flags: os.open(path, flags, 0o600),
) as f:
json.dump(data, f, indent=2)

def load(self, file_path: PathLike) -> None:
Expand Down
8 changes: 6 additions & 2 deletions aiohttp/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,8 +179,12 @@ def init_signals(self) -> None:
# by interrupting system calls
signal.siginterrupt(signal.SIGTERM, False)
signal.siginterrupt(signal.SIGUSR1, False)
# Reset signals so Gunicorn doesn't swallow subprocess return codes
# See: https://github.com/aio-libs/aiohttp/issues/6130

# Reset SIGCHLD to default so Gunicorn doesn't swallow subprocess
# return codes. Without this, workers inherit the master arbiter's
# SIGCHLD handler, causing spurious "Worker exited" errors when
# application code spawns subprocesses.
signal.signal(signal.SIGCHLD, signal.SIG_DFL)

def handle_quit(self, sig: int, frame: FrameType | None) -> None:
self.alive = False
Expand Down
36 changes: 36 additions & 0 deletions tests/test_cookiejar.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
import heapq
import itertools
import logging
import os
import stat
from http.cookies import BaseCookie, Morsel, SimpleCookie
from operator import not_
from pathlib import Path
Expand Down Expand Up @@ -1624,6 +1626,40 @@ def test_save_load_json_secure_cookies(tmp_path: Path) -> None:
assert cookie["domain"] == "example.com"


@pytest.mark.skipif(
    os.name != "posix", reason="POSIX permission bits are required for this test"
)
def test_save_creates_private_cookie_file(tmp_path: Path) -> None:
    """Saving to a brand-new file must create it with owner-only (0o600) mode."""
    target = tmp_path / "private-cookies.json"
    jar = CookieJar()
    jar.update_cookies_from_headers(
        ["token=abc123; Path=/"], URL("https://example.com/")
    )

    jar.save(file_path=target)

    assert target.exists()
    # Only the permission bits matter here, not the file-type bits.
    mode = stat.S_IMODE(target.stat().st_mode)
    assert mode == 0o600


@pytest.mark.skipif(
    os.name != "posix", reason="POSIX permission bits are required for this test"
)
def test_save_preserves_existing_cookie_file_permissions(tmp_path: Path) -> None:
    """Re-saving over an existing file must not clobber its permission bits.

    The 0o600 mode is applied only at creation time (via the ``os.open``
    opener), so a pre-existing file keeps whatever mode it already had.
    """
    target = tmp_path / "existing-cookies.json"
    target.write_text("{}", encoding="utf-8")
    target.chmod(0o644)

    jar = CookieJar()
    jar.update_cookies_from_headers(
        ["token=abc123; Path=/"], URL("https://example.com/")
    )

    jar.save(file_path=target)

    # Mode set before the save must survive the overwrite.
    assert stat.S_IMODE(target.stat().st_mode) == 0o644


async def test_cookie_jar_unsafe_property() -> None:
jar_safe = CookieJar()
assert jar_safe.unsafe is False
Expand Down
53 changes: 51 additions & 2 deletions tests/test_web_server.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
import asyncio
import gc
import socket
from contextlib import suppress
from typing import NoReturn
from typing import Any, NoReturn
from unittest import mock

import pytest

from aiohttp import client, web
from aiohttp.http_exceptions import BadHttpMethod, BadStatusLine
from aiohttp.pytest_plugin import AiohttpClient, AiohttpRawServer
from aiohttp.pytest_plugin import AiohttpClient, AiohttpRawServer, AiohttpServer


async def test_simple_server(
Expand Down Expand Up @@ -454,3 +455,51 @@ async def on_request(request: web.Request) -> web.Response:
assert done_event.is_set()
finally:
await asyncio.gather(runner.shutdown(), site.stop())


async def test_no_future_warning_on_disconnect_during_backpressure(
    aiohttp_server: AiohttpServer,
) -> None:
    """Regression test: a client disconnect while the server's write buffer is
    paused (back-pressure) must not leave an unretrieved Future exception.

    The loop's exception handler is temporarily replaced to capture any
    "Future exception was never retrieved" reports; the test asserts none
    were produced.
    """
    loop = asyncio.get_running_loop()
    exc_handler_calls: list[dict[str, Any]] = []
    # Capture every context the loop would normally log; restored in finally.
    original_handler = loop.get_exception_handler()
    loop.set_exception_handler(lambda _loop, ctx: exc_handler_calls.append(ctx))
    protocol = None

    async def handler(request: web.Request) -> NoReturn:
        # Expose the server-side protocol so the test can observe
        # writing_paused; then write until the transport pauses us.
        nonlocal protocol
        protocol = request.protocol
        resp = web.StreamResponse()
        await resp.prepare(request)
        while True:
            await resp.write(b"x" * 65536)

    app = web.Application()
    app.router.add_route("GET", "/", handler)
    # aiohttp_server enables handler_cancellation by default so the handler
    # task is cancelled when connection_lost() fires.
    server = await aiohttp_server(app)

    # Open a raw asyncio connection so we control exactly when the client
    # side closes.
    reader, writer = await asyncio.open_connection(server.host, server.port)
    writer.write(b"GET / HTTP/1.1\r\nHost: localhost\r\n\r\n")
    await writer.drain()

    try:
        # Poll until the server protocol reports that writing is paused.
        async def wait_for_backpressure() -> None:
            while protocol is None or not protocol.writing_paused:
                await asyncio.sleep(0.01)

        await asyncio.wait_for(wait_for_backpressure(), timeout=5.0)

        # Close while the drain waiter is pending — this is the scenario
        # that used to produce the spurious warning.
        writer.close()
        await asyncio.sleep(0.1)

        # Force collection so any abandoned Future with an unretrieved
        # exception would be reported to the loop's exception handler now.
        gc.collect()
        await asyncio.sleep(0)
    finally:
        loop.set_exception_handler(original_handler)

    assert not exc_handler_calls
Loading