chore: Remove aiofiles dependency #596

Open · wants to merge 1 commit into base: master
24 changes: 1 addition & 23 deletions poetry.lock

Some generated files are not rendered by default.

2 changes: 0 additions & 2 deletions pyproject.toml
@@ -34,7 +34,6 @@ packages = [

[tool.poetry.dependencies]
python = ">=3.11,<3.13"
aiofiles = ">=24.1.0,<25.0"
aiosqlite = ">=0.18"
argcomplete = ">=2,<4"
zstandard = ">=0.19"
@@ -56,7 +55,6 @@ pylsp-rope = "^0.1"
pytest = ">=7.1,<9.0"
pytest-asyncio = ">=0.20,<0.25"
python-lsp-server = "^1.5"
types-aiofiles = ">=23.1,<25.0"
types-psutil = ">=5.9.5.10,<7.0.0.0"
types-tabulate = "^0.9"
myst-parser = ">=3.0.1,<4.1"
15 changes: 5 additions & 10 deletions src/gallia/command/uds.py
@@ -5,8 +5,6 @@
import json
from argparse import ArgumentParser, BooleanOptionalAction, Namespace

import aiofiles

from gallia.command.base import FileNames, Scanner
from gallia.config import Config
from gallia.log import get_logger
@@ -157,9 +155,8 @@ async def setup(self, args: Namespace) -> None:

if args.properties is True:
path = self.artifacts_dir.joinpath(FileNames.PROPERTIES_PRE.value)
async with aiofiles.open(path, "w") as file:
await file.write(json.dumps(await self.ecu.properties(True), indent=4))
await file.write("\n")
properties = await self.ecu.properties(True)
path.write_text(json.dumps(properties, indent=4) + "\n")

if self.db_handler is not None:
self._apply_implicit_logging_setting()
@@ -175,13 +172,11 @@ async def setup(self, args: Namespace) -> None:
async def teardown(self, args: Namespace) -> None:
if args.properties is True and not self.ecu.transport.is_closed:
path = self.artifacts_dir.joinpath(FileNames.PROPERTIES_POST.value)
async with aiofiles.open(path, "w") as file:
await file.write(json.dumps(await self.ecu.properties(True), indent=4))
await file.write("\n")
properties = await self.ecu.properties(True)
path.write_text(json.dumps(properties, indent=4) + "\n")

path_pre = self.artifacts_dir.joinpath(FileNames.PROPERTIES_PRE.value)
async with aiofiles.open(path_pre) as file:
prop_pre = json.loads(await file.read())
prop_pre = json.loads(path_pre.read_text())

if args.compare_properties and await self.ecu.properties(False) != prop_pre:
logger.warning("ecu properties differ, please investigate!")
62 changes: 23 additions & 39 deletions src/gallia/commands/discover/doip.py
@@ -9,7 +9,6 @@
from itertools import product
from urllib.parse import parse_qs, urlparse

import aiofiles
import psutil

from gallia.command import AsyncScript
@@ -316,10 +315,10 @@ async def enumerate_routing_activation_requests( # noqa: PLR0913
f"doip://{tgt_hostname}:{tgt_port}?protocol_version={self.protocol_version}&activation_type={routing_activation_type:#x}&src_addr={source_address:#x}"
)
logger.notice(f"[🤯] Holy moly, it actually worked: {targets[-1]}")
async with aiofiles.open(
self.artifacts_dir.joinpath("1_valid_routing_activation_requests.txt"), "a"
with self.artifacts_dir.joinpath("1_valid_routing_activation_requests.txt").open(
"a"
) as f:
await f.write(f"{targets[-1]}\n")
f.write(f"{targets[-1]}\n")

if len(targets) > 0:
logger.notice("[⚔️] It's dangerous to test alone, take one of these:")
@@ -359,10 +358,8 @@ async def enumerate_target_addresses( # noqa: PLR0913
# If we reach this, the request was not denied due to unknown TargetAddress
known_targets.append(current_target)
logger.notice(f"[🥇] HEUREKA: target address {target_addr:#x} is valid! ")
async with aiofiles.open(
self.artifacts_dir.joinpath("3_valid_targets.txt"), "a"
) as f:
await f.write(f"{current_target}\n")
with self.artifacts_dir.joinpath("3_valid_targets.txt").open("a") as f:
f.write(f"{current_target}\n")

logger.info(f"[⏳] Waiting for reply of target {target_addr:#x}")
# Hardcoded loop to detect potential broadcasts
@@ -379,20 +376,16 @@ async def enumerate_target_addresses( # noqa: PLR0913
logger.notice(
f"[🤑] B-B-B-B-B-B-BROADCAST at TargetAddress {target_addr:#x}! Got reply from {pot_broadcast:#x}"
)
async with aiofiles.open(
self.artifacts_dir.joinpath("6_unsolicited_replies.txt"), "a"
) as f:
await f.write(
with self.artifacts_dir.joinpath("6_unsolicited_replies.txt").open("a") as f:
f.write(
f"target_addr={target_addr:#x} yielded reply from {pot_broadcast:#x}; could also be late answer triggered by previous address!\n"
)

resp = TesterPresentResponse.parse_static(data)
logger.notice(f"[🥳] It cannot get nicer: {target_addr:#x} responded: {resp}")
responsive_targets.append(current_target)
async with aiofiles.open(
self.artifacts_dir.joinpath("4_responsive_targets.txt"), "a"
) as f:
await f.write(f"{current_target}\n")
with self.artifacts_dir.joinpath("4_responsive_targets.txt").open("a") as f:
f.write(f"{current_target}\n")
if self.db_handler is not None:
await self.db_handler.insert_discovery_result(current_target)

@@ -403,36 +396,28 @@ async def enumerate_target_addresses( # noqa: PLR0913
elif e.nack_code == DiagnosticMessageNegativeAckCodes.TargetUnreachable:
logger.info(f"[💤] {target_addr:#x} is (currently?) unreachable")
unreachable_targets.append(current_target)
async with aiofiles.open(
self.artifacts_dir.joinpath("5_unreachable_targets.txt"), "a"
) as f:
await f.write(f"{current_target}\n")
with self.artifacts_dir.joinpath("5_unreachable_targets.txt").open("a") as f:
f.write(f"{current_target}\n")
continue
else:
logger.warning(
f"[🤷] {target_addr:#x} is behaving strangely: {e.nack_code.name}"
)
async with aiofiles.open(
self.artifacts_dir.joinpath("7_targets_with_errors.txt"), "a"
) as f:
await f.write(f"{target_addr:#x}: {e.nack_code.name}\n")
with self.artifacts_dir.joinpath("7_targets_with_errors.txt").open("a") as f:
f.write(f"{target_addr:#x}: {e.nack_code.name}\n")
continue

except TimeoutError: # This triggers when DoIP ACK but no UDS reply
logger.info(f"[🙊] Presumably no active ECU on target address {target_addr:#x}")
async with aiofiles.open(
self.artifacts_dir.joinpath("5_unresponsive_targets.txt"), "a"
) as f:
await f.write(f"{current_target}\n")
with self.artifacts_dir.joinpath("5_unresponsive_targets.txt").open("a") as f:
f.write(f"{current_target}\n")
continue

except ConnectionError as e:
# Whenever this triggers, but sometimes connections are closed not by us
logger.warn(f"[🫦] Sexy, but unexpected: {target_addr:#x} triggered {e!r}")
async with aiofiles.open(
self.artifacts_dir.joinpath("7_targets_with_errors.txt"), "a"
) as f:
await f.write(f"{target_addr:#x}: {e}\n")
with self.artifacts_dir.joinpath("7_targets_with_errors.txt").open("a") as f:
f.write(f"{target_addr:#x}: {e}\n")
# Re-establish DoIP connection
await conn.close()
await asyncio.sleep(tcp_connect_delay)
@@ -546,13 +531,12 @@ async def run_udp_discovery(self) -> list[tuple[str, int]]:

if len(found) > 0:
logger.notice("[💎] Look what valid hosts I've found:")
for item in found:
url = f"doip://{item[0]}:{item[1]}"
logger.notice(url)
async with aiofiles.open(
self.artifacts_dir.joinpath("0_valid_hosts.txt"), "a"
) as f:
await f.write(f"{url}\n")

with self.artifacts_dir.joinpath("0_valid_hosts.txt").open() as f:
for item in found:
url = f"doip://{item[0]}:{item[1]}"
logger.notice(url)
f.write(f"{url}\n")
else:
logger.notice(
"[👸] Your princess is in another castle: no DoIP endpoints here it seems..."
8 changes: 3 additions & 5 deletions src/gallia/commands/scan/uds/sa_dump_seeds.py
@@ -9,8 +9,6 @@
from argparse import ArgumentParser, Namespace
from pathlib import Path

import aiofiles

from gallia.command import UDSScanner
from gallia.config import Config
from gallia.log import get_logger
@@ -135,7 +133,7 @@ async def main(self, args: Namespace) -> None:

i = -1
seeds_file = Path.joinpath(self.artifacts_dir, "seeds.bin")
file = await aiofiles.open(seeds_file, "wb", buffering=0)
file = seeds_file.open("wb", buffering=0)
duration = args.duration * 60
start_time = time.time()
last_seed = b""
@@ -177,7 +175,7 @@ async def main(self, args: Namespace) -> None:

logger.info(f"Received seed of length {len(seed)}")

await file.write(seed)
file.write(seed)
if last_seed == seed:
logger.warning("Received the same seed as before")

@@ -222,6 +220,6 @@ async def main(self, args: Namespace) -> None:
logger.info(f"Sleeping for {args.sleep} seconds between seed requests…")
await asyncio.sleep(args.sleep)

await file.close()
file.close()
self.log_size(seeds_file, time.time() - start_time)
await self.ecu.leave_session(session, sleep=args.power_cycle_sleep)
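A sketch of the unbuffered binary-write pattern used for the seed dump: buffering=0 bypasses Python's userspace buffer, so every seed reaches the file immediately even if the run is interrupted (the seed values below are placeholders):

```python
from pathlib import Path


def dump_seeds(seeds_file: Path, seeds: list[bytes]) -> None:
    # buffering=0 is only permitted in binary mode; each write() then goes
    # straight to the operating system without an intermediate buffer.
    file = seeds_file.open("wb", buffering=0)
    try:
        for seed in seeds:
            file.write(seed)
    finally:
        file.close()


# dump_seeds(Path("seeds.bin"), [b"\x01\x02", b"\x03\x04"])
```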
2 changes: 0 additions & 2 deletions src/gallia/dumpcap.py
@@ -112,8 +112,6 @@ async def stop(self) -> None:
await self.compressor

async def _compressor(self) -> None:
# Gzip support in aiofiles is missing.
# https://github.com/Tinche/aiofiles/issues/46
ready = False
assert self.proc.stdout
with await asyncio.to_thread(gzip.open, self.outfile, "wb") as f:
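dumpcap.py keeps its existing approach of dispatching the blocking gzip.open to a worker thread via asyncio.to_thread. The following is a sketch of that pattern in isolation, with a made-up stream reader and output path, not the module's actual loop:

```python
import asyncio
import gzip
from pathlib import Path


async def compress_stream(stdout: asyncio.StreamReader, outfile: Path) -> None:
    # gzip.open can block on disk I/O, so it is opened in a worker thread;
    # the chunked writes are handed off the same way to keep the loop responsive.
    with await asyncio.to_thread(gzip.open, outfile, "wb") as f:
        while True:
            chunk = await stdout.read(64 * 1024)
            if not chunk:
                break
            await asyncio.to_thread(f.write, chunk)
```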
6 changes: 2 additions & 4 deletions src/gallia/utils.py
@@ -16,8 +16,6 @@
from typing import TYPE_CHECKING, Any, TypeVar
from urllib.parse import urlparse

import aiofiles

from gallia.log import Loglevel

if TYPE_CHECKING:
@@ -192,9 +190,9 @@ async def write_target_list(
:params db_handler: if given, urls are also written to the database as discovery results
:return: None
"""
async with aiofiles.open(path, "w") as f:
with path.open("w") as f:
for target in targets:
await f.write(f"{target}\n")
f.write(f"{target}\n")

if db_handler is not None:
await db_handler.insert_discovery_result(str(target))
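The resulting shape of write_target_list is a synchronous file write interleaved with an awaited database insert. A self-contained sketch, using a minimal stand-in protocol for the database handler (only the one method used here):

```python
from pathlib import Path
from typing import Protocol


class _DbHandler(Protocol):
    # Stand-in for the real handler; only the method used in this sketch.
    async def insert_discovery_result(self, url: str) -> None: ...


async def write_target_list(
    path: Path, targets: list[str], db_handler: _DbHandler | None = None
) -> None:
    # The per-line write cost is tiny, so a synchronous file handle inside
    # an async function is fine; only the database call stays awaited.
    with path.open("w") as f:
        for target in targets:
            f.write(f"{target}\n")
            if db_handler is not None:
                await db_handler.insert_discovery_result(str(target))
```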