Compare commits

...

9 Commits

Author SHA1 Message Date
Clemens Schwaighofer
037210756e v0.34.0: add BOM check for files 2025-11-06 18:22:45 +09:00
Clemens Schwaighofer
4e78d83092 Add checks for BOM encoding in files 2025-11-06 18:21:32 +09:00
Clemens Schwaighofer
0e6331fa6a v0.33.0: datetime parsing update 2025-11-06 13:26:07 +09:00
Clemens Schwaighofer
c98c5df63c Update datetime parse helper
Allow non T in isotime format, add non T normal datetime parsing
2025-11-06 13:24:27 +09:00
Clemens Schwaighofer
0981c74da9 v0.32.0: add email sending 2025-10-27 11:22:11 +09:00
Clemens Schwaighofer
31518799f6 README update 2025-10-27 11:20:46 +09:00
Clemens Schwaighofer
e8b4b9b48e Add send email class 2025-10-27 11:19:38 +09:00
Clemens Schwaighofer
cd06272b38 v0.31.1: fix dict_helper file name to dict_helpers 2025-10-27 10:42:45 +09:00
Clemens Schwaighofer
c5ab4352e3 Fix name dict_helper to dict_helpers
So we have the same name for everything
2025-10-27 10:40:12 +09:00
17 changed files with 2190 additions and 32 deletions

View File

@@ -10,6 +10,7 @@ This is a pip package that can be installed into any project and covers the foll
- logging update with exception logs
- requests wrapper for easier auth pass on access
- dict fingerprinting
- sending email
- jmespath search
- json helpers for content replace and output
- dump outputs for data for debugging
@@ -26,6 +27,7 @@ This is a pip package that can be installed into any project and covers the foll
- debug_handling: various debug helpers like data dumper, timer, utilization, etc
- db_handling: SQLite interface class
- encyption_handling: symmetric encryption
- email_handling: simple email sending
- file_handling: crc handling for file content and file names, progress bar
- json_handling: jmespath support and json date support, replace content in dict with json paths
- iterator_handling: list and dictionary handling support (search, fingerprinting, etc)
@@ -50,7 +52,7 @@ Have the following setup in `project.toml`
```toml
[[tool.uv.index]]
name = "egra-gitea"
name = "opj-pypi"
url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
explicit = true
@@ -58,15 +60,15 @@ explicit = true
```sh
uv build
uv publish --index egra-gitea --token <gitea token>
uv publish --index opj-pypi --token <gitea token>
```
## Test package
## Use package
We must set the full index URL here because we run with `--no-project`
```sh
uv run --with corelibs --index egra-gitea=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/ --no-project -- python -c "import corelibs"
uv run --with corelibs --index opj-pypi=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/ --no-project -- python -c "import corelibs"
```
### Python tests
@@ -99,7 +101,7 @@ uv run test-run/<script>
This will also add the index entry
```sh
uv add corelibs --index egra-gitea=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/
uv add corelibs --index opj-pypi=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/
```
## Python venv setup

View File

@@ -1,7 +1,7 @@
# MARK: Project info
[project]
name = "corelibs"
version = "0.31.0"
version = "0.34.0"
description = "Collection of utils for Python scripts"
readme = "README.md"
requires-python = ">=3.13"
@@ -17,7 +17,7 @@ dependencies = [
# MARK: build target
[[tool.uv.index]]
name = "egra-gitea"
name = "opj-pypi"
url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
explicit = true
@@ -63,12 +63,13 @@ ignore = [
[tool.pylint.MASTER]
# this is for the tests/etc folders
init-hook='import sys; sys.path.append("src/")'
# MARK: Testing
[tool.pytest.ini_options]
testpaths = [
"tests",
]
[tool.coverage.run]
omit = [
"*/tests/*",

View File

@@ -159,10 +159,14 @@ def parse_flexible_date(
# Try different parsing methods
parsers: list[Callable[[str], datetime]] = [
# ISO 8601 format
# ISO 8601 format, also with missing "T"
lambda x: datetime.fromisoformat(x), # pylint: disable=W0108
lambda x: datetime.fromisoformat(x.replace(' ', 'T')), # pylint: disable=W0108
# Simple date format
lambda x: datetime.strptime(x, "%Y-%m-%d"),
# datetime without T
lambda x: datetime.strptime(x, "%Y-%m-%d %H:%M:%S"),
lambda x: datetime.strptime(x, "%Y-%m-%d %H:%M:%S.%f"),
# Alternative ISO formats (fallback)
lambda x: datetime.strptime(x, "%Y-%m-%dT%H:%M:%S"),
lambda x: datetime.strptime(x, "%Y-%m-%dT%H:%M:%S.%f"),
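
A minimal sketch of the new fallback behavior added above: space-separated datetimes now parse in addition to the "T"-separated ISO forms. The import path below is hypothetical, since the hunk header does not show which module defines parse_flexible_date.

```python
# Hypothetical import path; the diff does not name the module that holds parse_flexible_date.
from corelibs.datetime_handling.datetime_helper import parse_flexible_date

# ISO 8601 with "T" separator (worked before this change)
print(parse_flexible_date("2023-12-25T15:30:45"))
# Space-separated datetime, with and without microseconds (handled by the new parsers)
print(parse_flexible_date("2023-12-25 15:30:45"))
print(parse_flexible_date("2023-12-25 15:30:45.123456"))
```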

View File

View File

@@ -0,0 +1,199 @@
"""
Send email wrapper
"""
import smtplib
from email.message import EmailMessage
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from corelibs.logging_handling.log import Logger
class SendEmail:
"""
send emails based on a template to a list of receivers
"""
def __init__(
self,
log: "Logger",
settings: dict[str, Any],
template: dict[str, str],
from_email: str,
combined_send: bool = True,
receivers: list[str] | None = None,
data: list[dict[str, str]] | None = None,
):
"""
init send email class
Args:
template (dict): Dictionary with body and subject
from_email (str): from email as "Name" <email>
combined_send (bool): True for sending as one set for all receivers
receivers (list): list of emails to send to
data (dict): data to replace in template
args (Namespace): _description_
"""
self.log = log
self.settings = settings
# internal settings
self.template = template
self.from_email = from_email
self.combined_send = combined_send
self.receivers = receivers
self.data = data
def send_email(
self,
data: list[dict[str, str]] | None,
receivers: list[str] | None,
template: dict[str, str] | None = None,
from_email: str | None = None,
combined_send: bool | None = None,
test_only: bool | None = None
):
"""
build email and send
Arguments:
data {list[dict[str, str]] | None} -- _description_
receivers {list[str] | None} -- _description_
combined_send {bool | None} -- _description_
Keyword Arguments:
template {dict[str, str] | None} -- _description_ (default: {None})
from_email {str | None} -- _description_ (default: {None})
Raises:
ValueError: _description_
ValueError: _description_
"""
if data is None and self.data is not None:
data = self.data
if data is None:
raise ValueError("No replace data set, cannot send email")
if receivers is None and self.receivers is not None:
receivers = self.receivers
if receivers is None:
raise ValueError("No receivers list set, cannot send email")
if combined_send is None:
combined_send = self.combined_send
if test_only is not None:
self.settings['test'] = test_only
if template is None:
template = self.template
if from_email is None:
from_email = self.from_email
if not template['subject'] or not template['body']:
raise ValueError("Both Subject and Body must be set")
self.log.debug(
"[EMAIL]:\n"
f"Subject: {template['subject']}\n"
f"Body: {template['body']}\n"
f"From: {from_email}\n"
f"Combined send: {combined_send}\n"
f"Receivers: {receivers}\n"
f"Replace data: {data}"
)
# send email
self.send_email_list(
self.prepare_email_content(
from_email, template, data
),
receivers,
combined_send,
test_only
)
def prepare_email_content(
self,
from_email: str,
template: dict[str, str],
data: list[dict[str, str]],
) -> list[EmailMessage]:
"""
prepare email for sending
Args:
template (dict): template data for this email
data (dict): data to replace in email
Returns:
list: Email Message Objects as list
"""
_subject = ""
_body = ""
msg: list[EmailMessage] = []
for replace in data:
_subject = template["subject"]
_body = template["body"]
for key, value in replace.items():
_subject = _subject.replace(f"{{{{{key}}}}}", value)
_body = _body.replace(f"{{{{{key}}}}}", value)
# create a simple email and add subject, from email
msg_email = EmailMessage()
# msg.set_content(_body, charset='utf-8', cte='quoted-printable')
msg_email.set_content(_body, charset="utf-8")
msg_email["Subject"] = _subject
msg_email["From"] = from_email
# push to list for sending
msg.append(msg_email)
return msg
def send_email_list(
self,
email: list[EmailMessage], receivers: list[str],
combined_send: bool | None = None,
test_only: bool | None = None
):
"""
send email to receivers list
Args:
email (list): Email Message object with set obdy, subject, from as list
receivers (array): email receivers list as array
combined_send (bool): True for sending as one set for all receivers
"""
if test_only is not None:
self.settings['test'] = test_only
# localhost (postfix does the rest)
smtp = None
smtp_host = self.settings.get('smtp_host', "localhost")
try:
smtp = smtplib.SMTP(smtp_host)
except ConnectionRefusedError as e:
self.log.error("Could not open SMTP connection to: %s, %s", smtp_host, e)
# loop over messages and then over receivers
for msg in email:
if combined_send is True:
msg["To"] = ", ".join(receivers)
if not self.settings.get('test'):
if smtp is not None:
smtp.send_message(msg, msg["From"], receivers)
else:
self.log.info(f"[EMAIL] Test, not sending email\n{msg}")
else:
for receiver in receivers:
# send to
self.log.debug(f"===> Send to: {receiver}")
if "To" in msg:
msg.replace_header("To", receiver)
else:
msg["To"] = receiver
if not self.settings.get('test'):
if smtp is not None:
smtp.send_message(msg)
else:
self.log.info(f"[EMAIL] Test, not sending email\n{msg}")
# close smtp
if smtp is not None:
smtp.quit()
# __END__
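
For reference, a hedged usage sketch of the new SendEmail class. The module path and the addresses are assumptions; the settings keys ("smtp_host", "test") and the {{key}} placeholder replacement come from the code above.

```python
import logging

# Assumed module path for the new class added in this commit.
from corelibs.email_handling.send_email import SendEmail

# A stdlib logger stands in for the corelibs Logger in this sketch;
# the class only calls debug/info/error on it.
log = logging.getLogger("send_email_demo")

mailer = SendEmail(
    log=log,
    settings={"smtp_host": "localhost", "test": True},  # test=True only logs, nothing is sent
    template={
        "subject": "Report for {{name}}",
        "body": "Hello {{name}},\nplease find the report attached.",
    },
    from_email='"Reports" <reports@example.com>',
    combined_send=False,
    receivers=["alice@example.com", "bob@example.com"],
    data=[{"name": "Team"}],
)
# Passing None falls back to the receivers/data given in __init__.
mailer.send_email(data=None, receivers=None)
```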

View File

@@ -0,0 +1,75 @@
"""
Check if a file is BOM encoded, needed for CSV loading
"""
from pathlib import Path
from typing import TypedDict
class BomEncodingInfo(TypedDict):
"""BOM encoding info"""
has_bom: bool
bom_type: str | None
encoding: str | None
bom_length: int
bom_pattern: bytes | None
def is_bom_encoded(file_path: Path) -> bool:
"""
Detect if a file is BOM encoded
Args:
file_path (Path): Path to the file to check
Returns:
bool: True if file has BOM, False otherwise
"""
return is_bom_encoded_info(file_path)['has_bom']
def is_bom_encoded_info(file_path: Path) -> BomEncodingInfo:
"""
Enhanced BOM detection with additional file analysis
Args:
file_path (Path): Path to the file to check
Returns:
dict: Comprehensive BOM and encoding information
"""
try:
# Read the first 4 bytes (longest BOM pattern) for analysis
with open(file_path, 'rb') as f:
header = f.read(4)
bom_patterns = {
b'\xef\xbb\xbf': ('UTF-8', 'utf-8', 3),
b'\xff\xfe\x00\x00': ('UTF-32 LE', 'utf-32-le', 4),
b'\x00\x00\xfe\xff': ('UTF-32 BE', 'utf-32-be', 4),
b'\xff\xfe': ('UTF-16 LE', 'utf-16-le', 2),
b'\xfe\xff': ('UTF-16 BE', 'utf-16-be', 2),
}
for bom_pattern, (encoding_name, encoding, length) in bom_patterns.items():
if header.startswith(bom_pattern):
return {
'has_bom': True,
'bom_type': encoding_name,
'encoding': encoding,
'bom_length': length,
'bom_pattern': bom_pattern
}
return {
'has_bom': False,
'bom_type': None,
'encoding': None,
'bom_length': 0,
'bom_pattern': None
}
except Exception as e:
raise ValueError(f"Error checking BOM encoding: {e}") from e
# __END__
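
A short sketch of the CSV-loading use case the module docstring mentions. The file path is a placeholder; the import matches the one used in the test-run script further below.

```python
from pathlib import Path

from corelibs.file_handling.file_bom_encoding import is_bom_encoded_info

csv_path = Path("data.csv")  # placeholder path
info = is_bom_encoded_info(csv_path)
if info["has_bom"] and info["bom_type"] == "UTF-8":
    # "utf-8-sig" strips the UTF-8 BOM while reading
    encoding = "utf-8-sig"
else:
    # fall back to the detected encoding (other BOM types would still need
    # info["bom_length"] bytes skipped) or plain UTF-8 when no BOM was found
    encoding = info["encoding"] or "utf-8"
with open(csv_path, encoding=encoding, newline="") as fh:
    header_line = fh.readline()
```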

View File

@@ -0,0 +1,31 @@
#!/usr/bin/env python3
"""
BOM check for files
"""
from pathlib import Path
from corelibs.file_handling.file_bom_encoding import is_bom_encoded, is_bom_encoded_info
from corelibs.debug_handling.dump_data import dump_data
def main() -> None:
"""
Check files for BOM encoding
"""
base_path = Path(__file__).resolve().parent
for file_path in [
'test-data/sample_with_bom.csv',
'test-data/sample_without_bom.csv',
]:
has_bom = is_bom_encoded(base_path.joinpath(file_path))
bom_info = is_bom_encoded_info(base_path.joinpath(file_path))
print(f'File: {file_path}')
print(f' Has BOM: {has_bom}')
print(f' BOM Info: {dump_data(bom_info)}')
if __name__ == "__main__":
main()
# __END__

View File

@@ -0,0 +1,6 @@
Name,Age,City,Country
John Doe,25,New York,USA
Jane Smith,30,London,UK
山田太郎,28,東京,Japan
María García,35,Madrid,Spain
François Dupont,42,Paris,France

View File

@@ -0,0 +1,6 @@
Name,Age,City,Country
John Doe,25,New York,USA
Jane Smith,30,London,UK
山田太郎,28,東京,Japan
María García,35,Madrid,Spain
François Dupont,42,Paris,France

View File

@@ -5,7 +5,7 @@ Iterator helper testing
from typing import Any
from corelibs.debug_handling.dump_data import dump_data
from corelibs.iterator_handling.dict_mask import mask
from corelibs.iterator_handling.dict_helper import set_entry
from corelibs.iterator_handling.dict_helpers import set_entry
def __mask():

View File

View File

@@ -275,6 +275,53 @@ class TestParseFlexibleDate:
assert isinstance(result, datetime)
assert result.tzinfo is not None
def test_parse_flexible_date_missing_t_with_timezone_shift(self):
"""Test parse_flexible_date with timezone shift"""
result = parse_flexible_date('2023-12-25 15:30:45+00:00', timezone_tz='Asia/Tokyo', shift_time_zone=True)
assert isinstance(result, datetime)
assert result.tzinfo is not None
def test_parse_flexible_date_space_separated_datetime(self):
"""Test parse_flexible_date with space-separated datetime format"""
result = parse_flexible_date('2023-12-25 15:30:45')
assert isinstance(result, datetime)
assert result.year == 2023
assert result.month == 12
assert result.day == 25
assert result.hour == 15
assert result.minute == 30
assert result.second == 45
def test_parse_flexible_date_space_separated_with_microseconds(self):
"""Test parse_flexible_date with space-separated datetime and microseconds"""
result = parse_flexible_date('2023-12-25 15:30:45.123456')
assert isinstance(result, datetime)
assert result.year == 2023
assert result.month == 12
assert result.day == 25
assert result.hour == 15
assert result.minute == 30
assert result.second == 45
assert result.microsecond == 123456
def test_parse_flexible_date_t_separated_datetime(self):
"""Test parse_flexible_date with T-separated datetime (alternative ISO format)"""
result = parse_flexible_date('2023-12-25T15:30:45')
assert isinstance(result, datetime)
assert result.year == 2023
assert result.month == 12
assert result.day == 25
assert result.hour == 15
assert result.minute == 30
assert result.second == 45
def test_parse_flexible_date_t_separated_with_microseconds(self):
"""Test parse_flexible_date with T-separated datetime and microseconds"""
result = parse_flexible_date('2023-12-25T15:30:45.123456')
assert isinstance(result, datetime)
assert result.year == 2023
assert result.microsecond == 123456
def test_parse_flexible_date_invalid_format(self):
"""Test parse_flexible_date with invalid format returns None"""
result = parse_flexible_date('invalid-date')

File diff suppressed because it is too large

View File

@@ -0,0 +1,538 @@
"""
PyTest: file_handling/file_bom_encoding
"""
from pathlib import Path
import pytest
from corelibs.file_handling.file_bom_encoding import (
is_bom_encoded,
is_bom_encoded_info,
BomEncodingInfo,
)
class TestIsBomEncoded:
"""Test suite for is_bom_encoded function"""
def test_utf8_bom_file(self, tmp_path: Path):
"""Test detection of UTF-8 BOM encoded file"""
test_file = tmp_path / "utf8_bom.txt"
# UTF-8 BOM: EF BB BF
content = b'\xef\xbb\xbfHello, World!'
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is True
assert isinstance(result, bool)
def test_utf16_le_bom_file(self, tmp_path: Path):
"""Test detection of UTF-16 LE BOM encoded file"""
test_file = tmp_path / "utf16_le_bom.txt"
# UTF-16 LE BOM: FF FE
content = b'\xff\xfeH\x00e\x00l\x00l\x00o\x00'
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is True
def test_utf16_be_bom_file(self, tmp_path: Path):
"""Test detection of UTF-16 BE BOM encoded file"""
test_file = tmp_path / "utf16_be_bom.txt"
# UTF-16 BE BOM: FE FF
content = b'\xfe\xff\x00H\x00e\x00l\x00l\x00o'
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is True
def test_utf32_le_bom_file(self, tmp_path: Path):
"""Test detection of UTF-32 LE BOM encoded file"""
test_file = tmp_path / "utf32_le_bom.txt"
# UTF-32 LE BOM: FF FE 00 00
content = b'\xff\xfe\x00\x00H\x00\x00\x00e\x00\x00\x00'
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is True
def test_utf32_be_bom_file(self, tmp_path: Path):
"""Test detection of UTF-32 BE BOM encoded file"""
test_file = tmp_path / "utf32_be_bom.txt"
# UTF-32 BE BOM: 00 00 FE FF
content = b'\x00\x00\xfe\xff\x00\x00\x00H\x00\x00\x00e'
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is True
def test_no_bom_ascii_file(self, tmp_path: Path):
"""Test detection of ASCII file without BOM"""
test_file = tmp_path / "ascii.txt"
content = b'Hello, World!'
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is False
def test_no_bom_utf8_file(self, tmp_path: Path):
"""Test detection of UTF-8 file without BOM"""
test_file = tmp_path / "utf8_no_bom.txt"
content = 'Hello, 世界!'.encode('utf-8')
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is False
def test_empty_file(self, tmp_path: Path):
"""Test detection on empty file"""
test_file = tmp_path / "empty.txt"
test_file.write_bytes(b'')
result = is_bom_encoded(test_file)
assert result is False
def test_binary_file_no_bom(self, tmp_path: Path):
"""Test detection on binary file without BOM"""
test_file = tmp_path / "binary.bin"
content = bytes(range(256))
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is False
def test_partial_bom_pattern(self, tmp_path: Path):
"""Test file with partial BOM pattern that shouldn't match"""
test_file = tmp_path / "partial_bom.txt"
# Only first two bytes of UTF-8 BOM
content = b'\xef\xbbHello'
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is False
def test_false_positive_bom_pattern(self, tmp_path: Path):
"""Test file that contains BOM-like bytes but not at the start"""
test_file = tmp_path / "false_positive.txt"
content = b'Hello\xef\xbb\xbfWorld'
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is False
def test_nonexistent_file(self, tmp_path: Path):
"""Test that function raises error for non-existent file"""
test_file = tmp_path / "nonexistent.txt"
with pytest.raises(ValueError, match="Error checking BOM encoding"):
is_bom_encoded(test_file)
def test_very_small_file(self, tmp_path: Path):
"""Test file smaller than largest BOM pattern (4 bytes)"""
test_file = tmp_path / "small.txt"
content = b'Hi'
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is False
def test_exactly_bom_size_utf8(self, tmp_path: Path):
"""Test file that is exactly the size of UTF-8 BOM"""
test_file = tmp_path / "exact_bom.txt"
content = b'\xef\xbb\xbf'
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is True
def test_exactly_bom_size_utf32(self, tmp_path: Path):
"""Test file that is exactly the size of UTF-32 BOM"""
test_file = tmp_path / "exact_bom_utf32.txt"
content = b'\xff\xfe\x00\x00'
test_file.write_bytes(content)
result = is_bom_encoded(test_file)
assert result is True
class TestIsBomEncodedInfo:
"""Test suite for is_bom_encoded_info function"""
def test_utf8_bom_info(self, tmp_path: Path):
"""Test detailed info for UTF-8 BOM encoded file"""
test_file = tmp_path / "utf8_bom.txt"
content = b'\xef\xbb\xbfHello, UTF-8!'
test_file.write_bytes(content)
result = is_bom_encoded_info(test_file)
assert isinstance(result, dict)
assert result['has_bom'] is True
assert result['bom_type'] == 'UTF-8'
assert result['encoding'] == 'utf-8'
assert result['bom_length'] == 3
assert result['bom_pattern'] == b'\xef\xbb\xbf'
def test_utf16_le_bom_info(self, tmp_path: Path):
"""Test detailed info for UTF-16 LE BOM encoded file"""
test_file = tmp_path / "utf16_le_bom.txt"
content = b'\xff\xfeH\x00e\x00l\x00l\x00o\x00'
test_file.write_bytes(content)
result = is_bom_encoded_info(test_file)
assert result['has_bom'] is True
assert result['bom_type'] == 'UTF-16 LE'
assert result['encoding'] == 'utf-16-le'
assert result['bom_length'] == 2
assert result['bom_pattern'] == b'\xff\xfe'
def test_utf16_be_bom_info(self, tmp_path: Path):
"""Test detailed info for UTF-16 BE BOM encoded file"""
test_file = tmp_path / "utf16_be_bom.txt"
content = b'\xfe\xff\x00H\x00e\x00l\x00l\x00o'
test_file.write_bytes(content)
result = is_bom_encoded_info(test_file)
assert result['has_bom'] is True
assert result['bom_type'] == 'UTF-16 BE'
assert result['encoding'] == 'utf-16-be'
assert result['bom_length'] == 2
assert result['bom_pattern'] == b'\xfe\xff'
def test_utf32_le_bom_info(self, tmp_path: Path):
"""Test detailed info for UTF-32 LE BOM encoded file"""
test_file = tmp_path / "utf32_le_bom.txt"
content = b'\xff\xfe\x00\x00H\x00\x00\x00e\x00\x00\x00'
test_file.write_bytes(content)
result = is_bom_encoded_info(test_file)
assert result['has_bom'] is True
assert result['bom_type'] == 'UTF-32 LE'
assert result['encoding'] == 'utf-32-le'
assert result['bom_length'] == 4
assert result['bom_pattern'] == b'\xff\xfe\x00\x00'
def test_utf32_be_bom_info(self, tmp_path: Path):
"""Test detailed info for UTF-32 BE BOM encoded file"""
test_file = tmp_path / "utf32_be_bom.txt"
content = b'\x00\x00\xfe\xff\x00\x00\x00H\x00\x00\x00e'
test_file.write_bytes(content)
result = is_bom_encoded_info(test_file)
assert result['has_bom'] is True
assert result['bom_type'] == 'UTF-32 BE'
assert result['encoding'] == 'utf-32-be'
assert result['bom_length'] == 4
assert result['bom_pattern'] == b'\x00\x00\xfe\xff'
def test_no_bom_info(self, tmp_path: Path):
"""Test detailed info for file without BOM"""
test_file = tmp_path / "no_bom.txt"
content = b'Hello, World!'
test_file.write_bytes(content)
result = is_bom_encoded_info(test_file)
assert result['has_bom'] is False
assert result['bom_type'] is None
assert result['encoding'] is None
assert result['bom_length'] == 0
assert result['bom_pattern'] is None
def test_empty_file_info(self, tmp_path: Path):
"""Test detailed info for empty file"""
test_file = tmp_path / "empty.txt"
test_file.write_bytes(b'')
result = is_bom_encoded_info(test_file)
assert result['has_bom'] is False
assert result['bom_type'] is None
assert result['encoding'] is None
assert result['bom_length'] == 0
assert result['bom_pattern'] is None
def test_bom_precedence_utf32_vs_utf16(self, tmp_path: Path):
"""Test that UTF-32 LE BOM takes precedence over UTF-16 LE when both match"""
test_file = tmp_path / "precedence.txt"
# UTF-32 LE BOM starts with UTF-16 LE BOM pattern
content = b'\xff\xfe\x00\x00Additional content'
test_file.write_bytes(content)
result = is_bom_encoded_info(test_file)
# Should detect UTF-32 LE, not UTF-16 LE
assert result['has_bom'] is True
assert result['bom_type'] == 'UTF-32 LE'
assert result['encoding'] == 'utf-32-le'
assert result['bom_length'] == 4
assert result['bom_pattern'] == b'\xff\xfe\x00\x00'
def test_return_type_validation(self, tmp_path: Path):
"""Test that return type matches BomEncodingInfo TypedDict"""
test_file = tmp_path / "test.txt"
test_file.write_bytes(b'Test content')
result = is_bom_encoded_info(test_file)
# Check all required keys are present
required_keys = {'has_bom', 'bom_type', 'encoding', 'bom_length', 'bom_pattern'}
assert set(result.keys()) == required_keys
# Check types
assert isinstance(result['has_bom'], bool)
assert result['bom_type'] is None or isinstance(result['bom_type'], str)
assert result['encoding'] is None or isinstance(result['encoding'], str)
assert isinstance(result['bom_length'], int)
assert result['bom_pattern'] is None or isinstance(result['bom_pattern'], bytes)
def test_nonexistent_file_error(self, tmp_path: Path):
"""Test that function raises ValueError for non-existent file"""
test_file = tmp_path / "nonexistent.txt"
with pytest.raises(ValueError) as exc_info:
is_bom_encoded_info(test_file)
assert "Error checking BOM encoding" in str(exc_info.value)
def test_directory_instead_of_file(self, tmp_path: Path):
"""Test that function raises error when given a directory"""
test_dir = tmp_path / "test_dir"
test_dir.mkdir()
with pytest.raises(ValueError, match="Error checking BOM encoding"):
is_bom_encoded_info(test_dir)
def test_large_file_with_bom(self, tmp_path: Path):
"""Test BOM detection on large file (only first 4 bytes matter)"""
test_file = tmp_path / "large_bom.txt"
# UTF-8 BOM followed by large content
content = b'\xef\xbb\xbf' + b'A' * 100000
test_file.write_bytes(content)
result = is_bom_encoded_info(test_file)
assert result['has_bom'] is True
assert result['bom_type'] == 'UTF-8'
assert result['encoding'] == 'utf-8'
def test_bom_detection_priority_order(self, tmp_path: Path):
"""Test that BOM patterns are checked in the correct priority order"""
# The function should check longer patterns first to avoid false matches
test_cases = [
(b'\xff\xfe\x00\x00', 'UTF-32 LE'), # 4 bytes
(b'\x00\x00\xfe\xff', 'UTF-32 BE'), # 4 bytes
(b'\xff\xfe', 'UTF-16 LE'), # 2 bytes
(b'\xfe\xff', 'UTF-16 BE'), # 2 bytes
(b'\xef\xbb\xbf', 'UTF-8'), # 3 bytes
]
for i, (bom_bytes, expected_type) in enumerate(test_cases):
test_file = tmp_path / f"priority_test_{i}.txt"
content = bom_bytes + b'Content'
test_file.write_bytes(content)
result = is_bom_encoded_info(test_file)
assert result['bom_type'] == expected_type
assert result['bom_pattern'] == bom_bytes
def test_csv_file_with_utf8_bom(self, tmp_path: Path):
"""Test CSV file with UTF-8 BOM (common use case mentioned in docstring)"""
test_file = tmp_path / "data.csv"
content = b'\xef\xbb\xbf"Name","Age","City"\n"John",30,"New York"\n"Jane",25,"Tokyo"'
test_file.write_bytes(content)
result = is_bom_encoded_info(test_file)
assert result['has_bom'] is True
assert result['bom_type'] == 'UTF-8'
assert result['encoding'] == 'utf-8'
assert result['bom_length'] == 3
def test_csv_file_without_bom(self, tmp_path: Path):
"""Test CSV file without BOM"""
test_file = tmp_path / "data_no_bom.csv"
content = b'"Name","Age","City"\n"John",30,"New York"\n"Jane",25,"Tokyo"'
test_file.write_bytes(content)
result = is_bom_encoded_info(test_file)
assert result['has_bom'] is False
assert result['bom_type'] is None
assert result['encoding'] is None
assert result['bom_length'] == 0
class TestBomEncodingInfo:
"""Test suite for BomEncodingInfo TypedDict"""
def test_typed_dict_structure(self):
"""Test that BomEncodingInfo has correct structure"""
# This is a type check - in actual usage, mypy would validate this
sample_info: BomEncodingInfo = {
'has_bom': True,
'bom_type': 'UTF-8',
'encoding': 'utf-8',
'bom_length': 3,
'bom_pattern': b'\xef\xbb\xbf'
}
assert sample_info['has_bom'] is True
assert sample_info['bom_type'] == 'UTF-8'
assert sample_info['encoding'] == 'utf-8'
assert sample_info['bom_length'] == 3
assert sample_info['bom_pattern'] == b'\xef\xbb\xbf'
def test_typed_dict_none_values(self):
"""Test TypedDict with None values"""
sample_info: BomEncodingInfo = {
'has_bom': False,
'bom_type': None,
'encoding': None,
'bom_length': 0,
'bom_pattern': None
}
assert sample_info['has_bom'] is False
assert sample_info['bom_type'] is None
assert sample_info['encoding'] is None
assert sample_info['bom_length'] == 0
assert sample_info['bom_pattern'] is None
class TestIntegration:
"""Integration tests for BOM encoding detection"""
def test_is_bom_encoded_uses_info_function(self, tmp_path: Path):
"""Test that is_bom_encoded uses is_bom_encoded_info internally"""
test_file = tmp_path / "integration.txt"
content = b'\xef\xbb\xbfIntegration test'
test_file.write_bytes(content)
# Both functions should return consistent results
simple_result = is_bom_encoded(test_file)
detailed_result = is_bom_encoded_info(test_file)
assert simple_result == detailed_result['has_bom']
assert simple_result is True
def test_multiple_file_bom_detection_workflow(self, tmp_path: Path):
"""Test a workflow of detecting BOM across multiple files"""
files = {
'utf8_bom.csv': b'\xef\xbb\xbf"data","value"\n"test",123',
'utf16_le.txt': b'\xff\xfeH\x00e\x00l\x00l\x00o\x00',
'no_bom.txt': b'Plain ASCII text',
'empty.txt': b'',
}
results = {}
detailed_results = {}
for filename, content in files.items():
file_path = tmp_path / filename
file_path.write_bytes(content)
results[filename] = is_bom_encoded(file_path)
detailed_results[filename] = is_bom_encoded_info(file_path)
# Verify results
assert results['utf8_bom.csv'] is True
assert results['utf16_le.txt'] is True
assert results['no_bom.txt'] is False
assert results['empty.txt'] is False
# Verify detailed results match simple results
for filename in files:
assert results[filename] == detailed_results[filename]['has_bom']
# Verify specific encoding details
assert detailed_results['utf8_bom.csv']['encoding'] == 'utf-8'
assert detailed_results['utf16_le.txt']['encoding'] == 'utf-16-le'
assert detailed_results['no_bom.txt']['encoding'] is None
def test_csv_loading_workflow(self, tmp_path: Path):
"""Test BOM detection workflow for CSV loading (main use case)"""
# Create CSV files with and without BOM
csv_with_bom = tmp_path / "data_with_bom.csv"
csv_without_bom = tmp_path / "data_without_bom.csv"
# CSV with UTF-8 BOM
bom_content = b'\xef\xbb\xbf"Name","Age"\n"Alice",30\n"Bob",25'
csv_with_bom.write_bytes(bom_content)
# CSV without BOM
no_bom_content = b'"Name","Age"\n"Charlie",35\n"Diana",28'
csv_without_bom.write_bytes(no_bom_content)
# Simulate CSV loading workflow
files_to_process = [csv_with_bom, csv_without_bom]
processing_info: list[dict[str, str | bool | int]] = []
for csv_file in files_to_process:
bom_info = is_bom_encoded_info(csv_file)
file_info: dict[str, str | bool | int] = {
'file': csv_file.name,
'has_bom': bom_info['has_bom'],
'encoding': bom_info['encoding'] or 'default',
'skip_bytes': bom_info['bom_length']
}
processing_info.append(file_info)
# Verify workflow results
assert len(processing_info) == 2
bom_file_info = next(info for info in processing_info if info['file'] == 'data_with_bom.csv')
no_bom_file_info = next(info for info in processing_info if info['file'] == 'data_without_bom.csv')
assert bom_file_info['has_bom'] is True
assert bom_file_info['encoding'] == 'utf-8'
assert bom_file_info['skip_bytes'] == 3
assert no_bom_file_info['has_bom'] is False
assert no_bom_file_info['encoding'] == 'default'
assert no_bom_file_info['skip_bytes'] == 0
def test_error_handling_consistency(self, tmp_path: Path):
"""Test that both functions handle errors consistently"""
nonexistent_file = tmp_path / "does_not_exist.txt"
# Both functions should raise ValueError for non-existent files
with pytest.raises(ValueError):
is_bom_encoded(nonexistent_file)
with pytest.raises(ValueError):
is_bom_encoded_info(nonexistent_file)
def test_all_supported_bom_types(self, tmp_path: Path):
"""Test detection of all supported BOM types"""
bom_test_cases = [
('utf8', b'\xef\xbb\xbf', 'UTF-8', 'utf-8', 3),
('utf16_le', b'\xff\xfe', 'UTF-16 LE', 'utf-16-le', 2),
('utf16_be', b'\xfe\xff', 'UTF-16 BE', 'utf-16-be', 2),
('utf32_le', b'\xff\xfe\x00\x00', 'UTF-32 LE', 'utf-32-le', 4),
('utf32_be', b'\x00\x00\xfe\xff', 'UTF-32 BE', 'utf-32-be', 4),
]
for name, bom_bytes, expected_type, expected_encoding, expected_length in bom_test_cases:
test_file = tmp_path / f"{name}_test.txt"
content = bom_bytes + b'Test content'
test_file.write_bytes(content)
# Test simple function
assert is_bom_encoded(test_file) is True
# Test detailed function
info = is_bom_encoded_info(test_file)
assert info['has_bom'] is True
assert info['bom_type'] == expected_type
assert info['encoding'] == expected_encoding
assert info['bom_length'] == expected_length
assert info['bom_pattern'] == bom_bytes
# __END__

View File

@@ -6,7 +6,7 @@ iterator_handling.dict_helper tests
from typing import Any
import pytest
from corelibs.iterator_handling.dict_helper import (
from corelibs.iterator_handling.dict_helpers import (
delete_keys_from_set,
build_dict,
set_entry,

uv.lock generated
View File

@@ -108,7 +108,7 @@ wheels = [
[[package]]
name = "corelibs"
version = "0.31.0"
version = "0.34.0"
source = { editable = "." }
dependencies = [
{ name = "cryptography" },
@@ -347,28 +347,28 @@ wheels = [
[[package]]
name = "psutil"
version = "7.1.2"
version = "7.1.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cd/ec/7b8e6b9b1d22708138630ef34c53ab2b61032c04f16adfdbb96791c8c70c/psutil-7.1.2.tar.gz", hash = "sha256:aa225cdde1335ff9684708ee8c72650f6598d5ed2114b9a7c5802030b1785018", size = 487424, upload-time = "2025-10-25T10:46:34.931Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b8/d9/b56cc9f883140ac10021a8c9b0f4e16eed1ba675c22513cdcbce3ba64014/psutil-7.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0cc5c6889b9871f231ed5455a9a02149e388fffcb30b607fb7a8896a6d95f22e", size = 238575, upload-time = "2025-10-25T10:46:38.728Z" },
{ url = "https://files.pythonhosted.org/packages/36/eb/28d22de383888deb252c818622196e709da98816e296ef95afda33f1c0a2/psutil-7.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8e9e77a977208d84aa363a4a12e0f72189d58bbf4e46b49aae29a2c6e93ef206", size = 239297, upload-time = "2025-10-25T10:46:41.347Z" },
{ url = "https://files.pythonhosted.org/packages/89/5d/220039e2f28cc129626e54d63892ab05c0d56a29818bfe7268dcb5008932/psutil-7.1.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d9623a5e4164d2220ecceb071f4b333b3c78866141e8887c072129185f41278", size = 280420, upload-time = "2025-10-25T10:46:44.122Z" },
{ url = "https://files.pythonhosted.org/packages/ba/7a/286f0e1c167445b2ef4a6cbdfc8c59fdb45a5a493788950cf8467201dc73/psutil-7.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:364b1c10fe4ed59c89ec49e5f1a70da353b27986fa8233b4b999df4742a5ee2f", size = 283049, upload-time = "2025-10-25T10:46:47.095Z" },
{ url = "https://files.pythonhosted.org/packages/aa/cc/7eb93260794a42e39b976f3a4dde89725800b9f573b014fac142002a5c98/psutil-7.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f101ef84de7e05d41310e3ccbdd65a6dd1d9eed85e8aaf0758405d022308e204", size = 248713, upload-time = "2025-10-25T10:46:49.573Z" },
{ url = "https://files.pythonhosted.org/packages/ab/1a/0681a92b53366e01f0a099f5237d0c8a2f79d322ac589cccde5e30c8a4e2/psutil-7.1.2-cp313-cp313t-win_arm64.whl", hash = "sha256:20c00824048a95de67f00afedc7b08b282aa08638585b0206a9fb51f28f1a165", size = 244644, upload-time = "2025-10-25T10:46:51.924Z" },
{ url = "https://files.pythonhosted.org/packages/56/9e/f1c5c746b4ed5320952acd3002d3962fe36f30524c00ea79fdf954cc6779/psutil-7.1.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:e09cfe92aa8e22b1ec5e2d394820cf86c5dff6367ac3242366485dfa874d43bc", size = 238640, upload-time = "2025-10-25T10:46:54.089Z" },
{ url = "https://files.pythonhosted.org/packages/32/ee/fd26216a735395cc25c3899634e34aeb41fb1f3dbb44acc67d9e594be562/psutil-7.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fa6342cf859c48b19df3e4aa170e4cfb64aadc50b11e06bb569c6c777b089c9e", size = 239303, upload-time = "2025-10-25T10:46:56.932Z" },
{ url = "https://files.pythonhosted.org/packages/3c/cd/7d96eaec4ef7742b845a9ce2759a2769ecce4ab7a99133da24abacbc9e41/psutil-7.1.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:625977443498ee7d6c1e63e93bacca893fd759a66c5f635d05e05811d23fb5ee", size = 281717, upload-time = "2025-10-25T10:46:59.116Z" },
{ url = "https://files.pythonhosted.org/packages/bc/1a/7f0b84bdb067d35fe7fade5fff888408688caf989806ce2d6dae08c72dd5/psutil-7.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a24bcd7b7f2918d934af0fb91859f621b873d6aa81267575e3655cd387572a7", size = 284575, upload-time = "2025-10-25T10:47:00.944Z" },
{ url = "https://files.pythonhosted.org/packages/de/05/7820ef8f7b275268917e0c750eada5834581206d9024ca88edce93c4b762/psutil-7.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:329f05610da6380982e6078b9d0881d9ab1e9a7eb7c02d833bfb7340aa634e31", size = 249491, upload-time = "2025-10-25T10:47:03.174Z" },
{ url = "https://files.pythonhosted.org/packages/db/9a/58de399c7cb58489f08498459ff096cd76b3f1ddc4f224ec2c5ef729c7d0/psutil-7.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:7b04c29e3c0c888e83ed4762b70f31e65c42673ea956cefa8ced0e31e185f582", size = 244880, upload-time = "2025-10-25T10:47:05.228Z" },
{ url = "https://files.pythonhosted.org/packages/ae/89/b9f8d47ddbc52d7301fc868e8224e5f44ed3c7f55e6d0f54ecaf5dd9ff5e/psutil-7.1.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c9ba5c19f2d46203ee8c152c7b01df6eec87d883cfd8ee1af2ef2727f6b0f814", size = 237244, upload-time = "2025-10-25T10:47:07.086Z" },
{ url = "https://files.pythonhosted.org/packages/c8/7a/8628c2f6b240680a67d73d8742bb9ff39b1820a693740e43096d5dcb01e5/psutil-7.1.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:2a486030d2fe81bec023f703d3d155f4823a10a47c36784c84f1cc7f8d39bedb", size = 238101, upload-time = "2025-10-25T10:47:09.523Z" },
{ url = "https://files.pythonhosted.org/packages/30/28/5e27f4d5a0e347f8e3cc16cd7d35533dbce086c95807f1f0e9cd77e26c10/psutil-7.1.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3efd8fc791492e7808a51cb2b94889db7578bfaea22df931424f874468e389e3", size = 258675, upload-time = "2025-10-25T10:47:11.082Z" },
{ url = "https://files.pythonhosted.org/packages/e5/5c/79cf60c9acf36d087f0db0f82066fca4a780e97e5b3a2e4c38209c03d170/psutil-7.1.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2aeb9b64f481b8eabfc633bd39e0016d4d8bbcd590d984af764d80bf0851b8a", size = 260203, upload-time = "2025-10-25T10:47:13.226Z" },
{ url = "https://files.pythonhosted.org/packages/f7/03/0a464404c51685dcb9329fdd660b1721e076ccd7b3d97dee066bcc9ffb15/psutil-7.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:8e17852114c4e7996fe9da4745c2bdef001ebbf2f260dec406290e66628bdb91", size = 246714, upload-time = "2025-10-25T10:47:15.093Z" },
{ url = "https://files.pythonhosted.org/packages/6a/32/97ca2090f2f1b45b01b6aa7ae161cfe50671de097311975ca6eea3e7aabc/psutil-7.1.2-cp37-abi3-win_arm64.whl", hash = "sha256:3e988455e61c240cc879cb62a008c2699231bf3e3d061d7fce4234463fd2abb4", size = 243742, upload-time = "2025-10-25T10:47:17.302Z" },
{ url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" },
{ url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" },
{ url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" },
{ url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" },
{ url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" },
{ url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" },
{ url = "https://files.pythonhosted.org/packages/2e/bb/6670bded3e3236eb4287c7bcdc167e9fae6e1e9286e437f7111caed2f909/psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353", size = 239843, upload-time = "2025-11-02T12:26:11.968Z" },
{ url = "https://files.pythonhosted.org/packages/b8/66/853d50e75a38c9a7370ddbeefabdd3d3116b9c31ef94dc92c6729bc36bec/psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b", size = 240369, upload-time = "2025-11-02T12:26:14.358Z" },
{ url = "https://files.pythonhosted.org/packages/41/bd/313aba97cb5bfb26916dc29cf0646cbe4dd6a89ca69e8c6edce654876d39/psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9", size = 288210, upload-time = "2025-11-02T12:26:16.699Z" },
{ url = "https://files.pythonhosted.org/packages/c2/fa/76e3c06e760927a0cfb5705eb38164254de34e9bd86db656d4dbaa228b04/psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f", size = 291182, upload-time = "2025-11-02T12:26:18.848Z" },
{ url = "https://files.pythonhosted.org/packages/0f/1d/5774a91607035ee5078b8fd747686ebec28a962f178712de100d00b78a32/psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7", size = 250466, upload-time = "2025-11-02T12:26:21.183Z" },
{ url = "https://files.pythonhosted.org/packages/00/ca/e426584bacb43a5cb1ac91fae1937f478cd8fbe5e4ff96574e698a2c77cd/psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264", size = 245756, upload-time = "2025-11-02T12:26:23.148Z" },
{ url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" },
{ url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" },
{ url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" },
{ url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" },
{ url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" },
{ url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" },
]
[[package]]